diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 389e8c022..4d5681ad8 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -42,8 +42,11 @@ ADD --chown=vscode:vscode files/.bash_completion /home/vscode/.bash_completion RUN mkdir /home/vscode/.bash_completion.d/ # Setup AWS CLI -RUN pip3 install --user awscli poetry==1.5.1 -RUN echo "complete -C '/home/vscode/.local/bin/aws_completer' aws" >> ~/.bashrc +RUN set -e; \ + pip3 install --no-cache-dir --user awscli pipx; \ + pipx install poetry; \ + pipx inject poetry "poetry-dynamic-versioning[plugin]" poetry-plugin-export; \ + echo "complete -C '/home/vscode/.local/bin/aws_completer' aws" >> ~/.bashrc # Setup OS_NAME var for runway builds from this dev container RUN echo "export OS_NAME=ubuntu-latest" >> ~/.bashrc diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 48a2914f3..ce1c37d3c 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,18 +1,20 @@ { - "name": "Ubuntu", - "build": { - "dockerfile": "Dockerfile", - "args": { "VARIANT": "ubuntu-22.04" } - }, - "remoteUser": "vscode", - "customizations": { - "vscode": { - "extensions": [ - "ms-python.python" - ] - } - }, - "features": { - "ghcr.io/devcontainers/features/docker-in-docker:2": {} - } + "build": { + "args": { + "VARIANT": "ubuntu-22.04" + }, + "dockerfile": "Dockerfile" + }, + "customizations": { + "vscode": { + "extensions": [ + "ms-python.python" + ] + } + }, + "features": { + "ghcr.io/devcontainers/features/docker-in-docker:2": {} + }, + "name": "Ubuntu", + "remoteUser": "vscode" } diff --git a/.editorconfig b/.editorconfig index 0f1d28c6c..1ad016f58 100644 --- a/.editorconfig +++ b/.editorconfig @@ -1,20 +1,15 @@ root = true [*] +charset = utf-8 end_of_line = lf -indent_size = 4 +indent_size = 2 indent_style = space insert_final_newline = true trim_trailing_whitespace = true -[*.{html,js,jsx,md,rst,toml,ts,xml,yml,yaml}] -indent_size = 2 - -[*.py] -charset = utf-8 - -[*.rst] -indent_size = 2 +[{*.{json,py},Makefile}] +indent_size = 4 [Makefile] indent_style = tab diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 152a39858..1af8ae82e 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -2,6 +2,6 @@ To view our [Getting Started] guide for developers and [Contribution Requirements], please refer to the official [documentation]. -[Contribution Requirements]: https://docs.onica.com/projects/runway/page/developers/contributing.html -[Getting Started]: https://docs.onica.com/projects/runway/page/developers/getting_started.html -[documentation]: https://docs.onica.com/projects/runway +[contribution requirements]: https://runway.readthedocs.io/page/developers/contributing.html +[documentation]: https://runway.readthedocs.io +[getting started]: https://runway.readthedocs.io/page/developers/getting_started.html diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 433e735e6..8ed73f25d 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,13 +1,12 @@ --- name: Feature request about: Suggest an idea for this project -title: "[REQUEST] feature" +title: '[REQUEST] feature' labels: feature, priority:low, status:review_needed - --- **Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] +A clear and concise description of what the problem is. Ex. 
I'm always frustrated when ... **Describe the solution you'd like** A clear and concise description of what you want to happen. diff --git a/.github/ISSUE_TEMPLATE/general_question.md b/.github/ISSUE_TEMPLATE/general_question.md index 10e2e5fb8..677763637 100644 --- a/.github/ISSUE_TEMPLATE/general_question.md +++ b/.github/ISSUE_TEMPLATE/general_question.md @@ -1,9 +1,8 @@ --- name: General Question about: General question about the project, usage, design, etc. -title: "[QUESTION]" +title: '[QUESTION]' labels: priority:low, status:review_needed, question - --- **Question** diff --git a/.github/labels.yml b/.github/labels.yml index 19ead9d6b..4aee5342b 100644 --- a/.github/labels.yml +++ b/.github/labels.yml @@ -48,7 +48,7 @@ color: f0db4f description: Pull request that updates Javascript code - name: maintenance - color: fbca04 + color: fbca04 # cspell:ignore fbca04 description: General repo or CI/CD upkeep - name: npm color: cc3534 @@ -68,9 +68,6 @@ - name: priority:medium color: fbca04 description: Medium priority issue or pull request - - name: pyinstaller - color: ffffff - description: Update to Pyinstaller configuration - name: python color: '306998' description: Update to Python code diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 0b3065c6c..97ca1c446 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -37,7 +37,8 @@ # Checklist -- [ ] Have you followed the guidelines in our [Contribution Requirements](https://docs.onica.com/projects/runway/page/developers/contributing.html)? + +- [ ] Have you followed the guidelines in our [Contribution Requirements](https://runway.readthedocs.io/page/developers/contributing.html)? - [ ] Have you checked to ensure there aren't other open [Pull Requests](../../../pulls) for the same update/change? - [ ] Does your submission pass tests? - [ ] Have you linted your code locally prior to submission? diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml index eb7201bd1..57ae25f9e 100644 --- a/.github/release-drafter.yml +++ b/.github/release-drafter.yml @@ -3,7 +3,7 @@ autolabeler: # cspell:ignore autolabeler - label: bug branch: [/^(bug|bugfix|fix|hotfix)\/.*/] - label: changelog:skip - branch: [/^(dependabot)\/.*/] + branch: [/^(dependabot)\/.*/, /^pre-commit-ci-update-config/, /^renovate\/.*lockfile/] - label: cloudformation files: - '**/templates/*.json' @@ -11,7 +11,7 @@ autolabeler: # cspell:ignore autolabeler - '**/templates/*.yaml' - '**/templates/*.yml' - label: dependencies - branch: [/^(depend|dependabot)\/.*/] + branch: [/^(dep|depend|dependabot|deps|renovate)\/.*/] - label: documentation branch: [/^(docs)\/.*/] - label: feature @@ -38,10 +38,6 @@ autolabeler: # cspell:ignore autolabeler - poetry.lock - poetry.toml - pyproject.toml - - label: pyinstaller - files: - - runway.file.spec - - runway.folder.spec - label: python files: - '**/*.py' diff --git a/.github/scripts/README.md b/.github/scripts/README.md deleted file mode 100644 index 6404fc095..000000000 --- a/.github/scripts/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# GitHub Action Scripts - -Scripts used by GitHub action workflows. 
diff --git a/.github/scripts/cicd/build_pyinstaller.sh b/.github/scripts/cicd/build_pyinstaller.sh deleted file mode 100644 index b3dd85292..000000000 --- a/.github/scripts/cicd/build_pyinstaller.sh +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env bash - -# Create pyinstaller "onefile" build - -set -ev - -if [ "$OS_NAME" == "ubuntu-latest" ]; then - LOCAL_OS_NAME="linux" -elif [ "$OS_NAME" == "macos-12" ]; then - LOCAL_OS_NAME="osx" -elif [ "$OS_NAME" == "windows-latest" ]; then - LOCAL_OS_NAME="windows" -else - echo 'Environment variable "OS_NAME" must be one of ["ubuntu-latest", "macos-12", "windows-latest"]' - exit 1 -fi - -if [ "$1" != "file" ] && [ "$1" != "folder" ]; then - echo 'First positional argument must be one of ["file", "folder"]' - exit 1 -fi - -RUNWAY_VERSION=$(poetry version --short) - -poetry build -poetry run pip install "$(find dist -type f -name 'runway-*.tar.gz' -print | tail -n 1)" -find dist/* -exec rm -rfv "{}" + -mkdir -p "artifacts/${RUNWAY_VERSION}/${LOCAL_OS_NAME}" -poetry run pip show setuptools -poetry run pyinstaller --noconfirm --clean runway.$1.spec - -if [ "$1" == 'file' ]; then - mv dist/* "artifacts/${RUNWAY_VERSION}/$LOCAL_OS_NAME" - chmod +x "artifacts/${RUNWAY_VERSION}/$LOCAL_OS_NAME/runway" - # quick functional test - ./artifacts/${RUNWAY_VERSION}/$LOCAL_OS_NAME/runway --version -else - if [ "$OS_NAME" == "windows-latest" ]; then - 7z a -ttar -so ./runway.tar ./dist/runway/* | 7z a -si "./artifacts/${RUNWAY_VERSION}/${LOCAL_OS_NAME}/runway.tar.gz" - else - chmod +x dist/runway/runway-cli - # quick functional test - ./dist/runway/runway-cli --version - tar -C dist/runway/ -czvf ."/artifacts/${RUNWAY_VERSION}/${LOCAL_OS_NAME}/runway.tar.gz" . - fi -fi diff --git a/.github/scripts/cicd/check_distance_from_tag.sh b/.github/scripts/cicd/check_distance_from_tag.sh deleted file mode 100644 index cce90fa3e..000000000 --- a/.github/scripts/cicd/check_distance_from_tag.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env bash - -# Checks for the distance from the last tag. -# If there is no distance, return a non-zero exit code. -# -# This can be used in GitHub Actions with the following steps using conditionals to handle either case. -# -# Example: -# -# - id: check_distance -# continue-on-error: true -# run: bash ./check_distance_from_tag.sh -# working-directory: .github/scripts/cicd -# - if: steps.check_distance.outcome == 'failure' -# run: echo "No distance" -# - if: steps.check_distance.outcome == 'success' -# run: echo "There is distance" - -DESCRIBE=`git describe --tags --match "v*.*.*"` -echo "Result from 'git describe': ${DESCRIBE}" -DISTANCE=`echo ${DESCRIBE} | grep -P '\-(\d)*\-g(\d)*'` -if [ -n "${DISTANCE}" ]; then - echo "Distance from last tag detected: ${DISTANCE}" - echo "It is safe to proceed with a pre-production release." - exit 0 -else - echo "No distance from last tag; skipping pre-production release."
- exit 1 -fi diff --git a/.github/scripts/urlshortener/Makefile b/.github/scripts/urlshortener/Makefile deleted file mode 100644 index d6b804bb6..000000000 --- a/.github/scripts/urlshortener/Makefile +++ /dev/null @@ -1,16 +0,0 @@ -install: - @poetry install - -lint: lint-flake8 lint-pylint - -lint-flake8: - @poetry run flake8 update_urls.py - -lint-pylint: - @poetry run pylint update_urls.py \ - --rcfile=./../../../pyproject.toml - -test: - @poetry run pytest ./test_update_urls.py \ - --cov update_urls \ - --cov-config ./../../../pyproject.toml diff --git a/.github/scripts/urlshortener/README.md b/.github/scripts/urlshortener/README.md deleted file mode 100644 index 936545e65..000000000 --- a/.github/scripts/urlshortener/README.md +++ /dev/null @@ -1,30 +0,0 @@ -# urlshortener - -Scripts for the URL shortener action. - -Table of Contents - -- [urlshortener](#urlshortener) - - [Scripts](#scripts) - - [update_urls.py](#update_urlspy) - -## Scripts - -### update_urls.py - -```text -Usage: update_urls.py [OPTIONS] - - Update/add URLs to the URL shortener. - -Options: - -b, --bucket-name - Name of S3 Bucket where Runway artifact is located. [required] - --bucket-region - AWS region where the S3 Bucket is located. [required] - --latest Update the "latest" URL. [default: False] - --table Name of the DynamoDB table containing entries for the URL shortener. [required] - --version Runway version being released. [required] - --table-region AWS region where the DynamoDB table is located. [default: us-east-1] - -h, --help Show this message and exit. [default: False] -``` diff --git a/.github/scripts/urlshortener/test_update_urls.py b/.github/scripts/urlshortener/test_update_urls.py deleted file mode 100644 index d61ba85cf..000000000 --- a/.github/scripts/urlshortener/test_update_urls.py +++ /dev/null @@ -1,140 +0,0 @@ -"""Tests for update_urls.""" - -# pylint: disable=no-member -from unittest.mock import ANY, Mock, call, patch - -import boto3 -import pytest -from botocore.stub import Stubber -from click.testing import CliRunner -from mypy_boto3_dynamodb.service_resource import Table - -from update_urls import command, handler, put_item, sanitize_version - - -def test_sanitize_version(): - """Test sanitize_version.""" - assert sanitize_version(None, None, "1.0.0") == "1.0.0" - assert sanitize_version(None, None, "v1.0.0") == "1.0.0" - assert sanitize_version(None, None, "refs/tags/1.0.0") == "1.0.0" - assert sanitize_version(None, None, "refs/tags/v1.0.0") == "1.0.0" - assert sanitize_version(None, None, "refs/tags/v1.0.0-dev1") == "1.0.0-dev1" - - with pytest.raises(ValueError): - assert not sanitize_version(None, None, "refs/tags/stable") - - -def test_put_item(): - """Test put_item.""" - table_name = "test-table" - id_val = "my_id" - target = "my_target" - table: Table = boto3.resource("dynamodb").Table(table_name) - stubber = Stubber(table.meta.client) - - stubber.add_response( - "put_item", {"Attributes": {"id": {"S": id_val}, "target": {"S": target}}} - ) - - with stubber: - assert not put_item(table, id_val, target) - - -@patch("update_urls.put_item") -def test_handler(mock_put_item: Mock): - """Test handler.""" - table = Mock() - assert not handler(table, "test-bucket", "us-west-2", "1.0.0", True) - calls = [ - call( - table=table, - id_val="runway/latest/linux", - target="https://test-bucket.s3-us-west-2.amazonaws.com/" - "runway/1.0.0/linux/runway", - ), - call( - table=table, - id_val="runway/1.0.0/linux", - target="https://test-bucket.s3-us-west-2.amazonaws.com/" - 
"runway/1.0.0/linux/runway", - ), - call( - table=table, - id_val="runway/latest/osx", - target="https://test-bucket.s3-us-west-2.amazonaws.com/" - "runway/1.0.0/osx/runway", - ), - call( - table=table, - id_val="runway/1.0.0/osx", - target="https://test-bucket.s3-us-west-2.amazonaws.com/" - "runway/1.0.0/osx/runway", - ), - call( - table=table, - id_val="runway/latest/windows", - target="https://test-bucket.s3-us-west-2.amazonaws.com/" - "runway/1.0.0/windows/runway.exe", - ), - call( - table=table, - id_val="runway/1.0.0/windows", - target="https://test-bucket.s3-us-west-2.amazonaws.com/" - "runway/1.0.0/windows/runway.exe", - ), - ] - - assert not handler(table, "test-bucket", "us-east-1", "1.1.0", False) - calls.append( - call( - table=table, - id_val="runway/1.1.0/linux", - target="https://test-bucket.s3-us-east-1.amazonaws.com/" - "runway/1.1.0/linux/runway", - ) - ) - calls.append( - call( - table=table, - id_val="runway/1.1.0/osx", - target="https://test-bucket.s3-us-east-1.amazonaws.com/" - "runway/1.1.0/osx/runway", - ) - ) - calls.append( - call( - table=table, - id_val="runway/1.1.0/windows", - target="https://test-bucket.s3-us-east-1.amazonaws.com/" - "runway/1.1.0/windows/runway.exe", - ) - ) - - mock_put_item.assert_has_calls(calls) - - -@patch("update_urls.handler") -def test_command(mock_handler: Mock): - """Test command.""" - runner = CliRunner() - result = runner.invoke( - command, - args=[ - "--bucket-name", - "test-bucket", - "--bucket-region", - "us-west-2", - "--version", - "refs/tags/1.0.0", - "--table", - "test-table", - "--latest", - ], - env={ - "AWS_ACCESS_KEY_ID": "testing", - "AWS_SECRET_ACCESS_KEY": "testing", - "AWS_DEFAULT_REGION": "us-east-1", - }, - ) - assert result.exit_code == 0 - mock_handler.assert_called_once_with(ANY, "test-bucket", "us-west-2", "1.0.0", True) diff --git a/.github/scripts/urlshortener/update_urls.py b/.github/scripts/urlshortener/update_urls.py deleted file mode 100755 index 6db7e8b60..000000000 --- a/.github/scripts/urlshortener/update_urls.py +++ /dev/null @@ -1,159 +0,0 @@ -"""Update Runway release URLs.""" - -# pylint: disable=no-member -from __future__ import annotations - -import logging -from typing import TYPE_CHECKING, Optional, Union - -import boto3 -import click -from semver import VersionInfo - -if TYPE_CHECKING: - from mypy_boto3_dynamodb.service_resource import Table - -LOGGER = logging.getLogger("update_urls") -HDLR = logging.StreamHandler() -HDLR.setFormatter(logging.Formatter(logging.BASIC_FORMAT)) - -ID_TEMPLATE = "runway/{release}/{os}" -TARGET_TEMPLATE = ( - "https://{bucket_name}.s3-{region}.amazonaws.com/runway/{version}/{os}/runway" -) - -OS_NAMES = ["linux", "osx", "windows"] - - -def sanitize_version( - _ctx: Optional[click.Context], - _param: Optional[Union[click.Option, click.Parameter]], - value: str, -) -> str: - """Sanitize a version number by stripping git tag ref and leading "v". - - To be used as the callback of a click option or parameter. - - Args: - ctx: Click context object. - param: The click option or parameter the callback is being used with. - value: Value passed to the option or parameter from the CLI. - - Returns: - str: The SemVer version number. 
- - """ - version = value.replace("refs/tags/", "") # strip git ref - if version.startswith("v"): # strip leading "v" - version = version[1:] - if VersionInfo.isvalid(version): # valid SemVer - return version - raise ValueError(f'version of "{version}" does not follow SemVer') - - -def put_item(table: Table, id_val: str, target: str) -> None: - """Format and put a DDB entry.""" - LOGGER.info('Adding entry for "%s"...', id_val) - table.put_item(Item={"id": id_val, "target": target}, ReturnValues="NONE") - - -def handler( - table: Table, - bucket_name: str, - bucket_region: str, - version: str, - latest: bool = False, -) -> None: - """Handle the command. - - Core logic executed by the command aside from boto3 session/resource - initialization and logging setup. - - Args: - table: DynamoDB table resource. - bucket_name: Name of S3 Bucket where Runway artifact is located. - bucket_region: AWS region where the S3 Bucket is located. - version: SemVer version being released. - latest: Update the "latest" URL. - - """ - for os_name in OS_NAMES: - target = TARGET_TEMPLATE.format( - bucket_name=bucket_name, os=os_name, region=bucket_region, version=version - ) - if os_name == "windows": - target += ".exe" - if latest: - put_item( - table=table, - id_val=ID_TEMPLATE.format(release="latest", os=os_name), - target=target, - ) - put_item( - table=table, - id_val=ID_TEMPLATE.format(release=version, os=os_name), - target=target, - ) - - -@click.command( - context_settings={ - "help_option_names": ["-h", "--help"], - "max_content_width": 999, - "show_default": True, - } -) -@click.option( - "-b", - "--bucket-name", - metavar="", - required=True, - help="Name of S3 Bucket where Runway artifact is located.", -) -@click.option( - "--bucket-region", - metavar="", - required=True, - help="AWS region where the S3 Bucket is located.", -) -@click.option("--latest", is_flag=True, help='Update the "latest" URL.') -@click.option( - "--table", - "table_name", - metavar="", - required=True, - help="Name of the DynamoDB table containing entries for the URL " "shortener.", -) -@click.option( - "--version", - metavar="", - required=True, - callback=sanitize_version, - help="Runway version being released.", -) -@click.option( - "--table-region", - metavar="", - default="us-east-1", - help="AWS region where the DynamoDB table is located.", -) -def command( - bucket_name: str, - bucket_region: str, - latest: bool, - table_name: str, - version: str, - table_region: str = "us-east-1", -) -> None: - """Update/add URLs to the URL shortener.""" - logging.basicConfig(level=logging.INFO, handlers=[HDLR]) - logging.getLogger("botocore").setLevel(logging.ERROR) - - session = boto3.Session(region_name=table_region) - table: Table = session.resource("dynamodb").Table(table_name) - - handler(table, bucket_name, bucket_region, version, latest) - - -if __name__ == "__main__": - command() # pylint: disable=E diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index b9d593afa..a79a0a930 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -15,7 +15,7 @@ env: AWS_DEFAULT_REGION: us-east-1 AWS_MAX_ATTEMPTS: 20 # retry attempts for AWS API calls AWS_RETRY_MODE: adaptive # defaults to "legacy"; this handles more errors - NODE_VERSION: '18' + NODE_VERSION: '20' PYTEST_ADDOPTS: --color=yes RUNWAY_TEST_NAMESPACE: gh-${{ github.run_id }} PIPENV_IGNORE_VIRTUALENVS: '1' @@ -28,7 +28,8 @@ jobs: infra-test: ${{ steps.filter.outputs.infrastructure-test }} infra-test-alt: ${{ steps.filter.outputs.infrastructure-test-alt }} steps: - - uses: actions/checkout@v4 # not needed for pull_request + - name: ⤵️ Check out code from GitHub + uses: actions/checkout@v4 # not needed for pull_request if: | github.event_name == 'push' - uses: dorny/paths-filter@v3 # cspell:ignore dorny @@ -57,7 +58,7 @@ jobs: repo-head: ${{ steps.gh-context.outputs.repo-head }} # repo where change occurred repo-origin: ${{ steps.gh-context.outputs.repo-origin }} # origin of codebase steps: - - name: Output GitHub Context + - name: ℹ️ Output GitHub Context id: gh-context run: | export _REPO_ORIGIN="onicagroup/runway"; @@ -86,35 +87,28 @@ jobs: (needs.changes.outputs.infra-test == 'true' || needs.changes.outputs.infra-test-alt == 'true') runs-on: ubuntu-latest steps: - - name: Checkout Repo + - name: ⤵️ Check out code from GitHub uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - id: setup-python + - name: 🏗 Setup Python + uses: finleyfamily/action-setup-python@v1.0.0 with: - python-version: 3.9 - - uses: Gr1N/setup-poetry@v9 - - uses: actions/cache@v4 - id: cache - with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Ensure Cache Is Healthy - if: runner.os != 'Windows' && steps.cache.outputs.cache-hit == 'true' - run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv - - run: poetry install -vv - - name: Configure AWS Credentials + poetry-plugins: poetry-dynamic-versioning[plugin] + - name: 🏗 Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 with: aws-access-key-id: ${{ secrets.DEPLOY_AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.DEPLOY_AWS_SECRET_ACCESS_KEY }} aws-region: us-east-1 - - run: make deploy test + - name: 🚀 Deploy to the test environment + run: make deploy test working-directory: infrastructure - - run: make deploy test-alt + - name: 🚀 Deploy to the test-alt environment + run: make deploy test-alt working-directory: infrastructure
lint-python: name: Lint Python strategy: + fail-fast: false matrix: python-version: [3.9, '3.10', '3.11'] os: [ubuntu-latest, windows-latest] @@ -124,29 +118,21 @@ jobs: AWS_ACCESS_KEY_ID: test AWS_SECRET_ACCESS_KEY: test steps: - - name: Checkout Repo + - name: ⤵️ Check out code from GitHub uses: actions/checkout@v4 - - name: Install Node + - name: 🏗 Setup Node uses: actions/setup-node@v4 with: + cache: npm node-version: ${{ env.NODE_VERSION }} - - uses: actions/setup-python@v5 - id: setup-python + - name: 🏗 Setup Python + uses: finleyfamily/action-setup-python@v1.0.0 with: + poetry-plugins: poetry-dynamic-versioning[plugin] python-version: ${{ matrix.python-version }} - - uses: Gr1N/setup-poetry@v9 - - uses: actions/cache@v4 - id: cache - with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Ensure Cache Is Healthy - if: runner.os != 'Windows' && steps.cache.outputs.cache-hit == 'true' - run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv - - run: poetry install -vv - - name: Install Node Dependencies - run: make npm-ci - - name: Run Linters + - name: ⤵️ Install Node Dependencies + run: make setup-npm + - name: 🚀 Run Linters run: make lint pre-commit: name: pre-commit @@ -155,22 +141,12 @@ jobs: matrix: python-version: ['3.10'] steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - id: setup-python - with: - python-version: ${{ matrix.python-version }} - - uses: Gr1N/setup-poetry@v9 - - uses: actions/cache@v4 - id: cache - with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Ensure Cache Is Healthy - if: runner.os != 'Windows' && steps.cache.outputs.cache-hit == 'true' - run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv - - run: poetry install -vv - - uses: pre-commit/action@v3.0.1 + - name: ⤵️ Check out code from GitHub + uses: actions/checkout@v4 + - name: 🏗 Setup Python + uses: finleyfamily/action-setup-python@v1.0.0 + - name: 🚀 Run pre-commit + uses: pre-commit/action@v3.0.1 test-functional: name: Functional Tests needs: @@ -183,40 +159,34 @@ jobs: (needs.deploy-test-infrastructure.result == 'success' || needs.deploy-test-infrastructure.result == 'skipped') runs-on: ubuntu-latest steps: - - name: Checkout Repo + - name: ⤵️ Check out code from GitHub uses: actions/checkout@v4 - - uses: actions/setup-node@v4 + - name: 🏗 Setup Node + uses: actions/setup-node@v4 with: + cache: npm node-version: ${{ env.NODE_VERSION }} - - uses: actions/setup-python@v5 - id: setup-python + - name: 🏗 Setup Python + uses: finleyfamily/action-setup-python@v1.0.0 with: + poetry-plugins: poetry-dynamic-versioning[plugin] python-version: '3.10' - - uses: Gr1N/setup-poetry@v9 - - uses: actions/cache@v4 - id: cache - with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Ensure Cache Is Healthy - if: runner.os != 'Windows' && steps.cache.outputs.cache-hit == 'true' - run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv - - run: poetry install -vv - - name: Install Ubuntu Dependencies + - name: ⤵️ Install Ubuntu Dependencies run: | sudo apt update -y sudo apt install -y default-libmysqlclient-dev libxml2-dev libxmlsec1-dev libxmlsec1-openssl pkg-config - - name: Configure AWS Credentials + - name: 🏗 Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 with: aws-access-key-id: ${{ 
secrets.TEST_RUNNER_AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.TEST_RUNNER_AWS_SECRET_ACCESS_KEY }} aws-region: us-east-1 - - name: Run Tests + - name: 🚀 Run Tests run: make test-functional test-python: name: Test Python strategy: + fail-fast: false matrix: python-version: [3.9, '3.10', '3.11'] os: [ubuntu-latest, windows-latest] runs-on: ${{ matrix.os }} env: AWS_ACCESS_KEY_ID: test AWS_SECRET_ACCESS_KEY: test steps: - - name: Checkout Repo (complete) + - name: ⤵️ Check out code from GitHub (complete) uses: actions/checkout@v4 with: fetch-depth: 0 - - name: Install Node + - name: 🏗 Setup Node uses: actions/setup-node@v4 with: + cache: npm node-version: ${{ env.NODE_VERSION }} - - uses: actions/setup-python@v5 - id: setup-python + - name: 🏗 Setup Python + uses: finleyfamily/action-setup-python@v1.0.0 with: + poetry-plugins: poetry-dynamic-versioning[plugin] python-version: ${{ matrix.python-version }} - - uses: Gr1N/setup-poetry@v9 - - uses: actions/cache@v4 - id: cache - with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Ensure Cache Is Healthy - if: steps.cache.outputs.cache-hit == 'true' - run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv - - run: poetry install -vv - - name: Install Node Dependencies - run: make npm-install - - name: Configure Pagefile # avoid MemoryError during tests + - name: ⤵️ Install Node Dependencies + run: make setup-npm + - name: 🏗 Configure Pagefile # avoid MemoryError during tests if: runner.os == 'Windows' uses: al-cheb/configure-pagefile-action@v1.4 # cspell:ignore cheb with: minimum-size: 16GB maximum-size: 16GB disk-root: 'C:' - - name: Run Integration & Unit Tests + - name: 🚀 Run Integration & Unit Tests # assertions assume linux so some fail when run on windows run: make test cov-xml - - name: Upload to Codecov + - name: ⤴️ Upload to Codecov uses: codecov/codecov-action@v4.5.0 with: token: ${{ secrets.CODECOV_TOKEN }} @@ -271,34 +233,25 @@ - pre-commit runs-on: ubuntu-latest steps: - - name: Checkout Repo (complete) + - name: ⤵️ Check out code from GitHub (complete) uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: actions/setup-python@v5 - id: setup-python + - name: 🏗 Setup Python + uses: finleyfamily/action-setup-python@v1.0.0 with: - python-version: 3.9 + poetry-install: false + poetry-plugins: poetry-dynamic-versioning[plugin] # Remove apt repos that are known to break from time to time # See https://github.com/actions/virtual-environments/issues/323 - name: Remove broken apt repos (ubuntu) run: | for apt_file in `grep -lr microsoft /etc/apt/sources.list.d/`; do sudo rm $apt_file; done - - name: Install Dependencies (ubuntu) + - name: ⤵️ Install Dependencies (ubuntu) run: sudo apt-get update && sudo apt-get install sed -y - - uses: Gr1N/setup-poetry@v9 - - uses: actions/cache@v4 - id: cache - with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Ensure Cache Is Healthy - if: runner.os != 'Windows' && steps.cache.outputs.cache-hit == 'true' - run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv - - run: poetry install -vv - - name: Run Build + - name: 👷 Build run: make build - - name: Upload Distribution Artifact + - name: ⤴️ Upload distribution artifact uses: actions/upload-artifact@v4 with: name: pypi-dist path: dist diff --git a/.github/workflows/label-maker.yml b/.github/workflows/label-maker.yml index cef7692b5..4daf073f0 
100644 --- a/.github/workflows/label-maker.yml +++ b/.github/workflows/label-maker.yml @@ -15,9 +15,9 @@ jobs: if: github.repository == 'onicagroup/runway' && github.actor != 'dependabot[bot]' runs-on: ubuntu-latest steps: - - name: Checkout + - name: ⤵️ Check out code from GitHub uses: actions/checkout@v4 - - name: Run Label Maker + - name: 🚀 Run Label Maker uses: crazy-max/ghaction-github-labeler@v5 with: github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/on-pr-target-opened.yml b/.github/workflows/on-pr-target-opened.yml deleted file mode 100644 index c135e959b..000000000 --- a/.github/workflows/on-pr-target-opened.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: PR Target (opened) - - -on: - pull_request_target: - types: - - opened - - reopened - - -jobs: - assign-author: - name: Assign Author to PR - if: github.actor != 'dependabot[bot]' - runs-on: ubuntu-latest - steps: - - uses: technote-space/assign-author@v1 # cspell:ignore technote - label-pr: - name: Label PR - runs-on: ubuntu-latest - steps: - - uses: release-drafter/release-drafter@v6.0.0 - with: - disable-releaser: true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/on-pr.yml b/.github/workflows/on-pr.yml deleted file mode 100644 index b03eef6a9..000000000 --- a/.github/workflows/on-pr.yml +++ /dev/null @@ -1,17 +0,0 @@ -name: PR - - -on: - pull_request: - - -jobs: - branch-name: - name: Enforce Branch Name - runs-on: ubuntu-latest - steps: - - uses: deepakputhraya/action-branch-name@v1.0.0 # cspell:ignore deepakputhraya - with: - regex: ([a-z])+\/([a-zA-Z0-9\-\_])+ - allowed_prefixes: bugfix,chore,depend,dependabot,docs,feat,feature,fix,hotfix,maint,maintain,maintenance,release - ignore: develop,master,release,v0_47_fixes diff --git a/.github/workflows/on-push-pyinstaller.yml b/.github/workflows/on-push-pyinstaller.yml deleted file mode 100644 index eb6f50d73..000000000 --- a/.github/workflows/on-push-pyinstaller.yml +++ /dev/null @@ -1,213 +0,0 @@ -name: Pyinstaller (on_push) - - -on: - push: - paths: - - .github/scripts/cicd/build_pyinstaller.sh - - .github/workflows/on-push-pyinstaller.yml - - runway/* - - poetry.lock - - pyproject.toml - - runway.file.spec - - runway.folder.spec - - -env: - AWS_DEFAULT_REGION: us-east-1 - - -jobs: - build-pyinstaller-onefile: - name: Pyinstaller "One File" Build - strategy: - matrix: - os: [macos-12, ubuntu-latest, windows-latest] - python-version: [3.9] - runs-on: ${{ matrix.os }} - env: - OS_NAME: ${{ matrix.os }} - # pydantic binary causes a recursion error - # https://github.com/pyinstaller/pyinstaller/issues/4406 - PIP_NO_BINARY: pydantic - steps: - - name: Checkout Repo (complete) - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: actions/setup-python@v5 - id: setup-python - with: - python-version: ${{ matrix.python-version }} - # Remove apt repos that are known to break from time to time - # See https://github.com/actions/virtual-environments/issues/323 - - name: Remove broken apt repos (ubuntu) - if: startsWith( matrix.os, 'ubuntu' ) - run: | - for apt_file in `grep -lr microsoft /etc/apt/sources.list.d/`; do sudo rm $apt_file; done - - name: Install Dependencies (ubuntu) - if: startsWith( matrix.os, 'ubuntu' ) - run: sudo apt-get update && sudo apt-get install sed -y - - name: Install Dependencies (windows) - if: matrix.os == 'windows-latest' - run: choco install make sed - - uses: Gr1N/setup-poetry@v9 - - uses: actions/cache@v4 - id: cache - with: - path: .venv - key: venv-${{ runner.os }}-${{ 
steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Ensure Cache Is Healthy - if: runner.os != 'Windows' && steps.cache.outputs.cache-hit == 'true' - run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv - - run: poetry install -vv - - name: Run Build - run: make build-pyinstaller-file - - name: Upload Artifacts - uses: actions/upload-artifact@v4 - with: - name: pyinstaller-onefile-${{ matrix.os }} - path: artifacts - build-pyinstaller-onefolder: - name: Pyinstaller "One Folder" Build - strategy: - matrix: - os: [macos-12, ubuntu-latest, windows-latest] - python-version: [3.9] - runs-on: ${{ matrix.os }} - env: - OS_NAME: ${{ matrix.os }} - # pydantic binary causes a recursion error - # https://github.com/pyinstaller/pyinstaller/issues/4406 - PIP_NO_BINARY: pydantic - steps: - - name: Checkout Repo (complete) - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: actions/setup-python@v5 - id: setup-python - with: - python-version: ${{ matrix.python-version }} - # Remove apt repos that are known to break from time to time - # See https://github.com/actions/virtual-environments/issues/323 - - name: Remove broken apt repos (ubuntu) - if: startsWith( matrix.os, 'ubuntu' ) - run: | - for apt_file in `grep -lr microsoft /etc/apt/sources.list.d/`; do sudo rm $apt_file; done - - name: Install Dependencies (ubuntu) - if: startsWith( matrix.os, 'ubuntu' ) - run: sudo apt-get update && sudo apt-get install sed -y - - name: Install Dependencies (windows) - if: matrix.os == 'windows-latest' - run: choco install make sed - - uses: Gr1N/setup-poetry@v9 - - uses: actions/cache@v4 - id: cache - with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Ensure Cache Is Healthy - if: runner.os != 'Windows' && steps.cache.outputs.cache-hit == 'true' - run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv - - run: poetry install -vv - - name: Run Build - run: make build-pyinstaller-folder - - name: Upload Artifacts - uses: actions/upload-artifact@v4 - with: - name: pyinstaller-onefolder-${{ matrix.os }} - path: artifacts - build-npm: - name: Build npm 📦 - if: github.ref == 'refs/heads/master' - needs: - - build-pyinstaller-onefolder - env: - NODE_VERSION: 18 - NPM_PACKAGE_NAME: '@onica/runway' - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest] - python-version: [3.9] - runs-on: ${{ matrix.os }} - steps: - - name: Checkout Repo (complete) - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - id: check_distance - name: Ensure Commit Is Not Tagged - continue-on-error: true - run: bash ./check_distance_from_tag.sh - working-directory: .github/scripts/cicd - - uses: actions/setup-python@v5 - id: setup-python - with: - python-version: ${{ matrix.python-version }} - - uses: actions/setup-node@v4 - if: steps.check_distance.outcome == 'success' - with: - always-auth: true - node-version: ${{ env.NODE_VERSION }} - registry-url: https://registry.npmjs.org - scope: '@onica' - # Remove apt repos that are known to break from time to time - # See https://github.com/actions/virtual-environments/issues/323 - - name: Remove broken apt repos (ubuntu) - if: startsWith( matrix.os, 'ubuntu' ) - run: | - for apt_file in `grep -lr microsoft /etc/apt/sources.list.d/`; do sudo rm $apt_file; done - - name: Install Dependencies (ubuntu) - if: steps.check_distance.outcome == 'success' - run: sudo apt-get update && sudo apt-get install sed tree -y - - uses: 
Gr1N/setup-poetry@v9 - - uses: actions/cache@v4 - id: cache - with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Ensure Cache Is Healthy - if: runner.os != 'Windows' && steps.cache.outputs.cache-hit == 'true' - run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv - - run: poetry install -vv - - name: Download Artifacts (macOS) - if: steps.check_distance.outcome == 'success' - uses: actions/download-artifact@v4 - with: - name: pyinstaller-onefolder-macos-12 - path: artifacts - - name: Download Artifacts (ubuntu) - if: steps.check_distance.outcome == 'success' - uses: actions/download-artifact@v4 - with: - name: pyinstaller-onefolder-ubuntu-latest - path: artifacts - - name: Download Artifacts (windows) - if: steps.check_distance.outcome == 'success' - uses: actions/download-artifact@v4 - with: - name: pyinstaller-onefolder-windows-latest - path: artifacts - - name: List Artifacts - if: steps.check_distance.outcome == 'success' - run: tree artifacts/ - - name: npm Prep - if: steps.check_distance.outcome == 'success' - run: make npm-prep - - name: npm pack - if: steps.check_distance.outcome == 'success' - run: | - npm pack - rm -rf artifacts && mkdir -p artifacts - find . -name 'onica-runway-*.*.*.tgz' -exec mv {} artifacts/ \; - - name: Skipped Publishing - if: steps.check_distance.outcome == 'failure' - run: echo "A pre-production version was not published because the current commit is tagged for release." - - name: Upload Artifacts - if: steps.check_distance.outcome == 'success' - uses: actions/upload-artifact@v4 - with: - name: npm-pack - path: artifacts diff --git a/.github/workflows/publish-on-release.yml b/.github/workflows/publish-on-release.yml deleted file mode 100644 index b259dfeb4..000000000 --- a/.github/workflows/publish-on-release.yml +++ /dev/null @@ -1,385 +0,0 @@ -name: Publish Release - - -on: - release: - types: - - published - -env: - AWS_DEFAULT_REGION: us-east-1 - - -jobs: - build-pyinstaller-onefile: - name: Pyinstaller "One File" Build - strategy: - fail-fast: true - matrix: - os: [macos-12, ubuntu-latest, windows-latest] - python-version: [3.9] - runs-on: ${{ matrix.os }} - env: - OS_NAME: ${{ matrix.os }} - # pydantic binary causes a recursion error - # https://github.com/pyinstaller/pyinstaller/issues/4406 - PIP_NO_BINARY: pydantic - steps: - - name: Checkout Repo (complete) - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: actions/setup-python@v5 - id: setup-python - with: - python-version: ${{ matrix.python-version }} - # Remove apt repos that are known to break from time to time - # See https://github.com/actions/virtual-environments/issues/323 - - name: Remove broken apt repos (ubuntu) - if: startsWith( matrix.os, 'ubuntu' ) - run: | - for apt_file in `grep -lr microsoft /etc/apt/sources.list.d/`; do sudo rm $apt_file; done - - name: Install Dependencies (ubuntu) - if: startsWith( matrix.os, 'ubuntu' ) - run: sudo apt-get update && sudo apt-get install sed -y - - name: Install Dependencies (windows) - if: matrix.os == 'windows-latest' - run: choco install make sed - - uses: Gr1N/setup-poetry@v9 - - uses: actions/cache@v4 - id: cache - with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Ensure Cache Is Healthy - if: runner.os != 'Windows' && steps.cache.outputs.cache-hit == 'true' - run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv
- - run: poetry install -vv - - name: Run Build - run: make build-pyinstaller-file - - name: Upload Artifacts - uses: actions/upload-artifact@v4 - with: - name: pyinstaller-onefile-${{ matrix.os }} - path: artifacts - build-pyinstaller-onefolder: - name: Pyinstaller "One Folder" Build - strategy: - fail-fast: true - matrix: - os: [macos-12, ubuntu-latest, windows-latest] - python-version: [3.9] - runs-on: ${{ matrix.os }} - env: - OS_NAME: ${{ matrix.os }} - # pydantic binary causes a recursion error - # https://github.com/pyinstaller/pyinstaller/issues/4406 - PIP_NO_BINARY: pydantic - steps: - - name: Checkout Repo (complete) - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: actions/setup-python@v5 - id: setup-python - with: - python-version: ${{ matrix.python-version }} - # Remove apt repos that are known to break from time to time - # See https://github.com/actions/virtual-environments/issues/323 - - name: Remove broken apt repos (ubuntu) - if: startsWith( matrix.os, 'ubuntu' ) - run: | - for apt_file in `grep -lr microsoft /etc/apt/sources.list.d/`; do sudo rm $apt_file; done - - name: Install Dependencies (ubuntu) - if: startsWith( matrix.os, 'ubuntu' ) - run: sudo apt-get update && sudo apt-get install sed -y - - name: Install Dependencies (windows) - if: matrix.os == 'windows-latest' - run: choco install make sed - - uses: Gr1N/setup-poetry@v9 - - uses: actions/cache@v4 - id: cache - with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Ensure Cache Is Healthy - if: runner.os != 'Windows' && steps.cache.outputs.cache-hit == 'true' - run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv - - run: poetry install -vv - - name: Run Build - run: make build-pyinstaller-folder - - name: Upload Artifacts - uses: actions/upload-artifact@v4 - with: - name: pyinstaller-onefolder-${{ matrix.os }} - path: artifacts - build-npm: - name: Build npm 📦 - needs: - - build-pyinstaller-onefolder - env: - NODE_VERSION: 18 - NPM_PACKAGE_NAME: '@onica/runway' - strategy: - fail-fast: true - matrix: - os: [ubuntu-latest] - python-version: [3.9] - runs-on: ${{ matrix.os }} - steps: - - name: Checkout Repo (complete) - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: actions/setup-python@v5 - id: setup-python - with: - python-version: ${{ matrix.python-version }} - - uses: actions/setup-node@v4 - with: - always-auth: true - node-version: ${{ env.NODE_VERSION }} - registry-url: https://registry.npmjs.org - scope: '@onica' - # Remove apt repos that are known to break from time to time - # See https://github.com/actions/virtual-environments/issues/323 - - name: Remove broken apt repos (ubuntu) - if: startsWith( matrix.os, 'ubuntu' ) - run: | - for apt_file in `grep -lr microsoft /etc/apt/sources.list.d/`; do sudo rm $apt_file; done - - name: Install Dependencies (ubuntu) - run: sudo apt-get update && sudo apt-get install sed tree -y - - uses: Gr1N/setup-poetry@v9 - - uses: actions/cache@v4 - id: cache - with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Ensure Cache Is Healthy - if: runner.os != 'Windows' && steps.cache.outputs.cache-hit == 'true' - run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv - - run: poetry install -vv - - name: Download Artifacts (macOS) - uses: actions/download-artifact@v4 - with: - name: pyinstaller-onefolder-macos-12 - path: artifacts - - 
name: Download Artifacts (ubuntu) - uses: actions/download-artifact@v4 - with: - name: pyinstaller-onefolder-ubuntu-latest - path: artifacts - - name: Download Artifacts (windows) - uses: actions/download-artifact@v4 - with: - name: pyinstaller-onefolder-windows-latest - path: artifacts - - name: List Artifacts - run: tree artifacts/ - - name: npm Prep - run: make npm-prep - - name: npm pack - run: | - npm pack - rm -rf artifacts && mkdir -p artifacts - find . -name 'onica-runway-*.*.*.tgz' -exec mv {} artifacts/ \; - - name: Upload Artifacts - uses: actions/upload-artifact@v4 - with: - name: npm-pack - path: artifacts - publish-npm: - name: Publish 📦 To npm - needs: - - build-npm - env: - CI: true - NODE_VERSION: 18 - NPM_PACKAGE_NAME: '@onica/runway' - NODE_AUTH_TOKEN: ${{ secrets.npm_api_token }} - strategy: - fail-fast: true - matrix: - os: [ubuntu-latest] - python-version: [3.9] - runs-on: ${{ matrix.os }} - steps: - - uses: actions/setup-node@v4 - with: - node-version: ${{ env.NODE_VERSION }} - registry-url: https://registry.npmjs.org/ - - name: Download Artifact - uses: actions/download-artifact@v4 - with: - name: npm-pack - path: artifacts - - name: Publish Distribution 📦 to npm - env: - NODE_AUTH_TOKEN: ${{ secrets.npm_api_token }} - run: | - find ./artifacts -name 'onica-runway-*.*.*.tgz' -exec npm publish --access public {} + - build-pypi: - name: Build PyPI 📦 - runs-on: ubuntu-latest - steps: - - name: Checkout Repo (complete) - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: actions/setup-python@v5 - id: setup-python - with: - python-version: 3.9 - # Remove apt repos that are known to break from time to time - # See https://github.com/actions/virtual-environments/issues/323 - - name: Remove broken apt repos (ubuntu) - run: | - for apt_file in `grep -lr microsoft /etc/apt/sources.list.d/`; do sudo rm $apt_file; done - - name: Install Dependencies (ubuntu) - run: sudo apt-get update && sudo apt-get install sed -y - - uses: Gr1N/setup-poetry@v9 - - uses: actions/cache@v4 - id: cache - with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Ensure Cache Is Healthy - if: runner.os != 'Windows' && steps.cache.outputs.cache-hit == 'true' - run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv - - run: poetry install -vv - - name: Run Build - run: make build - - name: Upload Distribution Artifact - uses: actions/upload-artifact@v4 - with: - name: pypi-dist - path: dist - publish-pypi: - name: Publish 📦 To PyPI - needs: - - build-pypi - runs-on: ubuntu-latest - steps: - - name: Checkout Repo (complete) - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Download Distribution Artifact - uses: actions/download-artifact@v4 - with: - name: pypi-dist - path: dist - - uses: actions/setup-python@v5 - id: setup-python - with: - python-version: 3.9 - - uses: Gr1N/setup-poetry@v9 - - uses: actions/cache@v4 - id: cache - with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Ensure Cache Is Healthy - if: steps.cache.outputs.cache-hit == 'true' - run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv - - run: poetry install -vv - - run: make version - - name: Publish Distribution 📦 to PyPI - env: - POETRY_PYPI_TOKEN_PYPI: ${{ secrets.pypi_password }} - run: poetry publish - publish-s3: - name: Publish 📦 To S3 - needs: - - build-pyinstaller-onefile - env: - AWS_DEFAULT_REGION: 
us-west-2 - AWS_S3_BUCKET: common-runway-assets-bucket83908e77-u2xp1bj1tuhp - AWS_ACCESS_KEY_ID: ${{ secrets.aws_access_key }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.aws_secret_key }} - runs-on: ubuntu-latest - steps: - - uses: actions/setup-python@v5 - id: setup-python - with: - python-version: 3.9 - - name: Download Artifacts (macOS) - uses: actions/download-artifact@v4 - with: - name: pyinstaller-onefile-macos-12 - path: artifacts - - name: Download Artifacts (ubuntu) - uses: actions/download-artifact@v4 - with: - name: pyinstaller-onefile-ubuntu-latest - path: artifacts - - name: Download Artifacts (windows) - uses: actions/download-artifact@v4 - with: - name: pyinstaller-onefile-windows-latest - path: artifacts - - name: Install AWS CLI & Upload 📦 - run: | - pip install "awscli~=1.18.19" - aws s3 cp artifacts s3://$AWS_S3_BUCKET/runway/ --recursive --acl public-read - update-urlshortener: - name: Update URL Shortener - needs: - - publish-s3 - env: - BUCKET_NAME: common-runway-assets-bucket83908e77-u2xp1bj1tuhp - BUCKET_REGION: us-west-2 - TABLE: onica-urlshortener-prod - TABLE_REGION: us-east-1 - VERSION: ${{ github.ref }} - runs-on: ubuntu-latest - steps: - - name: Checkout Repo - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - id: setup-python - with: - python-version: 3.9 - - uses: Gr1N/setup-poetry@v9 - - uses: actions/cache@v4 - id: cache - with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Ensure Cache Is Healthy - if: runner.os != 'Windows' && steps.cache.outputs.cache-hit == 'true' - run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv - - run: poetry install -vv - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-access-key-id: ${{ secrets.aws_access_key }} - aws-secret-access-key: ${{ secrets.aws_secret_key }} - aws-region: us-east-1 - - name: Run Script - working-directory: .github/scripts/urlshortener - run: | - poetry run python update_urls.py \ - --bucket-name $BUCKET_NAME \ - --bucket-region $BUCKET_REGION \ - --table $TABLE \ - --table-region $TABLE_REGION \ - --version $VERSION \ - --latest - notify-on-publish: - name: Notify - needs: - - publish-npm - - publish-pypi - - update-urlshortener - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Microsoft Teams Notification - uses: skitionek/notify-microsoft-teams@v1.0.8 - if: always() - with: - webhook_url: ${{ secrets.MSTEAMS_WEBHOOK }} - needs: ${{ toJson(needs) }} - job: ${{ toJson(job) }} - steps: ${{ toJson(steps) }} diff --git a/.github/workflows/pull_request_target.yml b/.github/workflows/pull_request_target.yml new file mode 100644 index 000000000..e9e3786b4 --- /dev/null +++ b/.github/workflows/pull_request_target.yml @@ -0,0 +1,36 @@ +name: pull_request_target + +on: + pull_request_target: + +jobs: + assign-author-to-pr: + name: Assign Author to PR + if: ${{ !endswith(github.actor, '[bot]') && (github.event.action == 'opened' || github.event.action == 'reopened') }} + runs-on: ubuntu-latest + steps: + - uses: technote-space/assign-author@v1 # cspell:ignore technote + enforce-branch-name: + name: Enforce Branch Name + runs-on: ubuntu-latest + steps: + - name: 💂 Enforce Branch Name + uses: finleyfamily/action-enforce-branch-name@v1.0.0 + with: + allowed_prefixes: >- + bugfix,chore,dep,depend,dependabot,deps,docs,feat,feature,fix,hotfix, + maint,maintain,maintenance,pre-commit,release,renovate,snyk + label-pr: + name: Label PR + if: 
${{ github.event.action == 'opened' || github.event.action == 'reopened' || github.event.action == 'synchronize' }} + permissions: + contents: write + pull-requests: write + runs-on: ubuntu-latest + steps: + - name: 🚀 Run Release Drafter + uses: release-drafter/release-drafter@v6 + with: + disable-releaser: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release-management.yml b/.github/workflows/release-management.yml index 511f31bdb..0f36a5012 100644 --- a/.github/workflows/release-management.yml +++ b/.github/workflows/release-management.yml @@ -9,10 +9,13 @@ on: jobs: update_draft_release: name: Draft release + permissions: + contents: write runs-on: ubuntu-latest steps: # https://github.com/release-drafter/release-drafter - - uses: release-drafter/release-drafter@v6.0.0 + - name: 🚀 Run Release Drafter + uses: release-drafter/release-drafter@v6.0.0 env: # Using a PAT here will allow releases to trigger a build/release but # we're just using the actions token for the time being since we diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 000000000..39ef370d9 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,77 @@ +name: Publish Release + +on: + release: + types: + - published + +env: + AWS_DEFAULT_REGION: us-east-1 + +jobs: + build-pypi: + name: Build PyPI 📦 + runs-on: ubuntu-latest + steps: + - name: ⤵️ Check out code from GitHub (complete) + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: 🏗 Setup Python + uses: finleyfamily/action-setup-python@v1.0.0 + with: + poetry-plugins: poetry-dynamic-versioning[plugin] + # Remove apt repos that are known to break from time to time + # See https://github.com/actions/virtual-environments/issues/323 + - name: Remove broken apt repos (ubuntu) + run: | + for apt_file in `grep -lr microsoft /etc/apt/sources.list.d/`; do sudo rm $apt_file; done + - name: ⤵️ Install Dependencies (ubuntu) + run: sudo apt-get update && sudo apt-get install sed -y + - name: 👷 Build + run: make build + - name: ⤴️ Upload distribution artifact + uses: actions/upload-artifact@v4 + with: + name: pypi-dist + path: dist + publish-pypi: + name: Publish 📦 To PyPI + needs: + - build-pypi + runs-on: ubuntu-latest + steps: + - name: ⤵️ Check out code from GitHub (complete) + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: ⤵️ Download distribution artifact + uses: actions/download-artifact@v4 + with: + name: pypi-dist + path: dist + - name: 🏗 Setup Python + uses: finleyfamily/action-setup-python@v1.0.0 + with: + poetry-install: false + poetry-plugins: poetry-dynamic-versioning[plugin] + - name: 🚀 Publish Distribution 📦 to PyPI + env: + POETRY_PYPI_TOKEN_PYPI: ${{ secrets.pypi_password }} + run: poetry publish + notify-on-publish: + name: Notify + needs: + - publish-pypi + runs-on: ubuntu-latest + steps: + - name: ⤵️ Check out code from GitHub + uses: actions/checkout@v4 + - name: 🚀 Microsoft Teams Notification + uses: skitionek/notify-microsoft-teams@v1.0.8 # cspell:ignore skitionek + if: always() + with: + webhook_url: ${{ secrets.MSTEAMS_WEBHOOK }} + needs: ${{ toJson(needs) }} + job: ${{ toJson(job) }} + steps: ${{ toJson(steps) }} diff --git a/.github/workflows/spell-check.yml b/.github/workflows/spell-check.yml index 4bfbf9370..7d86c82ed 100644 --- a/.github/workflows/spell-check.yml +++ b/.github/workflows/spell-check.yml @@ -10,15 +10,20 @@ on: - master env: - NODE_VERSION: '18' + NODE_VERSION: '20' jobs: spell-check: runs-on: 
ubuntu-latest steps: - - uses: actions/checkout@v4 - - uses: actions/setup-node@v4 + - name: ⤵️ Check out code from GitHub + uses: actions/checkout@v4 + - name: 🏗 Setup Node + uses: actions/setup-node@v4 with: + cache: npm node-version: ${{ env.NODE_VERSION }} - - run: make npm-ci - - run: make spellcheck + - name: ⤵️ Install Node Dependencies + run: make setup-npm + - name: 🚀 Run spellcheck + run: make spellcheck diff --git a/.gitignore b/.gitignore index e4a47a01a..66abfa70e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,4 @@ !infrastructure/**/**/**/package.json -!runway/templates/**/.npmignore *.egg-info *.pyc *.pyo @@ -9,7 +8,6 @@ .eggs .idea .mypy_cache -.npmignore .runway .secrets .serverless diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b43487cbb..9194daa8f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,11 +1,19 @@ -minimum_pre_commit_version: 2.6.0 +default_language_version: + node: system + +exclude: | + (?x)^( + (.*/)?package-lock\.json| + (.*/)?poetry\.lock + )$ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.3.0 + rev: v4.6.0 hooks: - id: check-json - id: check-merge-conflict + - id: check-toml - id: check-yaml args: - --unsafe # needed for parsing CFN @@ -14,56 +22,47 @@ repos: - id: file-contents-sorter files: | (?x)^( + \.dockerignore| \.gitignore| \.vscode/dictionaries/.*\.txt| - MANIFEST.in )$ - id: pretty-format-json args: [--autofix, --indent, '4'] - files: | + exclude: | (?x)^( - \.vscode/.*\.json + (.*)?(angular|cdk|package|tsconfig(\.spec)?|tslint)\.json )$ - id: pretty-format-json args: [--autofix, --indent, '2'] files: | (?x)^( - (.*)?(cdk|tsconfig|tslint).json + (.*)?(angular|cdk|package|tsconfig(\.spec)?|tslint)\.json )$ - id: trailing-whitespace + - repo: https://github.com/pappasam/toml-sort + rev: v0.23.1 + hooks: + - id: toml-sort-fix - repo: https://github.com/ITProKyle/pre-commit-hook-yamlfmt - rev: v0.2.0 + rev: v0.3.0 hooks: - id: yamlfmt args: [--mapping, '2', --offset, '2', --sequence, '4'] - files: | + exclude: | (?x)^( - \.github/(?!dependabot).*\.(yaml|yml)| - \.markdownlint.yml| - \.pre-commit-config.yaml| - \.readthedocs.yml| - buildspec.yml + tests/unit/module/staticsite/fixtures/expected_yaml/.*\.(yaml|yml)| + docs/runway-example\.yml )$ - - repo: https://github.com/timothycrosley/isort - rev: 5.12.0 - hooks: - - id: isort - - repo: https://github.com/psf/black - rev: 24.1.1 - hooks: - - id: black - args: - - --color - - repo: https://github.com/pycqa/flake8 - rev: 4.0.1 + - repo: https://github.com/executablebooks/mdformat + rev: 0.7.17 hooks: - - id: flake8 + - id: mdformat additional_dependencies: - - flake8-bugbear - - flake8-docstrings - - flake8-print==5.0.0 - - flake8-use-fstring + - mdformat-frontmatter + - mdformat-gfm + - mdformat-gfm-alerts + - mdformat-tables - repo: https://github.com/igorshubovych/markdownlint-cli - rev: v0.31.1 + rev: v0.41.0 hooks: - id: markdownlint diff --git a/.readthedocs.yml b/.readthedocs.yml index bf2eee012..a179c419f 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,37 +1,24 @@ -# .readthedocs.yml # Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details - -# Required version: 2 -# Set the version of Python and other tools you might need build: + os: ubuntu-24.04 + tools: + python: '3.12' jobs: post_create_environment: # Install poetry # https://python-poetry.org/docs/#installing-manually - pip install 
poetry + - poetry self add "poetry-dynamic-versioning[plugin]" + - poetry dynamic-versioning post_install: # Install dependencies with 'docs' dependency group # https://python-poetry.org/docs/managing-dependencies/#dependency-groups # VIRTUAL_ENV needs to be set manually for now. # See https://github.com/readthedocs/readthedocs.org/pull/11152/ - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH poetry install --with docs,types - os: ubuntu-22.04 - tools: - python: '3.12' - -# Optionally build your docs in additional formats such as PDF and ePub -formats: all - -# Optionally declare the Python requirements required to build your docs -python: - install: - - method: pip - path: . - extra_requirements: - - docs # Build documentation in the docs/ directory with Sphinx sphinx: diff --git a/.vscode/cspell.json b/.vscode/cspell.json index d06031501..92b0f2a0b 100644 --- a/.vscode/cspell.json +++ b/.vscode/cspell.json @@ -1,4 +1,5 @@ { + "allowCompoundWords": true, "dictionaries": [ "local", "pypi", @@ -27,13 +28,26 @@ ], "ignorePaths": [ "**/*.egg-info/**", + "**/*.gzip", "**/*.js", "**/*.pyc", + "**/*.tar*", "**/*.ts", + "**/.cache", + "**/.envrc", + "**/.git", + ".devcontainer/files/.bash_completion", + "**/.gitignore", "**/.runway/**", + "**/.secret", "**/.serverless/**", + "**/.terraform.lock.hcl", "**/.terraform/**", "**/.venv/**", + "**/.vscode/cspell.json", + "**/.vscode/extensions.json", + "**/.vscode/launch.json", + "**/.vscode/settings.json", "**/Pipfile", "**/Pipfile.lock", "**/__pycache__/**", @@ -41,8 +55,7 @@ "**/angular.json", "**/artifacts/**", "**/build/**", - "**/build/**", - "**/dist/**", + "**/cdk.json", "**/dist/**", "**/dot_gitignore", "**/node_modules/**", @@ -59,164 +72,174 @@ "maxNumberOfProblems": 100, "version": "0.2", "words": [ - "abstractmethod", "ALGS", + "EOCD", + "FQDNs", + "Fakhreddine", + "Inconsolata", + "Ngin", + "PYXMLSEC", + "Pipefile", + "SPHINXAUTOBUILD", + "SPHINXAUTOBUILDPORT", + "absolutepath", + "abstractmethod", + "accesspoint", + "addoption", + "appendleft", + "arcname", + "argparsing", + "assumerole", + "authmap", "autoattribute", + "autobuild", + "autodetected", + "autofind", + "autoloaded", + "autoscale", + "autoscaler", + "autouse", + "awslogbucket", + "backported", + "barfoo", + "blogpost", + "caplog", "certifi", + "certificatemanager", + "chunksize", + "classdir", + "classmethods", + "clienterror", "cmds", "codecov", + "configvars", + "copydir", + "datetimez", "devel", - "EOCD", - "FQDNs", + "dockerized", + "domparator", + "downstreams", + "dryrun", + "dunder", + "edgelambda", + "ekscluster", + "eksservicerole", + "errmsg", + "excinfo", + "execglobals", + "filedes", + "filedir", + "filehandle", + "fileinfo", + "fileinfos", + "fileout", + "foobarfoo", + "frontmatter", "fstring", + "furo", + "getgid", + "getpreferredencoding", "getuid", + "graphviz", "hashextra", + "hashfile", "hashicorp", + "htmlhelp", + "humanreadable", + "identless", "igittigitt", + "indentless", + "instancerole", + "intersphinx", + "invalidtestkey", + "keylist", "kwoa", + "libbz", "libltdl", "libmysqlclient", + "libncursesw", "libxmlsec", + "lintfix", + "locallocal", "ltdl", "lxml", + "managementpolicy", "markexpr", - "Ngin", - "openid", - "Pipefile", - "PYXMLSEC", - "rglob", - "runtimes", - "tomap", - "tomli", - "typeshed", - "unsubscriptable", - "xmlsec", - "intersphinx", - "viewcode", + "maxsplit", + "mdformat", + "mynamespace", + "mystack", + "nameextra", + "nameserver", + "nestedkey", + "nestedval", + "newdir", + "newfile", "nitpicky", - "htmlhelp", - "runwaydoc", -
"typehints", - "templatedir", - "getpreferredencoding", - "execglobals", - "refreshable", - "nodeinstancerole", - "nodeinstanceprofile", - "autoscaler", - "thisfile", - "eksservicerole", - "ekscluster", "nodegroup", + "nodeinstanceprofile", + "nodeinstancerole", + "nodelaunchtemplate", "nodesecuritygroup", - "blogpost", - "awslogbucket", - "edgelambda", - "terraformlocktable", - "terraformstatebucket", - "managementpolicy", - "graphviz", - "classdir", - "configvars", - "nosetests", + "nonbool", "noninteractive", - "downstreams", - "appendleft", - "dockerized", - "certificatemanager", - "copydir", - "maxsplit", - "getpreferredencoding", - "absolutepath", - "getgid", - "assumerole", - "excinfo", - "caplog", - "classmethods", - "autoloaded", - "autouse", - "accesspoint", - "readacl", - "writeacl", "nonseekable", - "chunksize", - "fileinfo", - "fileinfos", - "dryrun", - "sourcebucket", - "sourcekey", - "locallocal", - "rootdir", + "nosetests", "onezone", - "backported", - "usefixtures", - "tagset", - "testtemplate", - "safehaven", - "barfoo", - "dunder", - "testval", - "testkey", - "invalidtestkey", - "subclasscheck", - "paravirtual", - "autouse", - "nonbool", - "ssword", - "teststack", - "mynamespace", - "foobarfoo", - "unittests", + "openid", "outputquery", - "clienterror", - "autofind", - "mystack", - "shouldraise", - "tempdirectory", - "nameextra", - "argparsing", - "hashfile", - "newdir", + "paravirtual", + "partitionkey", + "prehook", + "prepad", "prevdir", - "identless", - "humanreadable", + "pyupgrade", + "readacl", + "refreshable", + "rglob", + "rootdir", + "runtimes", "runwayconfig", - "instancerole", - "authmap", - "tmpdirname", - "shelloutexc", - "nestedkey", - "nestedval", - "fileout", - "lintfix", - "autoscale", + "runwaydoc", + "safehaven", + "savingsplans", + "searchpath", "shasums", - "threadsafe", - "nameserver", - "keylist", - "filehandle", - "prepad", - "newfile", - "filedir", + "shelloutexc", + "shouldraise", + "sourcebucket", + "sourcekey", + "ssmstore", + "ssword", + "subclasscheck", + "tagset", + "tempdirectory", + "templatedir", "temppath", - "prehook", - "nodelaunchtemplate", - "indentless", - "arcname", - "searchpath", - "savingsplans", - "topdown", - "partitionkey", - "timestnonce", + "terraformlocktable", + "terraformstatebucket", + "testkey", + "teststack", + "testtemplate", + "testval", + "thisfile", + "threadsafe", "timestampamp", - "filedes", - "autodetected", - "addoption", - "domparator", - "Fakhreddine", - "SPHINXAUTOBUILD", - "autobuild", - "SPHINXAUTOBUILDPORT", - "ssmstore" + "timestnonce", + "tmpdirname", + "tomap", + "tomli", + "tomlsort", + "topdown", + "troyready", + "tryceratops", + "typehints", + "typeshed", + "unittests", + "unsubscriptable", + "usefixtures", + "viewcode", + "writeacl", + "xmlsec" ] } diff --git a/.vscode/dictionaries/pypi.txt b/.vscode/dictionaries/pypi.txt index 128d093b8..cbb626db8 100644 --- a/.vscode/dictionaries/pypi.txt +++ b/.vscode/dictionaries/pypi.txt @@ -10,7 +10,6 @@ ctypes distutils dunamai gitpython -isort moto numpy pefile @@ -21,8 +20,6 @@ prettytable pydantic pydocstyle pyhcl -pyinstaller -pylint pywin pyyaml runpy diff --git a/.vscode/extensions.json b/.vscode/extensions.json index c71af0350..c003f2319 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -2,7 +2,6 @@ "recommendations": [ "DavidAnson.vscode-markdownlint", "editorconfig.editorconfig", - "littlefoxteam.vscode-python-test-adapter", "ms-python.python", "ms-python.vscode-pylance" ] diff --git a/.vscode/settings.json 
b/.vscode/settings.json index 0fb3e52a8..149f8b2ed 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,5 +1,10 @@ { "[python]": { + "editor.codeActionsOnSave": { + "source.fixAll.ruff": "explicit", + "source.organizeImports": "always" + }, + "editor.defaultFormatter": "ms-python.black-formatter", "editor.detectIndentation": false, "editor.formatOnSave": true, "editor.insertSpaces": true, @@ -35,17 +40,7 @@ "**/__pycache__": true }, "files.insertFinalNewline": true, - "python.analysis.typeCheckingMode": "strict", - "python.formatting.provider": "black", - "python.linting.flake8Args": [ - "--docstring-convention=all" - ], - "python.linting.flake8Enabled": true, - "python.linting.mypyEnabled": false, - "python.linting.pylintArgs": [ - "--rcfile=pyproject.toml" - ], - "python.linting.pylintEnabled": true, + "python.analysis.typeCheckingMode": "off", "python.pythonPath": "${workspaceFolder}/.venv/", "python.testing.pytestArgs": [ "tests", diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 1f34d15f2..0cc59f1d4 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -17,23 +17,23 @@ diverse, inclusive, and healthy community. Examples of behavior that contributes to a positive environment for our community include: -* Demonstrating empathy and kindness toward other people -* Being respectful of differing opinions, viewpoints, and experiences -* Giving and gracefully accepting constructive feedback -* Accepting responsibility and apologizing to those affected by our mistakes, +- Demonstrating empathy and kindness toward other people +- Being respectful of differing opinions, viewpoints, and experiences +- Giving and gracefully accepting constructive feedback +- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience -* Focusing on what is best not just for us as individuals, but for the +- Focusing on what is best not just for us as individuals, but for the overall community Examples of unacceptable behavior include: -* The use of sexualized language or imagery, and sexual attention or +- The use of sexualized language or imagery, and sexual attention or advances of any kind -* Trolling, insulting or derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or email +- Trolling, insulting or derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or email address, without their explicit permission -* Other conduct which could reasonably be considered inappropriate in a +- Other conduct which could reasonably be considered inappropriate in a professional setting ## Enforcement Responsibilities @@ -59,8 +59,7 @@ representative at an online or offline event. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported to the community leaders responsible for enforcement at -opensource@onica.com. +reported to the community leaders responsible for enforcement at <opensource@onica.com>. All complaints will be reviewed and investigated promptly and fairly. All community leaders are obligated to respect the privacy and security of the @@ -121,8 +120,8 @@ version 2.0, available at Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity).
-[homepage]: https://www.contributor-covenant.org - For answers to common questions about this code of conduct, see the FAQ at <https://www.contributor-covenant.org/faq>. Translations are available at <https://www.contributor-covenant.org/translations>. + +[homepage]: https://www.contributor-covenant.org diff --git a/Makefile b/Makefile index 0b977b583..6301b7de8 100644 --- a/Makefile +++ b/Makefile @@ -1,43 +1,30 @@ -.PHONY: build clean docs help install lint list release test version +.PHONY: build clean docs help install lint list release test SHELL := /bin/bash +ifeq ($(CI), yes) + POETRY_OPTS = "-v" + PRE_COMMIT_OPTS = --show-diff-on-failure --verbose +endif help: ## show this message - @IFS=$$'\n' ; \ - help_lines=(`fgrep -h "##" $(MAKEFILE_LIST) | fgrep -v fgrep | sed -e 's/\\$$//' | sed -e 's/##/:/'`); \ - printf "%-30s %s\n" "target" "help" ; \ - printf "%-30s %s\n" "------" "----" ; \ - for help_line in $${help_lines[@]}; do \ - IFS=$$':' ; \ - help_split=($$help_line) ; \ - help_command=`echo $${help_split[0]} | sed -e 's/^ *//' -e 's/ *$$//'` ; \ - help_info=`echo $${help_split[2]} | sed -e 's/^ *//' -e 's/ *$$//'` ; \ - printf '\033[36m'; \ - printf "%-30s %s" $$help_command ; \ - printf '\033[0m'; \ - printf "%s\n" $$help_info; \ - done - - -build: clean create-tfenv-ver-file version ## build the PyPi release - poetry build - -build-pyinstaller-file: clean create-tfenv-ver-file version ## build Pyinstaller single file release (github) - bash ./.github/scripts/cicd/build_pyinstaller.sh file - -build-pyinstaller-folder: clean create-tfenv-ver-file version ## build Pyinstaller folder release (github) - bash ./.github/scripts/cicd/build_pyinstaller.sh folder + @awk \ 'BEGIN {FS = ":.*##"; printf "\nUsage: make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-30s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) }' \ + $(MAKEFILE_LIST) + + build: clean create-tfenv-ver-file ## build the PyPI release + @poetry build clean: ## remove generated files from the project directory - rm -rf build/ - rm -rf dist/ - rm -rf runway.egg-info/ - rm -rf tmp/ - rm -rf src/ - rm -rf postinstall.js preuninstall.js .coverage .npmignore - find . -name ".runway" -type d -prune -exec rm -rf '{}' + - @make -C docs clean + rm -rf ./build/ ./dist/ ./src/ ./tmp/ ./runway.egg-info/; + rm -rf ./.pytest_cache ./.venv; + find . -type d -name ".venv" -prune -exec rm -rf '{}' +; + find . -type d -name "node_modules" -prune -exec rm -rf '{}' +; + find . -type d -name ".runway" -prune -exec rm -rf '{}' +; + find . -type f -name "*.py[co]" -delete; + find . -type d -name "__pycache__" -prune -exec rm -rf '{}' +; + @$(MAKE) --no-print-directory -C docs clean; cov-report: ## display a report in the terminal of files missing coverage @poetry run coverage report \ @@ -59,81 +46,68 @@ create-tfenv-ver-file: ## create a tfenv version file using the latest version curl --silent https://releases.hashicorp.com/index.json | jq -r '.terraform.versions | to_entries | map(select(.key | contains ("-") | not)) | sort_by(.key | split(".") | map(tonumber))[-1].key' | egrep -o '^[0-9]*\.[0-9]*\.[0-9]*' > runway/templates/terraform/.terraform-version docs: ## delete current HTML docs & build fresh HTML docs - @make -C docs docs + @$(MAKE) --no-print-directory -C docs docs docs-changes: ## build HTML docs; only builds changes detected by Sphinx - @make -C docs html + @$(MAKE) --no-print-directory -C docs html + +fix: fix-ruff fix-black run-pre-commit ## run all automatic fixes fix-black: ## automatically fix all black errors @poetry run black .
-fix-isort: ## automatically fix all isort errors - @poetry run isort . +fix-imports: ## automatically fix all import sorting errors + @poetry run ruff check . --fix-only --fixable I001 + +fix-ruff: ## automatically fix everything ruff can fix (implies fix-imports) + @poetry run ruff check . --fix-only -lint: lint-isort lint-black lint-pyright lint-flake8 lint-pylint ## run all linters +fix-ruff-tests: + @poetry run ruff check ./tests --fix-only --unsafe-fixes + +lint: lint-black lint-ruff lint-pyright ## run all linters lint-black: ## run black @echo "Running black... If this fails, run 'make fix-black' to resolve." @poetry run black . --check --color --diff @echo "" -lint-flake8: ## run flake8 - @echo "Running flake8..." - @poetry run flake8 --config=setup.cfg - @echo "" - -lint-isort: ## run isort - @echo "Running isort... If this fails, run 'make fix-isort' to resolve." - @poetry run isort . --check-only - @echo "" - -lint-pylint: ## run pylint - @echo "Running pylint..." - @poetry run pylint runway tests --rcfile=pyproject.toml - @echo "" - lint-pyright: ## run pyright @echo "Running pyright..." - @npm run-script py-type-check + @npm exec --no -- pyright --venvpath ./ @echo "" -npm-ci: ## run "npm ci" with the option to ignore scripts - required to succeed for this project - @npm ci --ignore-scripts - -npm-install: ## run "npm install" with the option to ignore scripts - required to succeed for this project - @npm install --ignore-scripts - -# copies artifacts to src & npm package files to the root of the repo -npm-prep: version ## process that needs to be run before creating an npm package - mkdir -p tmp - mkdir -p src - cp -r artifacts/$$(poetry version --short)/* src/ - cp npm/* . && cp npm/.[^.]* . - cp package.json tmp/package.json - jq ".name = \"$${NPM_PACKAGE_NAME-undefined}\"" tmp/package.json > package.json - rm -rf tmp/package.json +lint-ruff: ## run ruff + @echo "Running ruff... If this fails, run 'make fix-ruff' to resolve some errors automatically; others require manual action." + @poetry run ruff check . + @echo "" open-docs: ## open docs (HTML files must already exist) @make -C docs open run-pre-commit: ## run pre-commit for all files - @poetry run pre-commit run -a + @poetry run pre-commit run $(PRE_COMMIT_OPTS) \ + --all-files \ + --color always setup: setup-poetry setup-pre-commit setup-npm ## setup development environment -setup-npm: npm-ci ## install node dependencies with npm +setup-npm: ## install node dependencies with npm + @npm ci setup-poetry: ## setup python virtual environment - @poetry install --sync + @poetry install $(POETRY_OPTS) --sync setup-pre-commit: ## install pre-commit git hooks @poetry run pre-commit install spellcheck: ## run cspell @echo "Running cSpell to check spelling..." @npm exec --no -- cspell lint . \ --color \ --config .vscode/cspell.json \ + --dot \ + --gitignore \ --must-find-files \ --no-progress \ --relative \ @@ -183,9 +157,3 @@ test-unit: ## run unit tests only --cov=runway \ --cov-config=tests/unit/.coveragerc \ --cov-report term-missing:skip-covered - -version: ## set project version using distance from last tag - @VERSION=$$(poetry run dunamai from git --style semver --no-metadata) && \ - echo setting version to $${VERSION}...
&& \ - poetry version $${VERSION} && \ - npm version $${VERSION} --allow-same-version --no-git-tag-version diff --git a/README.md b/README.md index bc0223f7e..87a8b0ee9 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,6 @@ [![CI/CD](https://github.com/onicagroup/runway/workflows/CI/CD/badge.svg?branch=master)](https://github.com/onicagroup/runway/actions?query=workflow%3ACI%2FCD) [![codecov](https://codecov.io/gh/onicagroup/runway/branch/master/graph/badge.svg?token=Ku28I0RY80)](https://codecov.io/gh/onicagroup/runway) [![PyPI](https://img.shields.io/pypi/v/runway?style=flat)](https://pypi.org/project/runway/) -[![npm](https://img.shields.io/npm/v/@onica/runway?style=flat)](https://www.npmjs.com/package/@onica/runway) [![code style: black](https://img.shields.io/badge/code%20style-black-000000.svg?style=flat)](https://github.com/psf/black) ![runway-example.gif](https://raw.githubusercontent.com/onicagroup/runway/master/docs/source/images/runway-example.gif) @@ -12,25 +11,23 @@ Runway is a lightweight integration app designed to ease management of infrastructure tools. Its main goals are to encourage GitOps best-practices, avoid convoluted Makefiles/scripts (enabling identical deployments from a workstation or CI job), and enable developers/admins to use the best tool for any given job. - ## Features -* Centralized environment-specific configuration -* Automatic environment identification from git branches -* Automatic linting/verification -* Support of IAM roles to assume for each deployment -* Terraform backend/workspace config management w/per-environment tfvars -* Automatic kubectl/terraform version management per-environment +- Centralized environment-specific configuration +- Automatic environment identification from git branches +- Automatic linting/verification +- Support of IAM roles to assume for each deployment +- Terraform backend/workspace config management w/per-environment tfvars +- Automatic kubectl/terraform version management per-environment ### Supported Deployment Tools -* AWS CDK -* Kubectl -* Serverless Framework -* CFNgin (CloudFormation) -* Static websites (build & deploy to S3+CloudFront) -* Terraform - +- AWS CDK +- Kubectl +- Serverless Framework +- CFNgin (CloudFormation) +- Static websites (build & deploy to S3+CloudFront) +- Terraform ## Example @@ -51,42 +48,8 @@ deployments: The example above contains enough information for Runway to deploy all resources, Lambda functions, and a static website backed by S3 and CloudFront in either dev or prod environments. - ## Install -Runway is available via any of the following installation methods. Use whatever works best for your project/team (it's the same application no matter how you obtain it). - -### HTTPS Download (e.g cURL) - -Use one of the endpoints below to download a single-binary executable version of Runway based on your operating system.
- -| Operating System | Endpoint | -|------------------|----------------------------------------| -| Linux | | -| macOS | | -| Windows | | - -```shell -$ curl -L oni.ca/runway/latest/osx -o runway -$ chmod +x runway -$ ./runway new -``` - -**Suggested use:** CloudFormation or Terraform projects - - -### npm - -```shell -$ npm i -D @onica/runway -$ npx runway new -``` - -**Suggested use:** Serverless or AWS CDK projects - - -### pip (or poetry, etc) - ```shell $ pip install runway $ runway new @@ -95,11 +58,8 @@ $ poetry add --dev runway $ poetry run runway new ``` -**Suggested use:** Python projects - - ## Documentation -See the [doc site](https://docs.onica.com/projects/runway) for full documentation. +See the [doc site](https://runway.readthedocs.io) for full documentation. -Quickstart documentation, including CloudFormation templates and walkthrough can be found [here](https://docs.onica.com/projects/runway/page/quickstart/index.html) +Quickstart documentation, including CloudFormation templates and a walkthrough, can be found [here](https://runway.readthedocs.io/page/quickstart/index.html) diff --git a/docs/README.md b/docs/README.md index 91be77776..c5edb8201 100644 --- a/docs/README.md +++ b/docs/README.md @@ -14,7 +14,7 @@ To record or render a new gif, terminalizer must be installed (globally is fine) ### Caveats -- node <= 10 is required due to dependency requirements (`nvm install 10` or `nvm use 10`) +- node \<= 10 is required due to dependency requirements (`nvm install 10` or `nvm use 10`) - `terminalizer@0.6.1` must be used (`npm i -g terminalizer@0.6.1`) - 0.7 changed the resolution of the GIF which increases the size 3x @@ -41,4 +41,4 @@ To render a new copy of the gif, just run `terminalizer render runway-example.yml` This will take some time to complete. We need to reduce the size of the rendered GIF so it can be served from GitHub to be viewable on PyPI. -To do this, the GIF must be compressed ([GIF Compressor](https://gifcompressor.com/) was used) to achieve the <5MB size required (GitHub restriction). +To do this, the GIF must be compressed ([GIF Compressor](https://gifcompressor.com/) was used) to achieve the \<5MB size required (GitHub restriction). diff --git a/docs/source/_static/css/custom.css b/docs/source/_static/css/custom.css deleted file mode 100644 index 5a2987518..000000000 --- a/docs/source/_static/css/custom.css +++ /dev/null @@ -1,8 +0,0 @@ -/* Change code blocks font and bump up font size slightly (normally 12px)*/ -.rst-content code.literal, -.rst-content pre.literal-block, -.rst-content div[class^="highlight"] pre, -.rst-content .linenodiv pre { - font-family: 'Inconsolata', monospace !important; - font-size: 14px !important; -} diff --git a/docs/source/_static/scripts/custom.js b/docs/source/_static/scripts/custom.js new file mode 100644 index 000000000..ae7a68475 --- /dev/null +++ b/docs/source/_static/scripts/custom.js @@ -0,0 +1,4 @@ +// open external links in new tabs +$(document).ready(function () { + $('a.external').attr('target', '_blank'); +}); diff --git a/docs/source/cdk/advanced_features.rst b/docs/source/cdk/advanced_features.rst index 1e60a9152..446f68227 100644 --- a/docs/source/cdk/advanced_features.rst +++ b/docs/source/cdk/advanced_features.rst @@ -7,9 +7,6 @@ Advanced Features Advanced features and detailed information for using CDK with Runway. -.. contents:: - :depth: 4 - ..
_cdk.Build Steps: diff --git a/docs/source/cdk/configuration.rst b/docs/source/cdk/configuration.rst index fbea75639..b243dc154 100644 --- a/docs/source/cdk/configuration.rst +++ b/docs/source/cdk/configuration.rst @@ -6,8 +6,6 @@ Configuration Standard `CDK `__ rules apply, but we have some added prerequisites, recommendations, and caveats. -.. contents:: - :depth: 4 ************* Options .. _cdk.build_steps: .. data:: build_steps - :type: Optional[List[str]] + :type: list[str] | None :value: None :noindex: diff --git a/docs/source/cdk/directory_structure.rst b/docs/source/cdk/directory_structure.rst index 792b8aa06..743dd85a2 100644 --- a/docs/source/cdk/directory_structure.rst +++ b/docs/source/cdk/directory_structure.rst @@ -7,9 +7,6 @@ Directory Structure Example directory structures for a CDK module. -.. contents:: - :depth: 4 - ********** C# Example diff --git a/docs/source/cfngin/blueprints.rst b/docs/source/cfngin/blueprints.rst index 1e6b480df..ca31475e3 100644 --- a/docs/source/cfngin/blueprints.rst +++ b/docs/source/cfngin/blueprints.rst @@ -9,7 +9,7 @@ Blueprints ########## A |Blueprint| is a Python class that dynamically builds CloudFormation templates. -Where you would specify a raw Cloudformation template in a |stack| using the |template_path| key, you instead specify a |Blueprint| subclass using the |class_path| key. +Where you would specify a raw CloudFormation template in a |Stack| using the |template_path| key, you instead specify a |Blueprint| subclass using the |class_path| key. Traditionally Blueprints are built using troposphere_, but that is not absolutely necessary. @@ -28,9 +28,6 @@ In the end, all that is required is that the |Blueprint| is a subclass of :class: """ -.. contents:: - :depth: 4 - ********* Variables ========= @@ -81,7 +78,7 @@ TroposphereType =============== The :class:`~runway.cfngin.blueprints.variables.types.TroposphereType` can be used to generate resources for use in the :class:`~runway.cfngin.blueprints.base.Blueprint` directly from user-specified configuration. -Which of the below case applies depends on what ``defined_type`` was chosen, and how it would be normally used in the :ref:`Blueprint ` (and CloudFormation in general). +Which of the below cases applies depends on what ``defined_type`` was chosen, and how it would normally be used in the :term:`Blueprint` (and :link:`CloudFormation` in general). Resource Types -------------- @@ -306,7 +303,7 @@ To use this in your |Blueprint|, you can get the name from context using ``self. Referencing the Stack short name ================================ -The |Stack| short name is the name you specified for the |stack| within your YAML config. +The |Stack| short name is the name you specified for the |Stack| within your YAML config. It does not include the |namespace|. If your CFNgin namespace is ``CFNginIsCool`` and the stack's short name is ``myAwesomeEC2Instance``, the short name would be ``myAwesomeEC2Instance``.
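To tie the ``class_path`` discussion above together, a stack definition that wires a |Blueprint| subclass into the YAML config might look like this minimal sketch (the module path ``blueprints.bucket.MyBucket`` and its ``BucketName`` variable are hypothetical, invented for illustration):

.. code-block:: yaml

    namespace: example
    stacks:
      - name: my-bucket-stack  # short name; deploys as example-my-bucket-stack
        class_path: blueprints.bucket.MyBucket
        variables:
          BucketName: example-bucket

Here CFNgin imports ``MyBucket`` from a ``blueprints/bucket.py`` module reachable via ``sys_path``, renders it to a CloudFormation template, and prefixes the stack's short name with the namespace when naming the deployed stack.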
diff --git a/docs/source/cfngin/configuration.rst b/docs/source/cfngin/configuration.rst index 0c1791a3d..bccfe99f1 100644 --- a/docs/source/cfngin/configuration.rst +++ b/docs/source/cfngin/configuration.rst @@ -4,7 +4,7 @@ Configuration ############# -In addition to the :ref:`Runway Config File `, there are two files that can be used for configuration: +In addition to the :ref:`runway_config:Runway Config File`, there are two files that can be used for configuration: - a YAML :ref:`configuration file ` **[REQUIRED]** - a key/value :ref:`environment file ` @@ -15,9 +15,6 @@ In addition to the :ref:`Runway Config File `, there are two files It has been replaced with an internal CloudFormation engine (CFNgin). -.. contents:: - :depth: 4 - ********** runway.yml ********** @@ -48,10 +45,10 @@ CloudFormation modules do not have any module-specific options. Parameters ========== -Runway can pass :ref:`Parameters ` to a CloudFormation module in place of or in addition to values in an :ref:`environment file `. -When :ref:`Parameters ` are passed to the module, the data type is retained (e.g. ``array``, ``boolean``, ``mapping``). +Runway can pass :term:`Parameters` to a CloudFormation module in place of or in addition to values in an :ref:`environment file `. +When :term:`Parameters` are passed to the module, the data type is retained (e.g. ``array``, ``boolean``, ``mapping``). -A typical usage pattern would be to use :ref:`Runway Lookups ` in combination with :ref:`Parameters ` to pass :ref:`deploy environment ` and/or region specific values to the module from the :ref:`Runway Config File `. +A typical usage pattern would be to use :ref:`Runway Lookups ` in combination with :term:`Parameters` to pass :term:`Deploy Environment` and/or region specific values to the module from the :ref:`runway_config:Runway Config File`. .. rubric:: Example .. code-block:: yaml @@ -68,7 +65,7 @@ A typical usage pattern would be to use :ref:`Runway Lookups ` in combi Common Parameters ----------------- -Runway automatically makes the following commonly used :ref:`Parameters ` available to CloudFormation modules. +Runway automatically makes the following commonly used :term:`Parameters` available to CloudFormation modules. .. note:: If these parameters are already being explicitly defined in :attr:`deployment.parameters`/:attr:`module.parameters`, the value provided will be used instead of what would be automatically added. @@ -77,7 +74,7 @@ Runway automatically makes the following commonly used :ref:`Parameters `. + Taken from the ``DEPLOY_ENVIRONMENT`` environment variable. This will be the current :term:`Deploy Environment`. .. data:: region :type: str @@ -109,7 +106,7 @@ Top-Level Fields .. _cfngin-bucket: .. attribute:: cfngin_bucket - :type: Optional[str] + :type: str | None :value: None By default, CloudFormation templates are pushed into an S3 bucket and CloudFormation is pointed to the template in that bucket when launching or updating stacks. @@ -161,7 +158,7 @@ Top-Level Fields The format of the default value is now ``cfngin-${namespace}-${region}``. .. attribute:: cfngin_bucket_region - :type: Optional[str] + :type: str | None :value: None AWS Region where :attr:`~cfngin.config.cfngin_bucket` is located. @@ -175,7 +172,7 @@ Top-Level Fields cfngin_bucket_region: us-east-1 .. attribute:: cfngin_cache_dir - :type: Optional[str] + :type: str | None :value: ./.runway/ Path to a local directory that CFNgin will use for local caching. @@ -190,7 +187,7 @@ Top-Level Fields cfngin_cache_dir: ./.runway ..
attribute:: log_formats - :type: Optional[Dict[str, str]] + :type: dict[str, str] :value: {} Customize log message formatting by log level. @@ -205,7 +202,7 @@ Top-Level Fields debug: "[%(asctime)s] %(levelname)s %(threadName)s %(name)s:%(lineno)d(%(funcName)s): %(message)s" .. attribute:: lookups - :type: Optional[Dict[str, str]] + :type: dict[str, str] :value: {} Lookups allow you to create custom methods which take a value and are resolved at runtime. @@ -227,7 +224,7 @@ Top-Level Fields conf_value: ${custom query} .. attribute:: mappings - :type: Optional[Dict[str, Dict[str, Dict[str, Any]]]] + :type: dict[str, dict[str, dict[str, Any]]] :value: {} Mappings are dictionaries that are provided as `Mappings `__ to each CloudFormation stack that CFNgin produces. @@ -259,7 +256,7 @@ Top-Level Fields In addition, this value can be used to create an S3 bucket that will be used to upload and store all CloudFormation templates. See :attr:`~cfngin.config.cfngin_bucket` for more detailed information. - In general, this is paired with the concept of :ref:`deploy environments ` to create a namespace per environment. + In general, this is paired with the concept of :term:`Deploy Environments ` to create a namespace per environment. .. rubric:: Example .. code-block:: yaml @@ -267,7 +264,7 @@ Top-Level Fields namespace: ${namespace}-${environment} .. attribute:: namespace_delimiter - :type: Optional[str] + :type: str | None :value: "-" By default, ``-`` will be used as a delimiter between the :attr:`~cfngin.config.namespace` and the declared stack name to deploy the actual CloudFormation stack name that gets created. @@ -282,7 +279,7 @@ Top-Level Fields namespace_delimiter: "" .. attribute:: package_sources - :type: Optional[cfngin.package_sources] + :type: cfngin.package_sources :value: {} See :ref:`Remote Sources ` for detailed information. @@ -299,7 +296,7 @@ Top-Level Fields ... .. attribute:: persistent_graph_key - :type: Optional[str] + :type: str | None :value: None Used to track the *state* of stacks defined in the configuration file. @@ -313,10 +310,10 @@ Top-Level Fields persistent_graph_key: unique-key.json .. attribute:: post_deploy - :type: Optional[List[cfngin.hook]] + :type: list[cfngin.hook] :value: [] - Python functions/methods that are executed after processing the stacks in the config while using the :ref:`deploy command `. + Python functions/methods that are executed after processing the stacks in the config while using the :ref:`commands:deploy` command. See :ref:`Hooks ` for more detailed information. @@ -333,10 +330,10 @@ Top-Level Fields The CFNgin bucket is now created using a CloudFormation stack. .. attribute:: post_destroy - :type: Optional[List[cfngin.hook]] + :type: list[cfngin.hook] :value: [] - Python functions/methods that are executed after processing the stacks in the config while using the :ref:`destroy command `. + Python functions/methods that are executed after processing the stacks in the config while using the :ref:`commands:destroy` command. See :ref:`Hooks ` for more detailed information. @@ -347,10 +344,10 @@ Top-Level Fields - path: do.something .. attribute:: pre_deploy - :type: Optional[List[cfngin.hook]] + :type: list[cfngin.hook] :value: [] - Python functions/methods that are executed before processing the stacks in the config while using the :ref:`deploy command `. + Python functions/methods that are executed before processing the stacks in the config while using the :ref:`commands:deploy` command. See :ref:`Hooks ` for more detailed information.
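For instance, a minimal ``pre_deploy`` sketch using the ``command.run_command`` hook documented later in this changeset (the echoed message is a placeholder):

.. code-block:: yaml

    pre_deploy:
      - path: runway.cfngin.hooks.command.run_command
        required: true
        args:
          command: ["echo", "starting deployment"]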
@@ -364,10 +361,10 @@ Top-Level Fields *pre_build* renamed to *pre_deploy*. .. attribute:: pre_destroy - :type: Optional[List[cfngin.hook]] + :type: list[cfngin.hook] :value: [] - Python functions/methods that are executed before processing the stacks in the config while using the :ref:`destroy command `. + Python functions/methods that are executed before processing the stacks in the config while using the :ref:`commands:destroy` command. See :ref:`Hooks ` for more detailed information. @@ -378,7 +375,7 @@ Top-Level Fields - path: do.something .. attribute:: service_role - :type: Optional[str] + :type: str | None :value: None By default, CFNgin doesn't specify a service role when executing changes to CloudFormation stacks. @@ -394,7 +391,7 @@ Top-Level Fields service_role: arn:aws:iam::123456789012:role/name .. attribute:: stacks - :type: Optional[List[cfngin.stack]] + :type: list[cfngin.stack] :value: [] This is the core part of the config where the CloudFormation stacks that will be deployed in the environment are defined. See Stack_ for more information. .. attribute:: sys_path - :type: Optional[str] + :type: str | None :value: None A path to be added to ``$PATH`` while processing the configuration file. @@ -416,7 +413,7 @@ Top-Level Fields sys_path: ./ # most common value to use .. attribute:: tags - :type: Optional[Dict[str, str]] + :type: dict[str, str] :value: {"cfngin_namespace": namespace} A dictionary of tags to add to all stacks. @@ -441,7 +438,7 @@ Top-Level Fields tags: {} .. attribute:: template_indent - :type: Optional[int] + :type: int | None :value: 4 Number of spaces per indentation level to use when rendering/outputting CloudFormation templates. @@ -489,7 +486,7 @@ Stack CidrBlock: 10.128.0.0/16 .. attribute:: class_path - :type: Optional[str] + :type: str | None :value: None A Python importable path to the |Blueprint| class to be used. @@ -504,7 +501,7 @@ Stack class_path: example.BlueprintClass .. attribute:: description - :type: Optional[str] + :type: str | None :value: None A short description to apply to the stack. @@ -519,7 +516,7 @@ Stack description: An Example Stack .. attribute:: enabled - :type: Optional[bool] + :type: bool :value: True Whether to deploy/update the stack. @@ -537,7 +534,7 @@ Stack enabled: ${enable_another_stack} .. attribute:: in_progress_behavior - :type: Optional[Literal["wait"]] + :type: Literal["wait"] | None :value: None Specifies the behavior for when a stack is in ``CREATE_IN_PROGRESS`` or ``UPDATE_IN_PROGRESS``. @@ -553,7 +550,7 @@ Stack in_progress_behavior: wait .. attribute:: locked - :type: Optional[bool] + :type: bool :value: False Whether the stack should be updated after initial deployment. @@ -585,7 +582,7 @@ Stack - name: example-stack .. attribute:: protected - :type: Optional[bool] + :type: bool :value: False Whether to force all updates to be performed interactively. @@ -602,7 +599,7 @@ Stack protected: ${protected_another_stack} .. attribute:: required_by - :type: Optional[List[str]] + :type: list[str] :value: [] A list of other stacks that require this stack. @@ -621,7 +618,7 @@ Stack ... .. attribute:: requires - :type: Optional[List[str]] + :type: list[str] :value: [] A list of other stacks that this stack requires. @@ -640,7 +637,7 @@ Stack ... .. attribute:: stack_name - :type: Optional[str] + :type: str | None :value: None The name used when creating the CloudFormation stack. @@ -656,7 +653,7 @@ Stack stack_name: another-name ..
attribute:: stack_policy_path - :type: Optional[str] + :type: str | None :value: None Path to a JSON formatted stack policy that will be applied when the CloudFormation stack is created and/or updated. @@ -671,7 +668,7 @@ Stack stack_policy_path: ./stack_policies/example-stack.json .. attribute:: tags - :type: Optional[Dict[str, str]] + :type: dict[str, str] :value: {} A dictionary of tags to add to the Stack. @@ -691,7 +688,7 @@ Stack example: value .. attribute:: template_path - :type: Optional[str] + :type: str | None Path to a raw CloudFormation template (JSON or YAML). Can be relative to the working directory (e.g. templates stored alongside the configuration file), or relative to a directory in the *$PATH* (i.e. for loading templates retrieved via :attr:`~cfngin.config.package_sources`). @@ -708,7 +705,7 @@ Stack template_path: remote/path/templates/another-stack.json .. attribute:: termination_protection - :type: Optional[bool] + :type: bool :value: False Whether the stack will be protected from termination by CloudFormation. @@ -728,7 +725,7 @@ Stack termination_protection: ${termination_protection_another_stack} .. attribute:: timeout - :type: Optional[int] + :type: int | None :value: None Specifies the amount of time, in minutes, that CloudFormation should allot before timing out stack creation operations. @@ -746,7 +743,7 @@ Stack timeout: 120 .. attribute:: variables - :type: Optional[Dict[str, Any]] + :type: dict[str, Any] :value: {} A dictionary of Variables_ to pass to the |Blueprint| when rendering the CloudFormation template. @@ -816,8 +813,8 @@ Using Outputs as Variables --------------------------- Since CFNgin encourages the breaking up of your CloudFormation stacks into entirely separate stacks, sometimes you'll need to pass values from one stack to another. -The way this is handled in CFNgin is by having one stack provide :ref:`Outputs ` for all the values that another stack may need, and then using those as the inputs for another stack's :attr:`~cfngin.stack.variables`. -CFNgin makes this easier for you by providing a syntax for :attr:`~cfngin.stack.variables` that will cause CFNgin to automatically look up the values of :ref:`Outputs ` from another stack in its config. +The way this is handled in CFNgin is by having one stack provide :term:`Outputs ` for all the values that another stack may need, and then using those as the inputs for another stack's :attr:`~cfngin.stack.variables`. +CFNgin makes this easier for you by providing a syntax for :attr:`~cfngin.stack.variables` that will cause CFNgin to automatically look up the values of :term:`Outputs ` from another stack in its config. To do so, use the :ref:`output lookup` in the :attr:`~cfngin.stack.variables` on the target stack. @@ -876,7 +873,7 @@ The files must also be stored at the root of the module's directory. The region can optionally be omitted to apply a single file to all regions. Files following both naming schemes may be used. The file with the most specific name takes precedence. -Values passed in as ``parameters`` from the :ref:`Runway Config File ` take precedence over those provided in an environment file. +Values passed in as ``parameters`` from the :ref:`runway_config:Runway Config File` take precedence over those provided in an environment file. Usage ===== A pretty common use case is to have separate environments that you want to look mostly the same, though with some slight changes. For example, you might want a **production** and a **staging** environment.
The production environment likely needs more instances, and often those instances will be of a larger instance type. -The parameters defined in an environment file, :attr:`deployment.parameters`, and/or :attr:`module.parameters` allow you to use your existing CFNgin config, but provide different values based on the current :ref:`deploy environment `. +The parameters defined in an environment file, :attr:`deployment.parameters`, and/or :attr:`module.parameters` allow you to use your existing CFNgin config, but provide different values based on the current :term:`Deploy Environment`. .. rubric:: Example .. code-block:: yaml vpcID: vpc-12345678 -Provided the key-value pair above, you will now be able to use this in your configs for a :ref:`deploy environment `. +Provided the key-value pair above, you will now be able to use this in your configs for a :term:`Deploy Environment`. They act as keys that can be used in your config file, providing a sort of templating ability. -This allows you to change the values of your config based on the current :ref:`deploy environment `. +This allows you to change the values of your config based on the current :term:`Deploy Environment`. For example, if you have a **webserver** stack, and you need to provide it a variable for the instance size it should use, you would have something like this in your config file. diff --git a/docs/source/cfngin/directory_structure.rst b/docs/source/cfngin/directory_structure.rst index 32acb0428..640c20910 100644 --- a/docs/source/cfngin/directory_structure.rst +++ b/docs/source/cfngin/directory_structure.rst @@ -6,8 +6,6 @@ Directory Structure Example directory structures for a CloudFormation module. -.. contents:: - :depth: 4 ********** diff --git a/docs/source/cfngin/hooks/acm.Certificate.rst b/docs/source/cfngin/hooks/acm.Certificate.rst index 5d4b3262c..67cc463a6 100644 --- a/docs/source/cfngin/hooks/acm.Certificate.rst +++ b/docs/source/cfngin/hooks/acm.Certificate.rst @@ -38,7 +38,7 @@ Args **** .. data:: alt_names - :type: Optional[List[str]] + :type: list[str] :value: [] :noindex: @@ -62,7 +62,7 @@ Args This must exist in the same account that the certificate will be created in. .. data:: stack_name - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -71,7 +71,7 @@ Args If this is provided in a deploy stage, it needs to be provided in the matching destroy stage. .. data:: ttl - :type: Optional[int] + :type: int | None :value: None :noindex: diff --git a/docs/source/cfngin/hooks/aws_lambda.upload_lambda_functions.rst b/docs/source/cfngin/hooks/aws_lambda.upload_lambda_functions.rst index c7aa99958..08835a245 100644 --- a/docs/source/cfngin/hooks/aws_lambda.upload_lambda_functions.rst +++ b/docs/source/cfngin/hooks/aws_lambda.upload_lambda_functions.rst @@ -37,14 +37,14 @@ Args **** .. data:: bucket - :type: Optional[str] + :type: str | None :value: None :noindex: Custom bucket to upload functions to. If not provided, |cfngin_bucket| will be used. .. data:: bucket_region - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -52,14 +52,14 @@ Args If not provided, :attr:`~cfngin.config.cfngin_bucket_region` will be used. .. data:: prefix - :type: Optional[str] + :type: str | None :value: None :noindex: S3 key prefix to prepend to the uploaded zip name. .. data:: follow_symlinks - :type: Optional[bool] + :type: bool :value: False :noindex: @@ -73,7 +73,7 @@ Args The canned S3 object ACL to be applied to the uploaded payload. ..
data:: functions - :type: Dict[str, Any] + :type: dict[str, Any] :noindex: Configurations of desired payloads to build. @@ -81,7 +81,7 @@ Args Each value should itself be a dictionary, with the following data: .. data:: docker_file - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -89,7 +89,7 @@ Args Must provide exactly one of ``docker_file``, ``docker_image``, or ``runtime``. .. data:: docker_image - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -97,7 +97,7 @@ Args Must provide exactly one of ``docker_file``, ``docker_image``, or ``runtime``. .. data:: dockerize_pip - :type: Optional[Union[bool, Literal["non-linux"]]] + :type: bool | Literal["non-linux"] | None :value: None :noindex: @@ -106,7 +106,7 @@ Args To use this option Docker must be installed. .. data:: exclude - :type: Optional[Union[List[str], str]] + :type: list[str] | str :value: None :noindex: @@ -116,7 +116,7 @@ Args Commonly ignored files are already excluded by default, such as ``.git``, ``.svn``, ``__pycache__``, ``*.pyc``, ``.gitignore``, etc. .. data:: include - :type: Optional[List[str], str] + :type: list[str] | str | None :value: None :noindex: @@ -139,7 +139,7 @@ Args So, for example, all the files contained directly under this directory will be added to the root of the ZIP file. .. data:: python_path - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -147,7 +147,7 @@ Args If not provided, the current python interpreter will be used for ``pip`` and ``pipenv`` will be used from the current ``$PATH``. .. data:: runtime - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -156,7 +156,7 @@ Args Must provide exactly one of ``docker_file``, ``docker_image``, or ``runtime``. .. data:: use_pipenv - :type: Optional[bool] + :type: bool | None :value: False :noindex: diff --git a/docs/source/cfngin/hooks/awslambda.PythonFunction.rst b/docs/source/cfngin/hooks/awslambda.PythonFunction.rst index 28beb18d2..700a59c81 100644 --- a/docs/source/cfngin/hooks/awslambda.PythonFunction.rst +++ b/docs/source/cfngin/hooks/awslambda.PythonFunction.rst @@ -24,10 +24,6 @@ It also ensures that binary files built during the install process are compatible with AWS Lambda. .. versionadded:: 2.5.0 -.. contents:: Table of Contents - :local: - - **** Args diff --git a/docs/source/cfngin/hooks/awslambda.PythonLayer.rst b/docs/source/cfngin/hooks/awslambda.PythonLayer.rst index 2f4ad99ea..b3ab86e31 100644 --- a/docs/source/cfngin/hooks/awslambda.PythonLayer.rst +++ b/docs/source/cfngin/hooks/awslambda.PythonLayer.rst @@ -22,10 +22,6 @@ It also ensures that binary files built during the install process are compatible with AWS Lambda. .. versionadded:: 2.5.0 -.. contents:: Table of Contents - :local: - - **** Args diff --git a/docs/source/cfngin/hooks/command.run_command.rst b/docs/source/cfngin/hooks/command.run_command.rst index 1f90bc2f5..9f66d75c4 100644 --- a/docs/source/cfngin/hooks/command.run_command.rst +++ b/docs/source/cfngin/hooks/command.run_command.rst @@ -14,20 +14,20 @@ Args **** .. data:: command - :type: Union[List[str], str] + :type: list[str] | str :noindex: Command(s) to run. .. data:: capture - :type: Optional[bool] + :type: bool :value: False :noindex: If enabled, capture the command's stdout and stderr, and return them in the hook result. .. data:: interactive - :type: Optional[bool] + :type: bool :value: False :noindex: @@ -35,14 +35,14 @@ Args Otherwise, stdin will be set to the null device. ..
data:: ignore_status - :type: Optional[bool] + :type: bool :value: False :noindex: Don't fail the hook if the command returns a non-zero status. .. data:: quiet - :type: Optional[bool] + :type: bool :value: False :noindex: @@ -50,7 +50,7 @@ Args Should not be enabled if ``capture`` is also enabled. .. data:: stdin - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -58,7 +58,7 @@ Args Implicitly disables ``interactive``. .. data:: env - :type: Optional[Dict[str, str]] + :type: dict[str, str] | None :value: None :noindex: diff --git a/docs/source/cfngin/hooks/docker.image.build.rst b/docs/source/cfngin/hooks/docker.image.build.rst index bde66f7de..2bb3dd4e5 100644 --- a/docs/source/cfngin/hooks/docker.image.build.rst +++ b/docs/source/cfngin/hooks/docker.image.build.rst @@ -21,14 +21,14 @@ Args **** .. data:: docker - :type: Optional[Dict[str, Any]] + :type: dict[str, Any] :value: {} :noindex: Options for ``docker image build``. .. data:: buildargs - :type: Optional[Dict[str, str]] + :type: dict[str, str] | None :value: None :noindex: @@ -42,7 +42,7 @@ Args Optional if providing a path to a zip file. .. data:: extra_hosts - :type: Optional[Dict[str, str]] + :type: dict[str, str] | None :value: None :noindex: @@ -57,14 +57,14 @@ Args Always remove intermediate containers, even after unsuccessful builds. .. data:: isolation - :type: Optional[str] + :type: str | None :value: None :noindex: Isolation technology used during build. .. data:: network_mode - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -78,7 +78,7 @@ Args Don't use cache when set to ``True``. .. data:: platform - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -107,21 +107,21 @@ Args Squash the resulting image layers into a single layer. .. data:: tag - :type: Optional[str] + :type: str | None :value: None :noindex: Optional name and tag to apply to the base image when it is built. .. data:: target - :type: Optional[str] + :type: str | None :value: None :noindex: Name of the build-stage to build in a multi-stage Dockerfile. .. data:: timeout - :type: Optional[int] + :type: int | None :value: None :noindex: @@ -135,14 +135,14 @@ Args If ``True`` and if the docker client configuration file (``~/.docker/config.json`` by default) contains a proxy configuration, the corresponding environment variables will be set in the container being built. .. data:: dockerfile - :type: Optional[str] + :type: str | None :value: "./Dockerfile" :noindex: Path within the build context to the Dockerfile. .. data:: ecr_repo - :type: Optional[Dict[str, Optional[str]]] + :type: dict[str, str | None] | None :value: None :noindex: @@ -153,7 +153,7 @@ Args If using a public registry, ``repo_name`` and ``registry_alias``. .. data:: account_id - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -161,7 +161,7 @@ Args it will be acquired automatically if needed. .. data:: aws_region - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -169,7 +169,7 @@ Args automatically if needed. .. data:: registry_alias - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -182,13 +182,13 @@ Args The name of the repository. .. data:: path - :type: Optional[str] + :type: str | None :noindex: Path to the directory containing the Dockerfile. .. data:: repo - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -196,7 +196,7 @@ Args If providing one of the other repo values, leave this value empty. ..
data:: tags - :type: Optional[List[str]] + :type: list[str] :value: ["latest"] :noindex: diff --git a/docs/source/cfngin/hooks/docker.image.push.rst b/docs/source/cfngin/hooks/docker.image.push.rst index 56c9c8ffe..256456a0d 100644 --- a/docs/source/cfngin/hooks/docker.image.push.rst +++ b/docs/source/cfngin/hooks/docker.image.push.rst @@ -19,7 +19,7 @@ Args **** .. data:: ecr_repo - :type: Optional[Dict[str, Optional[str]]] + :type: dict[str, str | None] | None :value: None :noindex: @@ -30,7 +30,7 @@ Args If using a public registry, ``repo_name`` and ``registry_alias``. .. data:: account_id - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -38,7 +38,7 @@ Args it will be acquired automatically if needed. .. data:: aws_region - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -46,7 +46,7 @@ Args automatically if needed. .. data:: registry_alias - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -59,7 +59,7 @@ Args The name of the repository. .. data:: image - :type: Optional[DockerImage] + :type: DockerImage | None :value: None :noindex: @@ -70,7 +70,7 @@ Args If providing a value for this field, do not provide a value for ``ecr_repo`` or ``repo``. .. data:: repo - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -78,7 +78,7 @@ Args If providing one of the other repo values or ``image``, leave this value empty. .. data:: tags - :type: Optional[List[str]] + :type: list[str] :value: ["latest"] :noindex: diff --git a/docs/source/cfngin/hooks/docker.image.remove.rst b/docs/source/cfngin/hooks/docker.image.remove.rst index ab3637ed7..1a94b0ed3 100644 --- a/docs/source/cfngin/hooks/docker.image.remove.rst +++ b/docs/source/cfngin/hooks/docker.image.remove.rst @@ -18,7 +18,7 @@ Args **** .. data:: ecr_repo - :type: Optional[Dict[str, Optional[str]]] + :type: dict[str, str | None] | None :value: None :noindex: @@ -29,7 +29,7 @@ Args If using a public registry, ``repo_name`` and ``registry_alias``. .. data:: account_id - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -37,7 +37,7 @@ Args it will be acquired automatically if needed. .. data:: aws_region - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -45,7 +45,7 @@ Args automatically if needed. .. data:: registry_alias - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -58,14 +58,14 @@ Args The name of the repository. .. data:: force - :type: Optional[bool] + :type: bool :value: False :noindex: Whether to force the removal of the image. .. data:: image - :type: Optional[DockerImage] + :type: DockerImage | None :value: None :noindex: @@ -76,14 +76,14 @@ Args If providing a value for this field, do not provide a value for ``ecr_repo`` or ``repo``. .. data:: noprune - :type: Optional[bool] + :type: bool :value: False :noindex: Whether to delete untagged parents. .. data:: repo - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -91,7 +91,7 @@ Args If providing one of the other repo values or ``image``, leave this value empty. .. data:: tags - :type: Optional[List[str]] + :type: list[str] :value: ["latest"] :noindex: diff --git a/docs/source/cfngin/hooks/docker.login.rst b/docs/source/cfngin/hooks/docker.login.rst index d7d34f403..bf306856a 100644 --- a/docs/source/cfngin/hooks/docker.login.rst +++ b/docs/source/cfngin/hooks/docker.login.rst @@ -20,14 +20,14 @@ Args **** .. 
data:: dockercfg_path - :type: Optional[str] + :type: str | None :value: None :noindex: Use a custom path for the Docker config file (``$HOME/.docker/config.json`` if present, otherwise ``$HOME/.dockercfg``). .. data:: ecr - :type: Optional[Dict[str, Optional[str]]] + :type: dict[str, str | None] | None :value: None :noindex: @@ -38,7 +38,7 @@ Args If using a public registry, ``repo_name`` and ``registry_alias``. .. data:: account_id - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -46,14 +46,14 @@ Args it will be acquired automatically if needed. .. data:: alias - :type: Optional[str] + :type: str | None :value: None :noindex: If it is a public registry, provide the alias. .. data:: aws_region - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -61,7 +61,7 @@ Args automatically if needed. .. data:: email - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -74,7 +74,7 @@ Args The plaintext password for the registry account. .. data:: registry - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -83,7 +83,7 @@ Args If providing a value for this field, do not provide a value for ``ecr``. .. data:: username - :type: Optional[str] + :type: str | None :value: None :noindex: diff --git a/docs/source/cfngin/hooks/iam.create_ecs_service_role.rst b/docs/source/cfngin/hooks/iam.create_ecs_service_role.rst index 28958e118..bf5be6f7c 100644 --- a/docs/source/cfngin/hooks/iam.create_ecs_service_role.rst +++ b/docs/source/cfngin/hooks/iam.create_ecs_service_role.rst @@ -17,7 +17,7 @@ Args **** .. data:: role_name - :type: Optional[str] + :type: str | None :value: "ecsServiceRole" :noindex: diff --git a/docs/source/cfngin/hooks/index.rst b/docs/source/cfngin/hooks/index.rst index 8f2b36c3d..0463388e3 100644 --- a/docs/source/cfngin/hooks/index.rst +++ b/docs/source/cfngin/hooks/index.rst @@ -24,7 +24,7 @@ Only the following actions allow pre/post hooks: - :attr:`~cfngin.hook.args` .. attribute:: args - :type: Optional[Dict[str, Any]] + :type: dict[str, Any] :value: {} A dictionary of arguments to pass to the hook. @@ -42,7 +42,7 @@ Only the following actions allow pre/post hooks: key: ${val} .. attribute:: data_key - :type: Optional[str] + :type: str | None :value: None If set, and the hook returns data (a dictionary or ``pydantic.BaseModel``), the results will be stored in :attr:`CfnginContext.hook_data ` with the ``data_key`` as its key. @@ -54,7 +54,7 @@ Only the following actions allow pre/post hooks: - data_key: example-key .. attribute:: enabled - :type: Optional[bool] + :type: bool :value: True Whether to execute the hook every CFNgin run. @@ -78,15 +78,12 @@ Only the following actions allow pre/post hooks: - path: runway.cfngin.hooks.command.run_command .. attribute:: required - :type: Optional[bool] + :type: bool :value: True Whether to stop execution if the hook fails. -.. 
contents:: - :depth: 4 - ---- @@ -156,12 +153,11 @@ Example Hook Function :caption: local_path/hooks/my_hook.py """My hook.""" - from typing import Dict, Optional def do_something( *, is_failure: bool = True, name: str = "Kevin", **_kwargs: str - ) -> Optional[Dict[str, str]]: + ) -> dict[str, str] | None: """Do something.""" if is_failure: return None @@ -194,7 +190,7 @@ These can then be used to parse the values provided in the :attr:`~cfngin.hook.a """My hook.""" import logging - from typing import TYPE_CHECKING, Any, Dict, Optional + from typing import TYPE_CHECKING, Any from runway.utils import BaseModel from runway.cfngin.hooks.protocols import CfnginHookProtocol @@ -222,11 +218,11 @@ These can then be used to parse the values provided in the :attr:`~cfngin.hook.a """My class does a thing. Keyword Args: - is_failure (bool): Force the hook to fail if true. - name (str): Name used in the response. + is_failure: Force the hook to fail if true. + name: Name used in the response. Returns: - Dict[str, str]: Response message is stored in ``result``. + Response message is stored in ``result``. Example: .. code-block:: yaml @@ -255,7 +251,7 @@ These can then be used to parse the values provided in the :attr:`~cfngin.hook.a self.args.tags.update(context.tags) self.context = context - def post_deploy(self) -> Optional[Dict[str, str]]: + def post_deploy(self) -> dict[str, str] | None: """Run during the **post_deploy** stage.""" if self.args["is_failure"]: return None diff --git a/docs/source/cfngin/hooks/keypair.ensure_keypair_exists.rst b/docs/source/cfngin/hooks/keypair.ensure_keypair_exists.rst index 6b1e7e887..bcba69651 100644 --- a/docs/source/cfngin/hooks/keypair.ensure_keypair_exists.rst +++ b/docs/source/cfngin/hooks/keypair.ensure_keypair_exists.rst @@ -20,7 +20,7 @@ Args Name of the key pair to create .. data:: public_key_path - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -28,7 +28,7 @@ Args Incompatible with the SSM options, as the private key will not be available for storing. .. data:: ssm_key_id - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -36,7 +36,7 @@ Args parameter with. If omitted, the default key will be used. .. data:: ssm_parameter_name - :type: Optional[str] + :type: str | None :value: None :noindex: diff --git a/docs/source/cfngin/hooks/ssm.parameter.SecureString.rst b/docs/source/cfngin/hooks/ssm.parameter.SecureString.rst index 9e471a538..dc3aa9a31 100644 --- a/docs/source/cfngin/hooks/ssm.parameter.SecureString.rst +++ b/docs/source/cfngin/hooks/ssm.parameter.SecureString.rst @@ -24,14 +24,14 @@ Args **** .. data:: allowed_pattern - :type: Optional[str] + :type: str | None :value: None :noindex: A regular expression used to validate the parameter value. .. data:: data_type - :type: Optional[Literal["aws:ec2:image", "text"]] + :type: Literal["aws:ec2:image", "text"] | None :value: None :noindex: @@ -39,7 +39,7 @@ Args Supported data types include plain text and Amazon Machine Image IDs. .. data:: description - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -54,7 +54,7 @@ Args Can be used alongside **overwrite** to always update a parameter. .. data:: key_id - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -76,7 +76,7 @@ Args If this is set to ``False`` and the parameter already exists, the parameter will not be updated and a warning will be logged. .. 
data:: policies
-   :type: Optional[Union[List[Dict[str, Any]], str]]
+   :type: list[dict[str, Any]] | str | None
    :value: None
    :noindex:

@@ -84,7 +84,7 @@ Args
    This field takes a JSON array.

 .. data:: tags
-   :type: Optional[Union[Dict[str, str], List[TagTypeDef]]]
+   :type: dict[str, str] | list[TagTypeDef] | None
    :value: None
    :noindex:

@@ -98,7 +98,7 @@ Args
    The parameter tier to assign to a parameter.

 .. data:: value
-   :type: Optional[str]
+   :type: str | None
    :value: None
    :noindex:

diff --git a/docs/source/cfngin/lookups/ami.rst b/docs/source/cfngin/lookups/ami.rst
index 953dc560c..00b240df9 100644
--- a/docs/source/cfngin/lookups/ami.rst
+++ b/docs/source/cfngin/lookups/ami.rst
@@ -14,13 +14,13 @@ Any other arguments specified but not listed below are sent as filters to the AW
 For example, ``architecture:x86_64`` would add a filter.

 .. data:: region
-   :type: Optional[str]
+   :type: str | None
    :noindex:

    AWS region to search (e.g. ``us-east-1``). Defaults to the current region.

 .. data:: owners
-   :type: Union[List[str], str]
+   :type: list[str] | str | None
    :noindex:

    At least one owner must be specified in the query (e.g. ``amazon``, ``self``, or an AWS account ID).
@@ -33,7 +33,7 @@ For example, ``architecture:x86_64`` would add a filter.
    Regex pattern for the name of the AMI (e.g. ``my-ubuntu-server-[0-9]+``).

 .. data:: executable_users
-   :type: Optional[str]
+   :type: str | None
    :noindex:

    ``amazon``, ``self``, or an AWS account ID.
diff --git a/docs/source/cfngin/lookups/index.rst b/docs/source/cfngin/lookups/index.rst
index 1023d4963..54630d39e 100644
--- a/docs/source/cfngin/lookups/index.rst
+++ b/docs/source/cfngin/lookups/index.rst
@@ -109,31 +109,32 @@ If using boto3 in a lookup, use :meth:`context.get_session() str: """Do something.
diff --git a/docs/source/cfngin/migrating.rst b/docs/source/cfngin/migrating.rst
index 7bfdbfe87..17bbd1617 100644
--- a/docs/source/cfngin/migrating.rst
+++ b/docs/source/cfngin/migrating.rst
@@ -4,8 +4,7 @@ Migrating from Stacker
 ######################

-.. contents::
-   :depth: 4
+

 **********
 Blueprints
diff --git a/docs/source/cfngin/persistent_graph.rst b/docs/source/cfngin/persistent_graph.rst
index 51a392d5c..7aba209c2 100644
--- a/docs/source/cfngin/persistent_graph.rst
+++ b/docs/source/cfngin/persistent_graph.rst
@@ -4,12 +4,12 @@ Persistent Graph
 ################

-Each time Runway's CFNgin is run, it creates a dependency :ref:`graph ` of :class:`stacks `.
+Each time Runway's CFNgin is run, it creates a dependency :term:`graph` of :class:`stacks `.
 This is used to determine the order in which to execute them.

-This :ref:`graph ` can be persisted between runs to track the removal of :class:`stacks ` from the config file.
+This :term:`graph` can be persisted between runs to track the removal of :class:`stacks ` from the config file.

-When a |stack| is present in the persistent graph but not in the :ref:`graph ` constructed from the config file, CFNgin will delete the Stack from CloudFormation.
-This takes effect when running either the :ref:`deploy command ` or :ref:`destroy command `.
+When a |Stack| is present in the persistent graph but not in the :term:`graph` constructed from the config file, CFNgin will delete the Stack from CloudFormation.
+This takes effect when running either the :ref:`commands:deploy` command or :ref:`commands:destroy` command.

 To enable persistent graph, define the :attr:`~cfngin.config.persistent_graph_key` field as a unique value that will be used to construct the path to the persistent graph object in S3. 
This object is stored in the |cfngin_bucket| which is also used for CloudFormation templates. @@ -36,7 +36,7 @@ The lock is a tag applied to the object at the start of one of these actions. The tag-key is **cfngin_lock_code** and the tag-value is UUID generated each time a config is processed. To lock a persistent graph object, the tag must not be present on the object. -For CFNgin to act on the :ref:`graph ` (modify or unlock) the value of the tag must match the UUID of the current CFNgin session. +For CFNgin to act on the :term:`graph` (modify or unlock) the value of the tag must match the UUID of the current CFNgin session. If the object is locked or the code does not match, an error will be raised and no action will be taken. This prevents two parties from acting on the same persistent graph object concurrently which would create a race condition. diff --git a/docs/source/cfngin/remote_sources.rst b/docs/source/cfngin/remote_sources.rst index 681eaa27c..e61ae9b83 100644 --- a/docs/source/cfngin/remote_sources.rst +++ b/docs/source/cfngin/remote_sources.rst @@ -14,7 +14,7 @@ By defining these additional sources you are able to extend your *$PATH* to make There are three different types of package sources - git repository, local, and AWS S3. .. attribute:: git - :type: Optional[List[cfngin.package_source.git]] + :type: list[cfngin.package_source.git] :value: [] A list of git repositories to include when processing the configuration file. @@ -29,7 +29,7 @@ By defining these additional sources you are able to extend your *$PATH* to make ... .. attribute:: local - :type: Optional[List[cfngin.package_source.local]] + :type: list[cfngin.package_source.local] :value: [] A list of additional local directories to include when processing the configuration file. @@ -44,7 +44,7 @@ By defining these additional sources you are able to extend your *$PATH* to make ... .. attribute:: s3 - :type: Optional[List[cfngin.package_source.s3]] + :type: list[cfngin.package_source.s3] :value: [] A list of AWS S3 objects to include when processing the configuration file. @@ -59,9 +59,6 @@ By defining these additional sources you are able to extend your *$PATH* to make ... -.. contents:: - :depth: 4 - ************** Git Repository @@ -75,7 +72,7 @@ Git Repository The cache location is defined by :attr:`cfngin.config.cfngin_cache_dir`. .. attribute:: branch - :type: Optional[str] + :type: str | None :value: None Name of a branch to checkout after cloning the git repository. @@ -90,7 +87,7 @@ Git Repository - branch: master .. attribute:: commit - :type: Optional[str] + :type: str | None :value: None After cloning the git repository, reset *HEAD* to the given commit hash. @@ -105,7 +102,7 @@ Git Repository - commit: 5d83f7ff1ad6527233be2c27e9f68816599b6c57 .. attribute:: configs - :type: Optional[List[str]] + :type: list[str] :value: [] Configuration files from this source location can also be used by specifying a list of file paths. @@ -123,7 +120,7 @@ Git Repository - example-02.yml .. attribute:: paths - :type: Optional[List[str]] + :type: list[str] :value: [] A list of subdirectories within the source location that should be added to *$PATH*. @@ -138,7 +135,7 @@ Git Repository - another/ .. attribute:: tag - :type: Optional[str] + :type: str | None :value: None After cloning the git repository, reset *HEAD* to the given tag. @@ -175,7 +172,7 @@ Local Package source located on a local disk. .. 
attribute:: configs - :type: Optional[List[str]] + :type: list[str] :value: [] Configuration files from this source location can also be used by specifying a list of file paths. @@ -193,7 +190,7 @@ Local - example-02.yml .. attribute:: paths - :type: Optional[List[str]] + :type: list[str] :value: [] A list of subdirectories within the source location that should be added to *$PATH*. @@ -245,7 +242,7 @@ AWS S3 - bucket: example-bucket .. attribute:: configs - :type: Optional[List[str]] + :type: list[str] :value: [] Configuration files from this source location can also be used by specifying a list of file paths. @@ -276,7 +273,7 @@ AWS S3 - key: path/to/example.zip .. attribute:: paths - :type: Optional[List[str]] + :type: list[str] :value: [] A list of subdirectories within the source location that should be added to *$PATH*. @@ -291,7 +288,7 @@ AWS S3 - another/ .. attribute:: requester_pays - :type: Optional[bool] + :type: bool :value: False Confirms that the requester knows that they will be charged for the request @@ -304,7 +301,7 @@ AWS S3 - requester_pays: true .. attribute:: use_latest - :type: Optional[bool] + :type: bool :value: True Update the local copy if the last modified date in AWS S3 changes. diff --git a/docs/source/cfngin/templates.rst b/docs/source/cfngin/templates.rst index 0ff3fe439..f4d0c593a 100644 --- a/docs/source/cfngin/templates.rst +++ b/docs/source/cfngin/templates.rst @@ -2,18 +2,16 @@ Templates ######### -CloudFormation templates can be provided via :ref:`Blueprints ` or JSON/YAML. +CloudFormation templates can be provided via :term:`Blueprints ` or JSON/YAML. JSON/YAML templates are specified for :class:`stacks ` via the :attr:`~cfngin.stack.template_path` config. -.. contents:: - :depth: 4 ***************** Jinja2 Templating ***************** -Templates with a ``.j2`` extension will be parsed using `Jinja2 `__. +Templates with a ``.j2`` extension will be parsed using :link:`Jinja2 `. The CFNgin ``context`` and ``mappings`` objects and stack ``variables`` objects are available for use in the template: .. code-block:: yaml diff --git a/docs/source/commands.rst b/docs/source/commands.rst index e69d021f2..f1d6ab404 100644 --- a/docs/source/commands.rst +++ b/docs/source/commands.rst @@ -12,8 +12,6 @@ Commands :ellipsis: 13 -.. _command-deploy: - ****** deploy ****** @@ -32,7 +30,6 @@ deploy ---- -.. _command-destroy: ******* destroy @@ -52,7 +49,6 @@ destroy ---- -.. _command-dismantle: ********* dismantle @@ -72,7 +68,6 @@ dismantle ---- -.. _command-docs: **** docs @@ -90,7 +85,6 @@ docs ---- -.. _command-envvars: ******* envvars @@ -110,7 +104,6 @@ envvars ---- -.. _command-gen-sample: ********** gen-sample @@ -129,7 +122,6 @@ gen-sample ---- -.. _command-init: **** init @@ -149,8 +141,6 @@ init ---- -.. _command-kbenv: -.. _command-kbenv-install: ************* kbenv install @@ -169,7 +159,6 @@ kbenv install ---- -.. _command-kbenv-list: ********** kbenv list @@ -187,7 +176,6 @@ kbenv list ---- -.. _command-kbenv-run: ********* kbenv run @@ -206,7 +194,6 @@ kbenv run ---- -.. _command-kbenv-uninstall: *************** kbenv uninstall @@ -225,7 +212,6 @@ kbenv uninstall ---- -.. _command-new: **** new @@ -244,7 +230,6 @@ new ---- -.. _command-plan: **** plan @@ -252,7 +237,7 @@ plan .. file://./../../runway/_cli/commands/_plan.py -.. note:: Currently only supported for :ref:`mod-cdk`, :ref:`mod-cfn`, and :ref:`mod-tf`. +.. 
note:: Currently only supported for :ref:`index:AWS Cloud Development Kit (CDK)`, :ref:`index:CloudFormation & Troposphere`, and :ref:`index:Terraform`.

 .. command-output:: runway new --help

@@ -266,7 +251,6 @@ plan

 ----

-.. _command-preflight:

 *********
 preflight
@@ -284,7 +268,6 @@ preflight

 ----

-.. _command-run-python:

 **********
 run-python
@@ -302,7 +285,6 @@ run-python

 ----

-.. _command-schema-cfngin:

 *************
 schema cfngin
@@ -320,7 +302,6 @@ schema cfngin

 ----

-.. _command-schema-runway:

 *************
 schema runway
@@ -338,7 +319,6 @@ schema runway

 ----

-.. _command-takeoff:

 *******
 takeoff
@@ -358,7 +338,6 @@ takeoff

 ----

-.. _command-taxi:

 ****
 taxi
@@ -378,7 +357,6 @@ taxi

 ----

-.. _command-test:

 ****
 test
@@ -396,8 +374,6 @@ test

 ----

-.. _command-tfenv:
-.. _command-tfenv-install:

 *************
 tfenv install
@@ -415,7 +391,6 @@ tfenv install

 ----

-.. _command-tfenv-list:

 **********
 tfenv list
@@ -433,7 +408,6 @@ tfenv list

 ----

-.. _command-tfenv-run:

 *********
 tfenv run
@@ -452,7 +426,6 @@ tfenv run

 ----

-.. _command-tfenv-uninstall:

 ***************
 tfenv uninstall
@@ -471,7 +444,6 @@ tfenv uninstall

 ----

-.. _command-whichenv:

 ********
 whichenv
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 2c28c6fcc..acc34fa18 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -2,25 +2,32 @@
 https://www.sphinx-doc.org/en/master/usage/configuration.html

-"""
+"""  # noqa: INP001

-# pylint: skip-file
 import os
+import sys
+from datetime import date
 from pathlib import Path

-from dunamai import Style, Version
+if sys.version_info < (3, 11):
+    import tomli as tomllib
+else:
+    import tomllib

 DOCS_DIR = Path(__file__).parent.parent.resolve()
 ROOT_DIR = DOCS_DIR.parent
 SRC_DIR = DOCS_DIR / "source"

+PYPROJECT_TOML = tomllib.loads((ROOT_DIR / "pyproject.toml").read_text())
+"""Read in the contents of ``../../pyproject.toml`` to reuse its values."""
+

 # -- Project information -----------------------------------------------------
 # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
-project = "Runway"
-copyright = "2021, Onica Group"
-author = "Onica Group"
-release = Version.from_git().serialize(metadata=False, style=Style.SemVer)
+project = PYPROJECT_TOML["tool"]["poetry"]["name"].title()
+copyright = f"{date.today().year}, Onica Group"  # noqa: A001
+author = PYPROJECT_TOML["tool"]["poetry"]["authors"][0]
+release = PYPROJECT_TOML["tool"]["poetry"]["version"]
 version = ".".join(release.split(".")[:2])  # short X.Y version

@@ -31,62 +38,29 @@
 default_role = None
 exclude_patterns = []
 extensions = [
+    "notfound.extension",
     "sphinx.ext.autodoc",
+    "sphinx.ext.autosectionlabel",
     "sphinx.ext.intersphinx",
     "sphinx.ext.napoleon",
     "sphinx.ext.viewcode",
+    "sphinx_copybutton",
+    "sphinx_design",
     "sphinx_github_changelog",
-    "sphinx_tabs.tabs",
     "sphinxcontrib.apidoc",
+    "sphinxcontrib.external_links",
+    "sphinxcontrib.jquery",
     "sphinxcontrib.programoutput",
 ]
 highlight_language = "default"
-intersphinx_mapping = {
-    "docker": (
-        "https://docker-py.readthedocs.io/en/stable/",
-        None,
-    ),  # link to docker docs
-    "python": ("https://docs.python.org/3", None),  # link to python docs
-}
-language = None
+language = "en"
 master_doc = "index"
 needs_extensions = {}
-needs_sphinx = "3.5"
-nitpicky = False  # TODO enable nitpicky
+needs_sphinx = "7.4"
+nitpicky = False  # TODO (kyle): enable nitpicky
 primary_domain = "py"
-pygments_style = "material"  # syntax highlighting style
-# Appended to the end of each rendered file
-rst_epilog = """
-.. 
|Blueprint| replace:: - :class:`~runway.cfngin.blueprints.base.Blueprint` - -.. |Dict| replace:: - :class:`~typing.Dict` - -.. |Protocol| replace:: - :class:`~typing.Protocol` - -.. |Stack| replace:: - :class:`~runway.cfngin.stack.Stack` - -.. |cfngin_bucket| replace:: - :attr:`~cfngin.config.cfngin_bucket` - -.. |class_path| replace:: - :attr:`~cfngin.stack.class_path` - -.. |namespace| replace:: - :attr:`~cfngin.config.namespace` - -.. |stack| replace:: - :class:`~cfngin.stack` - -.. |template_path| replace:: - :attr:`~cfngin.stack.template_path` - -""" -rst_prolog = "" - +pygments_style = "one-dark" # syntax highlighting style +pygments_dark_style = "one-dark" # syntax highlighting style source_suffix = {".rst": "restructuredtext"} templates_path = ["_templates"] # template dir relative to this dir @@ -94,25 +68,29 @@ # -- Options for HTML output ------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output html_codeblock_linenos_style = "inline" -html_css_files = ["css/custom.css"] # files relative to html_static_path +html_css_files = [] # files relative to html_static_path html_favicon = None html_logo = None -html_theme = "sphinx_rtd_theme" # theme to use for HTML and HTML Help pages +html_theme = "furo" # theme to use for HTML and HTML Help pages html_theme_options = { - "navigation_depth": -1, # unlimited depth + "dark_css_variables": { + "font-stack--monospace": "Inconsolata, monospace", + "color-inline-code-background": "#24242d", + }, + "light_css_variables": { + "font-stack--monospace": "Inconsolata, monospace", + }, } html_short_title = f"{project} v{release}" html_title = f"{project} v{release}" -html_show_copyright = True -html_show_sphinx = True +html_show_copyright = False +html_show_sphinx = False html_static_path = ["_static"] # dir with static files relative to this dir - # -- Options for HTMLHelp output --------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-help-output htmlhelp_basename = "runwaydoc" - # -- Options for LaTeX output ------------------------------------------------ # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-latex-output latex_documents = [ @@ -120,12 +98,10 @@ ] latex_elements = {} - # -- Options for manual page output ------------------------------------------ # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-manual-page-output man_pages = [(master_doc, "runway", "runway Documentation", [author], 1)] - # -- Options for Texinfo output ---------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-texinfo-output texinfo_documents = [ @@ -140,7 +116,6 @@ ), ] - # -- Options for Epub output ------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-epub-output epub_exclude_files = ["search.html"] @@ -151,9 +126,12 @@ # https://www.sphinx-doc.org/en/master/man/sphinx-apidoc.html#environment os.environ["SPHINX_APIDOC_OPTIONS"] = "members" -# -- Options for sphinx-github-changelog ------------------------------------- -# GitHub PAT with "repo.public_repo" access provided by @ITProKyle -changelog_github_token = os.getenv("SPHINX_GITHUB_CHANGELOG_TOKEN", "") + +# -- Options of sphinx.ext.autosectionlabel ---------------------------------- +# https://www.sphinx-doc.org/en/master/usage/extensions/autosectionlabel.html +# 
autosectionlabel_maxdepth = 4 +autosectionlabel_prefix_document = True + # -- Options of sphinx.ext.autodoc ------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#configuration @@ -161,10 +139,12 @@ autodoc_class_signature = "separated" autodoc_default_options = { "inherited-members": "dict", # show all inherited members - "member-order": "bysource", + "member-order": "alphabetical", "members": True, "show-inheritance": True, } +autodoc_inherit_docstrings = True +autodoc_member_order = "alphabetical" autodoc_type_aliases = { "CfnginContext": "runway.context.CfnginContext", "DirectoryPath": "Path", @@ -173,9 +153,23 @@ "RunwayContext": "runway.context.RunwayContext", } autodoc_typehints = "signature" +autodoc_typehints_format = "short" + + +# -- Options of sphinx.ext.intersphinx ------------------------------------------ +# https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html +intersphinx_mapping = { + "docker": ( + "https://docker-py.readthedocs.io/en/stable/", + None, + ), # link to docker docs + "packaging": ("https://packaging.pypa.io/en/stable/", None), + "python": ("https://docs.python.org/3", None), # link to python docs +} + -# -- Options for napoleon --------------------------------------------------- -# https://www.sphinx-doc.org/en/3.x/usage/extensions/napoleon.html#configuration +# -- Options for sphinx.ext.napoleon ---------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/extensions/napoleon.html#configuration napoleon_attr_annotations = True napoleon_google_docstring = True napoleon_include_init_with_doc = False @@ -189,6 +183,20 @@ napoleon_use_param = False napoleon_use_rtype = True + +# -- Options for sphinx_copybutton --------------------------------- +# https://sphinx-copybutton.readthedocs.io/en/latest/index.html +copybutton_prompt_text = r">>> |\.\.\. 
|\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: " +copybutton_prompt_is_regexp = True +copybutton_remove_prompts = True +copybutton_line_continuation_character = "\\" + + +# -- Options for sphinx-github-changelog ------------------------------------- +# GitHub PAT with "repo.public_repo" access provided by @ITProKyle +changelog_github_token = os.getenv("SPHINX_GITHUB_CHANGELOG_TOKEN", "") + + # -- Options for sphinxcontrib.apidoc --------------------------------------- # https://github.com/sphinx-contrib/apidoc apidoc_excluded_paths = [ @@ -201,3 +209,22 @@ apidoc_output_dir = "apidocs" apidoc_separate_modules = True apidoc_toc_file = "index" + + +# -- Options for sphinxcontrib.external_links ------------------------------ +# https://sphinxcontribexternal-links.readthedocs.io/latest/configuration.html +external_links: dict[str, str] = { + "CloudFormation": "https://aws.amazon.com/cloudformation", + "troposphere": "https://github.com/cloudtools/troposphere", +} +external_links_substitutions: dict[str, str] = { + "Blueprint": ":class:`Blueprint `", + "Dict": ":class:`~typing.Dict`", + "dict": ":class:`~typing.Dict`", + "Protocol": ":class:`~typing.Protocol`", + "Stack": ":class:`~cfngin.stack`", + "cfngin_bucket": ":attr:`~cfngin.config.cfngin_bucket`", + "class_path": ":attr:`~cfngin.stack.class_path`", + "namespace": ":attr:`~cfngin.config.namespace`", + "template_path": ":attr:`~cfngin.stack.template_path`", +} diff --git a/docs/source/defining_tests.rst b/docs/source/defining_tests.rst index 34baf5fa8..36601b31e 100644 --- a/docs/source/defining_tests.rst +++ b/docs/source/defining_tests.rst @@ -4,8 +4,8 @@ Defining Tests ############## -:ref:`Tests` can be defined in the :ref:`runway config file` to test your modules in any way you desire before deploying. -They are run by using the ``runway test`` :ref:`command`. +:ref:`Tests ` can be defined in the :ref:`runway_config:Runway Config File` to test your modules in any way you desire before deploying. +They are run by using the ``runway test`` :ref:`command `. Tests are run in the order they are defined. .. rubric:: Example: @@ -20,9 +20,6 @@ Tests are run in the order they are defined. - echo "Success!" -.. contents:: - :depth: 4 - ************* Test Failures @@ -61,7 +58,7 @@ cfn-lint *and additional checks. Includes checking valid values for resource properties* *and best practices*. -In order to use this test, there must be a ``.cfnlintrc`` file in the same directory as the :ref:`Runway config file`. +In order to use this test, there must be a ``.cfnlintrc`` file in the same directory as the :ref:`runway_config:Runway Config File`. .. rubric:: Example: .. code-block:: yaml @@ -104,7 +101,7 @@ yamllint *problems such as lines length, trailing spaces, indentation, etc*. A ``.yamllint`` file can be placed at in the same directory as the -:ref:`Runway config file` to customize the linter or, +:ref:`runway_config:Runway Config File` to customize the linter or, the Runway provided template will be used. .. rubric:: Example: diff --git a/docs/source/developers/advanced_configuration.rst b/docs/source/developers/advanced_configuration.rst index 9cbd32894..dfbf4cd37 100644 --- a/docs/source/developers/advanced_configuration.rst +++ b/docs/source/developers/advanced_configuration.rst @@ -50,7 +50,7 @@ Environment variables can be used to alter the functionality of Runway. :type: str :noindex: - Explicitly enable/disable colorized output for :ref:`CDK `, :ref:`Serverless `, and :ref:`Terraform ` modules. 
+   Explicitly enable/disable colorized output for :ref:`index:AWS Cloud Development Kit (CDK)`, :ref:`index:Serverless Framework`, and :ref:`index:Terraform` :term:`Modules `.
    Having this set to a truthy value will prevent ``-no-color``/``--no-color`` from being added to any commands even if stdout is not a TTY.
    Having this set to a falsy value will include ``-no-color``/``--no-color`` in commands even if stdout is a TTY.
    If the IaC tool has other mechanisms for disabling color output, using a truthy value will not circumvent them.
diff --git a/docs/source/developers/getting_started.rst b/docs/source/developers/getting_started.rst
index 474b42d03..514beb014 100644
--- a/docs/source/developers/getting_started.rst
+++ b/docs/source/developers/getting_started.rst
@@ -14,14 +14,21 @@ Before getting started, `fork this repo`_ and `clone your fork`_.
 Development Environment
 ***********************

-This project includes an optional `VSCode Dev Container `__. This is an Ubuntu 22.04 image that will launch with operating system pre-requisites already installed and VSCode configured for Python debugging. It's not required to use this for development work, but does provide an easy and consistent way to get started.
+This project includes an optional `VSCode Dev Container `__.
+This is an Ubuntu 22.04 image that will launch with operating system pre-requisites already installed and VSCode configured for Python debugging.
+It's not required to use this for development work, but does provide an easy and consistent way to get started.

-This project uses `poetry `__ to create Python virtual environment. This must be installed on your system before setting up your dev environment.
+This project uses poetry_ to create Python virtual environments.
+This must be installed on your system before setting up your dev environment.
+Additionally, the poetry-dynamic-versioning_ plugin should be installed.
+Refer to the documentation of poetry-dynamic-versioning_ for how to install it based on how you installed poetry_.

-With poetry installed, run ``make setup`` to setup your development environment.
+With poetry_ installed, run ``make setup`` to set up your development environment.
 This will create all the required virtual environments to work on Runway, build docs locally, and run integration tests locally.
 The virtual environments all have Runway installed as editable meaning as you make changes to the code of your local clone, it will be reflected in all the virtual environments.

+.. _poetry: https://python-poetry.org
+.. _poetry-dynamic-versioning: https://github.com/mtkennerly/poetry-dynamic-versioning

 pre-commit
 ==========
@@ -36,4 +43,4 @@ You can also run ``make run-pre-commit`` at any time to manually trigger these h
 pyright Type Checking
 =====================

-This project uses pyright to perform type checking. To run type checking locally, install pyright (``make npm-ci``) then run ``make lint`` or ``make lint-pyright``.
+This project uses pyright to perform type checking. To run type checking locally, install pyright (``make setup-npm``) then run ``make lint`` or ``make lint-pyright``.
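The annotation rewrites running through these docs changes swap ``typing.Optional``/``Dict``/``List`` for the PEP 604 / PEP 585 spellings (``X | None``, ``dict``, ``list``). A minimal sanity check of that equivalence, assuming Python 3.10+; ``normalize_owners`` is a hypothetical helper modeled on the ``owners`` argument documented for the ``ami`` lookup, not a Runway API:

.. code-block:: python

   """Spot-check the PEP 585/604 spellings used in the updated docs."""
   from typing import Dict, List, Optional

   # PEP 604 (Python 3.10+): ``X | None`` builds the same union as ``Optional[X]``.
   assert (str | None) == Optional[str]

   # PEP 585 (Python 3.9+): builtin generics compare equal to their typing aliases.
   assert list[str] == List[str]
   assert dict[str, str] == Dict[str, str]


   def normalize_owners(owners: list[str] | str | None = None) -> list[str]:
       """Normalize a value annotated like the ``ami`` lookup's ``owners``."""
       if owners is None:
           return []
       return [owners] if isinstance(owners, str) else owners


   print(normalize_owners("self"), normalize_owners(["amazon", "self"]))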
diff --git a/docs/source/developers/pyinstaller.rst b/docs/source/developers/pyinstaller.rst
deleted file mode 100644
index 11c7b8172..000000000
--- a/docs/source/developers/pyinstaller.rst
+++ /dev/null
@@ -1,27 +0,0 @@
-#####################################
-Building Pyinstaller Packages Locally
-#####################################
-
-We use Pyinstaller_ to build executables that do not require Python to be installed on a system.
-These are built by Travis CI for distribution to ensure a consistent environment but they can also be build locally for testing.
-
-.. _Pyinstaller: https://pypi.org/project/PyInstaller/
-
-
-*************
-Prerequisites
-*************
-
-These need to be installed globally.
-
-- `poetry `__
-
-
-*******
-Process
-*******
-
-1. Export ``OS_NAME`` environment variable for your system (``ubuntu-22.04``, ``macos-12``, or ``windows-latest``).
-2. Execute ``make build-pyinstaller-file`` or ``make build-pyinstaller-folder`` from the root of the repo.
-
-The output of these commands can be found in ``./artifacts``
diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst
index 128eaf2e0..f568fcda2 100644
--- a/docs/source/getting_started.rst
+++ b/docs/source/getting_started.rst
@@ -2,17 +2,14 @@
 Getting Started Guide
 #####################

-.. contents::
-   :depth: 4


 **************
 Basic Concepts
 **************

-Welcome to Runway! To get a basic understanding of Runway, we have listed out
-the key concepts below that you will need to get started with deploying your
-first module.
+Welcome to Runway!
+To get a basic understanding of Runway, we have listed out the key concepts below that you will need to get started with deploying your first module.


 Runway Config File
 ==================
@@ -25,31 +22,26 @@ It defines the modules that will be managed by Runway.
 Deployment
 ==========

-A deployment contains a list of modules and options for all the modules in the deployment.
-A Runway config file can contain multiple deployments and a deployment can contain multiple modules.
+A :term:`Deployment` contains a list of :term:`Modules ` and options for all the :term:`Modules ` in the deployment.
+A Runway config file can contain multiple :term:`Deployments ` and a :term:`Deployment` can contain multiple :term:`Modules `.


 Module
 ======

-A module is a directory containing a single infrastructure as code tool
-configuration of an application, a component, or some infrastructure
-(eg. a set of CloudFormation templates).
+A :term:`Module` is a directory containing a single infrastructure as code tool configuration of an application, a component, or some infrastructure (e.g. a set of CloudFormation templates).
 It is defined in a deployment by path.
-Modules can also contain granular options that only pertain to it.
+:term:`Modules ` can also contain granular options that only pertain to them.


 Deploy Environment
 ==================

-Deploy environments are used for selecting the options/variables/parameters to
-be used with each modules .
-They can be defined by the name of a directory (if its not a git repo),
-git branch, or environment variable (``DEPLOY_ENVIRONMENT``).
-Standard environments would be something like prod, dev, and test.
+:term:`Deploy Environments ` are used for selecting the options/variables/parameters to be used with each :term:`Module`.
+They can be defined by the name of a directory (if it's not a git repo), git branch, or environment variable (``DEPLOY_ENVIRONMENT``).
+Standard environments would be something like ``prod``, ``dev``, and ``test``. 
-No matter how the environment is determined, the name is made available
-to be consumed by your modules as the ``DEPLOY_ENVIRONMENT`` environment variable.
+No matter how the environment is determined, the name is made available to be consumed by your modules as the ``DEPLOY_ENVIRONMENT`` environment variable.


@@ -57,8 +49,7 @@ to be consumed by your modules as the ``DEPLOY_ENVIRONMENT`` environment variabl
 Deploying Your First Module
 ***************************

-#. Create a directory for our project and change directory into the new
-   directory.
+#. Create a directory for our project and change directory into the new directory.

    .. code-block:: sh

@@ -71,51 +62,76 @@ Deploying Your First Module

       $ git init && git checkout -b ENV-dev

-#. Download Runway using :ref:`curl`. Be sure to use the endpoint
-   that corresponds to your operating system. Then, change the downloaded
-   file's permissions to allow execution.
+#. Install Runway.

-   .. tabs::
+   .. tab-set::

-      .. tab:: Linux
+      .. tab-item:: poetry (recommended)
+         :sync: poetry

-         .. code-block:: sh
+         .. code-block:: console

-            $ curl -L https://oni.ca/runway/latest/linux -o runway
-            $ chmod +x runway
+            $ poetry init --quiet
+            $ poetry add --group deploy runway

-      .. tab:: macOS
+      .. tab-item:: pip (Unix/macOS)
+         :sync: pip-unix

-         .. code-block:: sh
+         .. code-block:: console

-            $ curl -L https://oni.ca/runway/latest/osx -o runway
-            $ chmod +x runway
+            $ python -m venv .venv
+            $ source .venv/bin/activate
+            $ pip install runway

-      .. tab:: Windows
+      .. tab-item:: pip (Windows)
+         :sync: pip-win

-         .. code-block:: powershell
+         .. code-block:: console

-            Invoke-WebRequest -Uri "https://oni.ca/runway/latest/windows" -OutFile runway
+            $ python -m venv .venv
+            $ .venv\Scripts\activate
+            $ pip install runway

-#. Use Runway to generate a sample module using the :ref:`gen-sample ` command.
-   This will give us a preformatted module that is ready to be deployed after we change a few variables.
+#. Use Runway to generate a sample module using the :ref:`commands:gen-sample` command.
+   This will give us a preformatted CloudFormation :term:`Module` that is ready to be deployed after we change a few variables.
+   To read more about the directory structure, see :ref:`repo_structure:Repo Structure`.

    .. code-block:: sh

      $ ./runway gen-sample cfn

+   .. tab-set::
+
+      .. tab-item:: poetry (recommended)
+         :sync: poetry
+
+         .. code-block:: console
+
+            $ poetry shell
+            $ runway gen-sample cfn
+
+      .. tab-item:: pip (Unix/macOS)
+         :sync: pip-unix
+
+         .. code-block:: console
+
+            $ runway gen-sample cfn
+
+      .. tab-item:: pip (Windows)
+         :sync: pip-win
+
+         .. code-block:: console
+
+            $ runway gen-sample cfn

-#. To finish configuring our CloudFormation module, lets open the
-   ``dev-us-east-1.env`` file that was created in ``sampleapp.cfn/``.
-   Here is where we will define values for our stacks that will be deployed as
-   part of the **dev** environment in the **us-east-1** region.
+#. To finish configuring our CloudFormation :term:`Module`, let's open the ``dev-us-east-1.env`` file that was created in ``sampleapp.cfn/``.
+   Here is where we will define values for our stacks that will be deployed as part of the **dev** environment in the **us-east-1** region.
   Replace the place holder values in this file with your own information.
-   It is important that the ``cfngin_bucket_name`` value is globally unique for
-   this example as it will be used to create a new S3 bucket.
+   It is important that the ``cfngin_bucket_name`` value is globally unique for this example as it will be used to create a new S3 bucket.

   .. 
code-block:: yaml
      :caption: dev-us-east-1.env
+     :linenos:

      namespace: onica-dev
      customer: onica
@@ -124,19 +140,18 @@ Deploying Your First Module
      # The CFNgin bucket is used for CFN template uploads to AWS
      cfngin_bucket_name: cfngin-onica-us-east-1

-#. With the module ready to deploy, now we need to create our Runway config file.
-   To do this, use the :ref:`new` command to generate a sample
-   file at the root of the project repo.
+#. With the :term:`Module` ready to deploy, now we need to create our Runway config file.
+   To do this, use the :ref:`commands:new` command to generate a sample file at the root of the project repo.

-   .. code-block:: sh
+   .. code-block:: console

-      $ ./runway new
+      $ runway new

    .. code-block:: yaml
       :caption: runway.yml
+      :linenos:

-      ---
-      # See full syntax at https://docs.onica.com/projects/runway
+      # See full syntax at https://runway.readthedocs.io
       deployments:
         - modules:
             - nameofmyfirstmodulefolder
           environments:
             dev: true
           regions:
             - us-east-1

-#. Now, we need to modify the ``runway.yml`` file that was just created to
-   tell it where the module is located that we want it to deploy and what
-   regions it will be deployed to.
-   Each module type has their own configuration options which are described in
-   more detail in the :ref:`Module Configurations`
-   section but, for this example we are only concerned with the
-   :ref:`CloudFormation module configuration`.
+#. Now, we need to modify the ``runway.yml`` file that was just created to tell it where the :term:`Module` we want to deploy is located and what regions it will be deployed to.
+   Each :term:`Module` type has its own configuration options, which are described in more detail in the :ref:`index:Module Configuration` section but, for this example, we are only concerned with :ref:`index:CloudFormation & Troposphere`.

    .. code-block:: yaml
       :caption: runway.yml
+      :linenos:

-      ---
-      # See full syntax at https://docs.onica.com/projects/runway
+      # See full syntax at https://runway.readthedocs.io
       deployments:
         - modules:
             - sampleapp.cfn
           regions:
             - us-east-1

-#. Before we deploy, it is always a good idea to know how the module will
-   impact the currently deployed infrastructure in your AWS account.
-   This is less of a concern for net-new infrastructure as it is when making
-   modifications.
-   But, for this example, lets run the :ref:`plan` command to see
-   what is about to happen.
+#. Before we deploy, it is always a good idea to know how the :term:`Module` will impact the currently deployed infrastructure in your AWS account.
+   This is less of a concern for net-new infrastructure than it is when making modifications.
+   But, for this example, let's run the :ref:`commands:plan` command to see what is about to happen.

-   .. code-block:: shell
+   .. code-block:: console

-      $ ./runway plan
+      $ runway plan

 #. We are finally ready to deploy!
-   Use the :ref:`deploy` command to deploy our module.
+   Use the :ref:`commands:deploy` command to deploy our :term:`Module`.

-   .. code-block:: shell
+   .. code-block:: console

-      $ ./runway deploy
+      $ runway deploy

 We have only scratched the surface with what is possible in this example.
-Proceed below to find out how to delete the module we just deployed or,
-review the pages linked throughout this section to learn more about what we
-have done to this point before continuing. 
+Proceed below to find out how to delete the :term:`Module` we just deployed or, review the pages linked throughout this section to learn more about what we have done to this point before continuing. @@ -193,29 +198,21 @@ have done to this point before continuing. Deleting Your First Module ************************** -From the root of the project directory we created in -`Deploying Your First Module`_ we only need to run the -:ref:`destroy` command to remove what we have deployed. - -.. code-block:: shell +From the root of the project directory we created in `Deploying Your First Module`_ we only need to run the :ref:`commands:destroy` command to remove what we have deployed. - $ ./runway destroy +.. code-block:: console + $ runway destroy -.. _non-interactive-mode: ***************************************** Execution Without A TTY (non-interactive) ***************************************** -Runway allows you to set an environment variable to allow execution without a -TTY or if STDIN is closed. -This allows users to execute Runway :ref:`deployments` in -their CI/CD infrastructure as code deployment systems avoiding the -``EOF when reading a line`` error message. -In order to execute runway without a TTY, set the ``CI`` environment variable -before your ``runway [deploy|destroy]`` execution. +Runway allows you to set an environment variable to allow execution without a TTY or if STDIN is closed. +This allows users to execute Runway :term:`Deployments ` in their CI/CD infrastructure as code deployment systems avoiding the ``EOF when reading a line`` error message. +In order to execute Runway without a TTY, set the ``CI`` environment variable before your ``runway [deploy|destroy]`` execution. .. important:: Executing Runway in this way will cause Runway to perform updates in your environment without prompt. diff --git a/docs/source/index.rst b/docs/source/index.rst index aea642802..60b13ef75 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -26,15 +26,11 @@ Runway is a lightweight wrapper around infrastructure deployment (e.g. CloudForm ---- -.. _module-configurations: - ******************** Module Configuration ******************** -.. _mod-cdk: - AWS Cloud Development Kit (CDK) =============================== @@ -56,8 +52,6 @@ This means that the CDK must be included as a dev dependency in the **package.js cdk/advanced_features -.. _mod-cfn: - CloudFormation & Troposphere ============================ @@ -81,8 +75,6 @@ It is able to deploy raw CloudFormation templates (JSON & YAML) and Troposphere_ .. _Troposphere: https://github.com/cloudtools/troposphere -.. _mod-k8s: - Kubernetes ========== @@ -104,8 +96,6 @@ Kubernetes manifests can be deployed via Runway offering an ideal way to handle kubernetes/advanced_features -.. _mod-sls: - Serverless Framework ==================== @@ -127,8 +117,6 @@ This means that Serverless must be included as a dev dependency in the **package serverless/advanced_features -.. _mod-staticsite: - Static Site =========== @@ -157,8 +145,6 @@ A start-to-finish example walkthrough is available in the :ref:`Conduit quicksta staticsite/advanced_features -.. _mod-tf: - Terraform ========= diff --git a/docs/source/installation.rst b/docs/source/installation.rst index 0372a4bb3..ec5b879dc 100644 --- a/docs/source/installation.rst +++ b/docs/source/installation.rst @@ -1,144 +1,54 @@ -.. 
_install: - ############ Installation ############ -To enable Runway to conform to our users' varying use cases, we have made it -available via three different install methods - `cURL`_, `npm`_, and `pip`_. - - -.. _install-curl: - -**** -cURL -**** - -Arguably the easiest way to install Runway is by using curl. Use one of the -endpoints below to download a single-binary executable version of Runway based -on your operating system. - -+------------------+---------------------------------------------------+ -| Operating System | Endpoint | -+==================+===================================================+ -| Linux | https://oni.ca/runway/latest/linux | -+------------------+---------------------------------------------------+ -| macOS | https://oni.ca/runway/latest/osx | -+------------------+---------------------------------------------------+ -| Windows | https://oni.ca/runway/latest/windows | -+------------------+---------------------------------------------------+ - -.. tabs:: - - .. tab:: Linux - - .. code-block:: sh - - curl -L https://oni.ca/runway/latest/linux -o runway - - .. tab:: macOS - - .. code-block:: sh - - curl -L https://oni.ca/runway/latest/osx -o runway - - .. tab:: Windows - - .. code-block:: powershell - - Invoke-WebRequest -Uri "https://oni.ca/runway/latest/windows" -OutFile runway - -.. note:: To install a specific version of Runway, you can replace ``latest`` - with a version number. - -.. rubric:: Usage - -To use the single-binary, run it directly as shown below. Please note that -after download, you may need to adjust the permissions before it can be -executed. (eg. Linux/macOS:``chmod +x runway``) - -.. code-block:: sh - - $ ./runway deploy -**Suggested use:** CloudFormation or Terraform projects - - -.. _install-npm: - -*** -npm -*** - -Runway is published on npm as ``@onica/runway``. -It currently contains binaries to support macOS, Ubuntu, and Windows. - -While Runway can be installed globally like any other npm package, we strongly -recommend using it per-project as a dev dependency. +Runway can be installed like any other Python package, but we instead strongly recommend using it per-project with :link:`poetry`. See `Why Version Lock Per-Project`_ for more info regarding this suggestion. -.. code-block:: shell +.. tab-set:: - $ npm i -D @onica/runway + .. tab-item:: poetry (recommended) + :sync: poetry -.. rubric:: Usage + .. code-block:: console -.. code-block:: shell + $ poetry add runway - $ npx runway deploy + .. tab-item:: pip + :sync: pip -**Suggested use:** Serverless or AWS CDK projects + .. code-block:: console - -.. _install-python: - -*** -pip -*** - -Runway runs on Python 2.7 and Python 3.5+. - -Runway is hosted on PyPI as the package named ``runway``. -It can be installed like any other Python package, but we instead strongly recommend using it -per-project with `poetry `_. -See `Why Version Lock Per-Project`_ for more info regarding this suggestion. - -**Suggested use:** Python projects - -.. tabs:: - - .. tab:: poetry - - .. code-block:: sh - - poetry add runway - - .. tab:: pip - - .. code-block:: sh - - pip install --user runway + $ pip install --user runway # or (depending on how Python was installed) - pip install runway + $ pip install runway .. rubric:: Usage -.. tabs:: +.. tab-set:: + + .. tab-item:: poetry + :sync: poetry + + .. code-block:: console - .. tab:: poetry + $ poetry run runway --help - .. code-block:: sh + .. tab-item:: pip + :sync: pip - poetry run runway --help + .. code-block:: console - .. 
tab:: pip

-      .. code-block:: sh

-         runway --help

+
+.. versionremoved:: 2.8.0
+   Support for installation via cURL and npm was removed.
+   Prior versions published to npm will remain, in a deprecated/unsupported state, indefinitely.
+   Prior versions published to S3 will be removed at a date yet to be determined.

-.. _why-version-lock:

 ****************************
 Why Version Lock Per-Project
diff --git a/docs/source/kubernetes/advanced_features.rst b/docs/source/kubernetes/advanced_features.rst
index 20e882dd0..1c8ce45e1 100644
--- a/docs/source/kubernetes/advanced_features.rst
+++ b/docs/source/kubernetes/advanced_features.rst
@@ -6,8 +6,6 @@ Advanced Features

 Advanced features and detailed information for using Kubernetes with Runway.

-.. contents::
-   :depth: 4

 ***************************
@@ -28,7 +26,7 @@ This can be set as a relative path or an absolute one.
         - ${env DEPLOY_ENVIRONMENT}
         - config

-This would set `KUBECONFIG` to ``/.kube/$DEPLOY_ENVIRONMENT/config`` where ``$DEPLOY_ENVIRONMENT`` is the current Runway :ref:`deploy environment `.
+This would set `KUBECONFIG` to ``/.kube/$DEPLOY_ENVIRONMENT/config`` where ``$DEPLOY_ENVIRONMENT`` is the current Runway :term:`Deploy Environment`.

 ----

@@ -50,7 +48,7 @@ Without a version specified, Runway will fallback to whatever ``kubectl`` it fin

       1.14.5

-Lookups can be used to provide different versions for each :ref:`deploy environment `.
+Lookups can be used to provide different versions for each :term:`Deploy Environment`.

 .. code-block:: yaml
    :caption: runway.yml
diff --git a/docs/source/kubernetes/configuration.rst b/docs/source/kubernetes/configuration.rst
index e5ba937eb..4418e8f45 100644
--- a/docs/source/kubernetes/configuration.rst
+++ b/docs/source/kubernetes/configuration.rst
@@ -4,10 +4,8 @@ Configuration
 #############

-Configuration options and parameters for :ref:`Kubernetes ` modules.
+Configuration options and parameters for :ref:`index:Kubernetes` :term:`Modules `.

-.. contents::
-   :depth: 4

 *******
 Options
 *******

 .. data:: kubectl_version
-   :type: Optional[str]
+   :type: str | None
    :value: None
    :noindex:

@@ -29,7 +27,7 @@ Options
       kubectl_version: 1.14.5

 .. data:: overlay_path
-   :type: Optional[str]
+   :type: str | None
    :value: None
    :noindex:

@@ -48,4 +46,4 @@ Options
 Parameters
 **********

-:ref:`Kubernetes ` does not support the use of :attr:`deployment.parameters`/:attr:`module.parameters` at this time.
+:ref:`index:Kubernetes` does not support the use of :attr:`deployment.parameters`/:attr:`module.parameters` at this time.
diff --git a/docs/source/kubernetes/directory_structure.rst b/docs/source/kubernetes/directory_structure.rst
index c6d45d9f3..046e3fb7f 100644
--- a/docs/source/kubernetes/directory_structure.rst
+++ b/docs/source/kubernetes/directory_structure.rst
@@ -4,7 +4,7 @@ Directory Structure
 ###################

-Example directory structures for a :ref:`Kubernetes ` module.
+Example directory structures for a :ref:`index:Kubernetes` :term:`Module`.

 .. code-block::
diff --git a/docs/source/kubernetes/examples.rst b/docs/source/kubernetes/examples.rst
index eb845250b..3b7acecdb 100644
--- a/docs/source/kubernetes/examples.rst
+++ b/docs/source/kubernetes/examples.rst
@@ -4,10 +4,8 @@ Examples
 ########

-Example uses of the :ref:`Kubernetes ` module
+Example uses of the :ref:`index:Kubernetes` :term:`Module`

-.. 
contents:: - :depth: 4 ****************** diff --git a/docs/source/lookups/env.rst b/docs/source/lookups/env.rst index 0f7869bf3..5f4338f15 100644 --- a/docs/source/lookups/env.rst +++ b/docs/source/lookups/env.rst @@ -16,7 +16,7 @@ These environment variables are manipulated at runtime by Runway to fill in addi .. note:: ``DEPLOY_ENVIRONMENT`` and ``AWS_REGION`` can only be resolved during the processing of a module. - To ensure no error occurs when trying to resolve one of these in a :ref:`Deployment ` definition, provide a default value. + To ensure no error occurs when trying to resolve one of these in a :ref:`Deployment ` definition, provide a default value. If the Lookup is unable to find an environment variable matching the provided query, the default value is returned or a :exc:`ValueError` is raised if a default value was not provided. diff --git a/docs/source/lookups/index.rst b/docs/source/lookups/index.rst index c0e13c74a..8b9cc7d08 100644 --- a/docs/source/lookups/index.rst +++ b/docs/source/lookups/index.rst @@ -5,7 +5,7 @@ Lookups ####### Runway Lookups allow the use of variables within the Runway config file. -These variables can then be passed along to :ref:`deployments `, :ref:`modules ` and :ref:`tests `. +These variables can then be passed along to :ref:`Deployment `, :ref:`Modules ` and :ref:`tests `. The syntax for a lookup is ``${ ::=}``. @@ -47,9 +47,6 @@ For example, if i use ``${var region}`` in my Runway config file to resolve the Well, it can but it will resolve to the literal value provided, not an AWS region like you may expect. -.. contents:: - :depth: 4 - .. _lookup arguments: diff --git a/docs/source/maintainers/release_process.rst b/docs/source/maintainers/release_process.rst index c892eeae0..6af125b08 100644 --- a/docs/source/maintainers/release_process.rst +++ b/docs/source/maintainers/release_process.rst @@ -6,8 +6,6 @@ Release Process Steps that should be taken when preparing for and executing a release. -.. contents:: - :depth: 4 *********** @@ -34,7 +32,5 @@ Execution #. Publish the release. -At this point, GitHub Actions will begin building the deployment packages & automatically publishing them to npm, PyPi, and AWS S3. +At this point, GitHub Actions will begin building the deployment package & automatically publishing it to PyPI. The **Publish Release** workflow can be monitored for progress. -It can take around 20 minutes for the process to complete. -At which time, the logs and package repositories should be checked to verify that the release was published successfully. diff --git a/docs/source/maintainers/secrets.rst b/docs/source/maintainers/secrets.rst index f80da5ca6..daaf8af80 100644 --- a/docs/source/maintainers/secrets.rst +++ b/docs/source/maintainers/secrets.rst @@ -55,7 +55,7 @@ Secrets specific to the repository, available to all environments. .. envvar:: TEST_PYPI_PASSWORD - Similar to :envvar:`PYPI_PASSWORD` but for https://test.pypi.org/. + Similar to :envvar:`PYPI_PASSWORD` but for :link:`Test PyPI`. .. envvar:: TEST_RUNNER_AWS_ACCESS_KEY_ID diff --git a/docs/source/python_setup.rst b/docs/source/python_setup.rst deleted file mode 100644 index 9acf1fc93..000000000 --- a/docs/source/python_setup.rst +++ /dev/null @@ -1,81 +0,0 @@ -.. _python-setup: - -Python Setup -============ - -Perform the following to install/configure Python & package management tools. - -.. note:: All commands below are to be run as your user. - (i.e. not root/sudo/Administrator). - -1. 
Ensure you have a working python environment: - - On macOS: - - Note: Substitute ``.bash_profile`` in place of ``.zshrc`` below if using Bash (e.g. macOS pre-Catalina) - - Add local python bin directories to $PATH:: - - PYTHONVER=$(python3 -c "import sys;print(str(sys.version_info.major)+'.'+str(sys.version_info.minor))") - if ! grep -s '\.local\/bin' ~/.zshrc > /dev/null 2>&1 || ! grep -s "Library\/Python\/$PYTHONVER/bin" ~/.zshrc > /dev/null 2>&1 ; then echo "export PATH=\"\$HOME/Library/Python/$PYTHONVER/bin:\$HOME/.local/bin:\$PATH\"" >> ~/.zshrc; fi - - - Run ``source ~/.zshrc`` to use the updated PATH. - - - On Windows: - - Install `Python `_ (choose the latest ``Windows x86-64 executable installer`` and run it): - - On the initial setup page, click ``Customize installation`` - - Leave all Optional Features selected, and click Next - - On the Advanced Options page change the following options and click Install: - - Check the ``Install for all users`` and ``Add Python to environment variables`` options. - - Change the install location to ``C:\Python37`` (updating ``Python37`` to the appropriate directory for the installed version, e.g. ``Python39`` for Python 3.9) - - At the ``Setup was successful`` screen, click ``Disable path length limit`` and then close the setup program. - - Edit the Path environment variable for your user: - - In the Start Menu, start typing ``environment variables`` and select ``Edit environment variables for your account``. - - In the User variables for your username, select ``Path`` and click ``Edit...`` - - Append ``%USERPROFILE%\AppData\Roaming\Python\Python37\Scripts`` & ``%USERPROFILE%\.local\bin`` to the current Variable values and click Ok - - Change ``Python37`` to the appropriate directory for the installed version (e.g. ``Python39`` for Python 3.9) - - In Windows Server 2016, the value is shown in a single line -- add it with semicolons:: - - %USERPROFILE%\AppData\Roaming\Python\Python37\Scripts;%USERPROFILE%\.local\bin; - - - Click Ok to close the Environment Variables window. - - Close all existing PowerShell windows and launch a new one to use the updated PATH. - - - On Ubuntu Linux: - - Add local python bin directory to $PATH:: - - if ! grep 'HOME\/\.local\/bin' ~/.bash_profile > /dev/null 2>&1; then echo 'export PATH=$HOME/.local/bin:$PATH' >> ~/.bash_profile; fi - - - Run ``source ~/.bash_profile`` to use the updated PATH. - - Install Python 3 and dependencies:: - - sudo apt -y install python3-pip - -2. Install python package managers:: - - curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python - - -Package Installation --------------------- - -Afterwards, your user will be able to install additional Python packages using -one of the following tools. Each wraps the base pip package manager and -automatically manages virtual environments to ensure package dependencies don't -conflict. - -pipx -~~~~ - -Global CLIs & utilities, like the AWS CLI, can be installed via -`pipx `_:: - - pipx install awscli - -poetry -~~~~~~ - -Utilities used in a project should generally have their versions stored in the -project repository, so users (now and in the future) and remote build systems -use the same versions specified in the repo. 
- -`poetry `_ works similarly to pipx, but -creates a lockfile that can be checked into a repo:: - - poetry add --dev runway diff --git a/docs/source/quickstart/cloudformation.rst b/docs/source/quickstart/cloudformation.rst index 6b0a90d3c..fefa16369 100644 --- a/docs/source/quickstart/cloudformation.rst +++ b/docs/source/quickstart/cloudformation.rst @@ -4,8 +4,7 @@ CloudFormation Quickstart ######################### -#. Prepare the project directory. See :ref:`Repo Structure` - for more details. +#. Prepare the project directory. See :ref:`repo_structure:Repo Structure` for more details. .. code-block:: sh @@ -13,15 +12,13 @@ CloudFormation Quickstart $ git init && git checkout -b ENV-dev #. Download/install Runway. - To see available install methods, see :ref:`Installation`. + To see available install methods, see :ref:`installation:Installation`. -#. Use Runway to :ref:`generate a sample` CloudFormation - module` CloudFormation :ref:`Module `, edit the values in the environment file, and create a :ref:`runway_config:Runway Config File` to use the :term:`Module`. - .. tabs:: + .. tab-set:: - .. tab:: POSIX + .. tab-item:: POSIX .. code-block:: sh @@ -37,7 +34,7 @@ CloudFormation Quickstart - us-east-1 EOF - .. tab:: Windows + .. tab-item:: Windows .. code-block:: powershell @@ -56,7 +53,7 @@ CloudFormation Quickstart "@ $RunwayTemplate | Out-File -FilePath runway.yml -Encoding ASCII -#. :ref:`Deploy` the stack. +#. :ref:`Deploy ` the stack. .. code-block:: sh diff --git a/docs/source/quickstart/conduit.rst b/docs/source/quickstart/conduit.rst index 993822bb4..742aab769 100644 --- a/docs/source/quickstart/conduit.rst +++ b/docs/source/quickstart/conduit.rst @@ -8,8 +8,6 @@ The `Medium.com-clone "RealWorld" demo app ` for more details. + See :ref:`repo_structure:Repo Structure` for more details. .. code-block:: sh @@ -42,13 +40,13 @@ Setup $ git checkout -b ENV-dev #. Download/install Runway. - To see available install methods, see :ref:`Installation`. + To see available install methods, see :ref:`installation:Installation`. #. Download the source files. - .. tabs:: + .. tab-set:: - .. tab:: POSIX + .. tab-item:: POSIX .. code-block:: sh @@ -73,7 +71,7 @@ Setup $ cd .. $ curl -O https://raw.githubusercontent.com/onicagroup/runway/master/quickstarts/conduit/runway.yml - .. tab:: Windows + .. tab-item:: Windows .. highlight:: powershell @@ -131,15 +129,15 @@ Execute ``runway destroy``, enter ``all``. The backend DynamoDB tables will still be retained after the destroy is complete. They must be deleted separately. -.. tabs:: +.. tab-set:: - .. tab:: POSIX + .. tab-item:: POSIX .. code-block:: sh for i in realworld-dev-articles realworld-dev-comments realworld-dev-users; do aws dynamodb delete-table --region us-east-1 --table-name $i; done - .. tab:: Windows + .. tab-item:: Windows .. code-block:: powershell diff --git a/docs/source/quickstart/other_ways_to_use.rst b/docs/source/quickstart/other_ways_to_use.rst index e30feb844..0f951258c 100644 --- a/docs/source/quickstart/other_ways_to_use.rst +++ b/docs/source/quickstart/other_ways_to_use.rst @@ -5,32 +5,21 @@ Other Ways to Use Runway ######################## -While we recommend using one of the install methods outlined in the -:ref:`Installation` section, we realize that these may not be an -option for some so we have provided a `CloudFormation`_ template for spinning -up a deploy environment in AWS and a `Docker`_ image/Dockerfile that can be -used to run Runway. 
+While we recommend using one of the install methods outlined in the :ref:`installation:Installation` section, we realize that these may not be an option for some, so we have provided a `CloudFormation`_ template for spinning up a deploy environment in AWS and a `Docker`_ image/Dockerfile that can be used to run Runway.
 
-.. contents::
-   :depth: 4
 
 **************
 CloudFormation
 **************
 
-This `CloudFormation template`_ is probably the easiest and quickest way to go
-from "zero to Runway" as it allows for using an IAM Role eliminate the need to
-configure API keys.
+This `CloudFormation template`_ is probably the easiest and quickest way to go from "zero to Runway" as it allows for using an IAM Role to eliminate the need to configure API keys.
 
 The template will deploy your preference of Linux or Windows Runway host.
-Windows Runway host includes Visual Studio Code, which some users may
-find easier for manipulating Runway config files.
+The Windows Runway host includes Visual Studio Code, which some users may find easier for manipulating Runway config files.
 
 ******
 Docker
 ******
 
-Docker users can build their own Docker image to run a local Runway
-container or modify this `Dockerfile`_ to build a Runway image to suit specific
-needs.
+Docker users can build their own Docker image to run a local Runway container or modify this `Dockerfile`_ to build a Runway image to suit specific needs.
diff --git a/docs/source/quickstart/private_static_site.rst b/docs/source/quickstart/private_static_site.rst
index 8eca4d27d..bbcc636a3 100644
--- a/docs/source/quickstart/private_static_site.rst
+++ b/docs/source/quickstart/private_static_site.rst
@@ -5,8 +5,6 @@ Private Static Site (`Auth@Edge`) Quickstart
 The Runway built-in sample generation of a basic React app will be used as a simple demonstration of creating an authentication-backed single-page application.
 
-.. contents::
-   :depth: 4
 
 
 *************
 Project Setup
 =============
 
 #. Download/install Runway.
-   To see available install methods, see :ref:`Installation`.
+   To see available install methods, see :ref:`installation:Installation`.
 
 #. From a directory of your choosing run the following to generate a sample React project:
 
@@ -45,7 +43,7 @@ Project Setup
      $ mv static-react my-static-site
 
 #. Change directories into the new project folder and prepare the project directory.
-   See :ref:`Repo Structure` for more details.
+   See :ref:`repo_structure:Repo Structure` for more details.
 
    .. code-block:: shell
 
diff --git a/docs/source/repo_structure.rst b/docs/source/repo_structure.rst
index 7eeac6ebf..b073320f1 100644
--- a/docs/source/repo_structure.rst
+++ b/docs/source/repo_structure.rst
@@ -1,13 +1,9 @@
-.. _repo-structure:
-
 ##############
 Repo Structure
 ##############
 
 Projects deployed via Runway can be structured in a few ways.
 
-.. contents::
-   :depth: 4
 
 
 ****************************
diff --git a/docs/source/runway_config.rst b/docs/source/runway_config.rst
index eed27f6dc..dc73838f2 100644
--- a/docs/source/runway_config.rst
+++ b/docs/source/runway_config.rst
@@ -1,5 +1,3 @@
-.. _runway-config:
-
 ##################
 Runway Config File
 ##################
@@ -15,7 +13,7 @@ Top-Level Configuration
 ***********************
 
 .. attribute:: deployments
-  :type: List[deployment]
+  :type: list[deployment]
 
   A list of deployments that will be processed in the order they are defined.
   See Deployment_ for detailed information about defining this value.
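For orientation, the sketch below shows a minimal ``deployments`` definition (the module path and region are hypothetical placeholders):

.. code-block:: yaml

    deployments:
      - modules:
          - path: sampleapp.cfn
        regions:
          - us-east-1

Because deployments are processed in the order they are defined, shared infrastructure can be placed in an earlier deployment than the modules that depend on it.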
@@ -35,9 +33,9 @@ Top-Level Configuration :type: bool :value: false - Optionally exclude the git branch name when determining the current :ref:`deploy environment `. + Optionally exclude the git branch name when determining the current :term:`Deploy Environment`. - This can be useful when using the directory name or environment variable to set the :ref:`deploy environment ` to ensure the correct value is used. + This can be useful when using the directory name or environment variable to set the :term:`Deploy Environment` to ensure the correct value is used. .. rubric:: Example .. code-block:: yaml @@ -47,8 +45,8 @@ Top-Level Configuration .. note:: The existence of ``DEPLOY_ENVIRONMENT`` in the environment will automatically ignore the git branch. .. attribute:: runway_version - :type: str - :value: ">=1.10.0" + :type: str | None + :value: None Define the versions of Runway that can be used with this configuration file. @@ -73,10 +71,10 @@ Top-Level Configuration .. versionadded:: 1.11.0 .. attribute:: tests - :type: Optional[List[test]] + :type: list[test] | None :value: [] - List of Runway test definitions that are executed with the :ref:`test command ` command. + List of Runway test definitions that are executed with the :ref:`test command `. See Test_ for detailed information about defining this value. .. rubric:: Example @@ -92,7 +90,7 @@ Top-Level Configuration .. _runway-variables: .. attribute:: variables - :type: Optional[Dict[str, Any]] + :type: dict[str, Any] | None :value: {} Runway variables are used to fill values that could change based on any number of circumstances. @@ -132,7 +130,7 @@ Top-Level Configuration .. versionadded 1.4.0 .. data:: variables.file_path - :type: Optional[str] + :type: str | None Explicit path to a variables file that will be loaded and merged with the variables defined here. @@ -143,7 +141,7 @@ Top-Level Configuration file_path: some-file.yml .. data:: variables.sys_path - :type: Optional[str] + :type: str | None :value: ./ Directory to use as the root of a relative :data:`variables.file_path`. @@ -159,7 +157,6 @@ Top-Level Configuration ---- -.. _runway-deployment: ********** Deployment @@ -169,10 +166,10 @@ Deployment A deployment defines modules and options that affect the modules. - Deployments are processed during a :ref:`deploy `/:ref:`destroy `/:ref:`plan ` action. + Deployments are processed during a :ref:`commands:deploy`/:ref:`commands:destroy`/:ref:`commands:plan` action. If the processing of one deployment fails, the action will end. - During a :ref:`deploy `/:ref:`destroy ` action, the user has the option to select which deployment will run unless the ``CI`` environment variable (``--ci`` cli option) is set, the ``--tag ...`` cli option was provided, or only one deployment is defined. + During a :ref:`commands:deploy`/:ref:`commands:destroy` action, the user has the option to select which deployment will run unless the ``CI`` environment variable (``--ci`` cli option) is set, the ``--tag ...`` cli option was provided, or only one deployment is defined. .. rubric:: Lookup Support @@ -202,7 +199,7 @@ Deployment .. attribute:: account_alias - :type: Optional[str] + :type: str | None :value: None An `AWS account alias `__ use to verify the currently assumed role or credentials. @@ -231,7 +228,7 @@ Deployment No longer accepts a :class:`typing.Dict`. .. attribute:: account_id - :type: Optional[str] + :type: str | None :value: None An AWS account ID use to verify the currently assumed role or credentials. 
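As a hedged sketch of how these two verification fields are typically combined (the alias and account ID below are placeholders), a deployment can assert which account the current credentials belong to before any module is processed:

.. code-block:: yaml

    deployments:
      - account_alias: example-dev
        account_id: '123456789012'
        modules:
          - path: sampleapp.cfn
        regions:
          - us-east-1

If the assumed role or credentials do not match, the verification is intended to catch the mismatch before anything is deployed to the wrong account.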
@@ -260,7 +257,7 @@ Deployment No longer accepts a :class:`typing.Dict`. .. attribute:: assume_role - :type: Optional[assume_role_definition, str] + :type: assume_role_definition | str | None :value: {} Assume an AWS IAM role when processing the deployment. @@ -315,7 +312,7 @@ Deployment An identifier for the assumed role session. .. attribute:: env_vars - :type: Optional[Dict[str, Union[List[str], str]]] + :type: dict[str, list[str] | str] | None :value: {} Additional variables to add to the environment when processing the deployment. @@ -350,7 +347,7 @@ Deployment The entire value of the field is used for all environments. .. attribute:: environments - :type: Optional[Dict[str, Union[bool, List[str], str]]] + :type: dict[str, bool | list[str] | str] | None :value: {} Explicitly enable/disable the deployment for a specific deploy environment, AWS Account ID, and AWS Region combination. @@ -389,7 +386,7 @@ Deployment If defined and the current deploy environment is missing from the definition, processing will be skipped. .. attribute:: modules - :type: List[Union[module, str]] + :type: list[module | str] A list of modules to process as part of a deployment. @@ -402,7 +399,7 @@ Deployment - path: sampleapp-02.cfn .. attribute:: module_options - :type: Optional[Union[Dict[str, Any], str]] + :type: dict[str, Any] | str | None :value: {} Options that are passed directly to the modules within this deployment. @@ -437,7 +434,7 @@ Deployment example: value .. attribute:: name - :type: Optional[str] + :type: str | None :value: None The name of the deployment to be displayed in logs and the interactive selection menu. @@ -449,7 +446,7 @@ Deployment - name: networking .. attribute:: parallel_regions - :type: Optional[Union[List[str], str]] + :type: list[str] | str | None :value: [] A list of AWS Regions to process asynchronously. @@ -491,7 +488,7 @@ Deployment .. versionadded:: 1.3.0 .. attribute:: parameters - :type: Optional[Union[Dict[str, Any], str]] + :type: dict[str, Any] | str | None :value: {} Used to pass variable values to modules in place of an environment configuration file. @@ -520,7 +517,7 @@ Deployment .. versionadded:: 1.4.0 .. attribute:: regions - :type: Optional[Union[Dict[str, Union[List[str], str], List[str], str]] + :type: dict[str, list[str] | str] | list[str] | str | None :value: [] A list of AWS Regions to process this deployment in. @@ -570,7 +567,6 @@ Deployment ---- -.. _runway-module: ****** Module @@ -578,31 +574,31 @@ Module .. class:: module - A module defines the directory to be processed and applicable options. + A :term:`Module` defines the directory to be processed and applicable options. - It can consist of :ref:`CloudFormation `, :ref:`Terraform `, :ref:`Serverless Framework `, :ref:`AWS CDK `, :ref:`Kubernetes `, or a :ref:`Static Site`. + It can consist of :ref:`index:CloudFormation & Troposphere`, :ref:`index:Terraform`, :ref:`index:Serverless Framework`, :ref:`index:AWS Cloud Development Kit (CDK)`, :ref:`index:Kubernetes`, or a :ref:`index:Static Site`. It is recommended to place the appropriate extension on each directory for identification (but it is not required). - See :ref:`Repo Structure` for examples of a module directory structure. 
- - +------------------+-----------------------------------------------+ - | Suffix/Extension | IaC Tool/Framework | - +==================+===============================================+ - | ``.cdk`` | :ref:`AWS CDK ` | - +------------------+-----------------------------------------------+ - | ``.cfn`` | :ref:`CloudFormation ` | - +------------------+-----------------------------------------------+ - | ``.k8s`` | :ref:`Kubernetes ` | - +------------------+-----------------------------------------------+ - | ``.sls`` | :ref:`Serverless Framework ` | - +------------------+-----------------------------------------------+ - | ``.tf`` | :ref:`Terraform ` | - +------------------+-----------------------------------------------+ - | ``.web`` | :ref:`Static Site` | - +------------------+-----------------------------------------------+ + See :ref:`repo_structure:Repo Structure` for examples of a module directory structure. + + +------------------+---------------------------------------------------------+ + | Suffix/Extension | IaC Tool/Framework | + +==================+=========================================================+ + | ``.cdk`` | :ref:`index:AWS Cloud Development Kit (CDK)` | + +------------------+---------------------------------------------------------+ + | ``.cfn`` | :ref:`index:CloudFormation & Troposphere` | + +------------------+---------------------------------------------------------+ + | ``.k8s`` | :ref:`index:Kubernetes` | + +------------------+---------------------------------------------------------+ + | ``.sls`` | :ref:`index:Serverless Framework` | + +------------------+---------------------------------------------------------+ + | ``.tf`` | :ref:`index:Terraform` | + +------------------+---------------------------------------------------------+ + | ``.web`` | :ref:`index:Static Site` | + +------------------+---------------------------------------------------------+ A module is only deployed if there is a corresponding environment file present, it is explicitly enabled via :attr:`deployment.environments`/:attr:`module.environments`, or :attr:`deployment.parameters`/:attr:`module.parameters` is defined. The naming format of an environment file varies per module type. - See :ref:`Module Configurations` for acceptable environment file name formats. + See :ref:`index:Module Configuration` for acceptable environment file name formats. Modules can be defined as a string or a mapping. The minimum requirement for a module is a string that is equal to the name of the module directory. @@ -622,7 +618,7 @@ Module - :attr:`~module.path` .. attribute:: class_path - :type: Optional[str] + :type: str | None :value: null .. note:: @@ -630,7 +626,7 @@ Module It is only used for custom module type handlers. Import path to a custom Runway module handler class. - See :ref:`Module Configurations` for detailed usage. + See :ref:`index:Module Configuration` for detailed usage. .. rubric:: Example .. code-block:: yaml @@ -640,7 +636,7 @@ Module - class_path: runway.module.cloudformation.CloudFormation .. attribute:: env_vars - :type: Optional[Dict[str, Union[List[str], str]]] + :type: dict[str, list[str] | str] | None :value: {} Additional variables to add to the environment when processing the deployment. @@ -678,7 +674,7 @@ Module The entire value of the field is used for all environments. .. 
attribute:: environments - :type: Optional[Dict[str, Union[bool, List[str], str]]] + :type: dict[str, bool | list[str] | str] | None :value: {} Explicitly enable/disable the deployment for a specific deploy environment, AWS Account ID, and AWS Region combination. @@ -720,7 +716,7 @@ Module If defined and the current deploy environment is missing from the definition, processing will be skipped. .. attribute:: name - :type: Optional[str] + :type: str | None The name of the module to be displayed in logs and the interactive selection menu. @@ -734,13 +730,13 @@ Module - name: networking .. attribute:: options - :type: Optional[Union[Dict[str, Any], str]] + :type: dict[str, Any] | str | None :value: {} Options that are passed directly to the module type handler class. The options that can be used with each module vary. - For detailed information about options for each type of module, see :ref:`Module Configurations`. + For detailed information about options for each type of module, see :ref:`index:Module Configuration`. Anything defined here is merged with the value of :attr:`deployment.module_options`. Values defined here take precedence. @@ -776,7 +772,7 @@ Module example: value .. attribute:: parallel - :type: Optional[List[module]] + :type: list[module] | None :value: [] List of `module` definitions that can be executed asynchronously. @@ -796,7 +792,7 @@ Module - path: sampleapp-02.cfn .. attribute:: parameters - :type: Optional[Union[Dict[str, Any], str]] + :type: dict[str, Any] | str | None :value: {} Used to pass variable values to modules in place of an environment configuration file. @@ -828,7 +824,7 @@ Module .. versionadded:: 1.4.0 .. attribute:: path - :type: Optional[Union[str, Path]] + :type: str | Path | None Directory (relative to the Runway config file) containing IaC. The directory can either be on the local file system or a network accessible location. @@ -846,7 +842,7 @@ Module .. versionadded:: 1.4.0 .. attribute:: tags - :type: Optional[List[str]] + :type: list[str] | None :value: [] A list of strings to categorize the module which can be used with the CLI to quickly select a group of modules. @@ -863,7 +859,7 @@ Module - type:network .. attribute:: type - :type: Optional[str] + :type: str | None Explicitly define the type of IaC contained within the directory. This can be useful when Runway fails to automatically determine the correct module type. @@ -887,7 +883,6 @@ Module .. versionadded:: 1.4.0 -.. _runway-module-path: path ==== @@ -929,7 +924,6 @@ The syntax is based on that of `Terraform module sources ` to execute them. + Simply define all tests for a project in the Runway config file and use the :ref:`test command ` to execute them. .. rubric:: Lookup Support .. note:: - Runway does not set ``AWS_REGION`` or ``AWS_DEFAULT_REGION`` environment variables when using the :ref:`test command`. + Runway does not set ``AWS_REGION`` or ``AWS_DEFAULT_REGION`` environment variables when using the :ref:`test command `. The following fields support lookups: @@ -1005,7 +998,7 @@ Test - :attr:`test.required` .. attribute:: args - :type: Optional[Union[Dict[str, Any], str]] + :type: dict[str, Any] | str | None :value: {} Arguments to be passed to the test. @@ -1021,7 +1014,7 @@ Test - echo "Hello world" .. attribute:: name - :type: Optional[str] + :type: str | None Name of the test. Used to more easily identify where different tests begin/end in the logs and to identify which tests failed. @@ -1039,7 +1032,7 @@ Test Whether the test must pass for subsequent tests to be run. 
If ``false``, testing will continue if the test fails. - If the test fails, the :ref:`test command ` will always return a non-zero exit code regardless of this value. + If the test fails, the :ref:`test command ` will always return a non-zero exit code regardless of this value. .. rubric:: Example .. code-block:: yaml diff --git a/docs/source/serverless/advanced_features.rst b/docs/source/serverless/advanced_features.rst index 6cae8571f..96b035d2f 100644 --- a/docs/source/serverless/advanced_features.rst +++ b/docs/source/serverless/advanced_features.rst @@ -6,8 +6,6 @@ Advanced Features Advanced features and detailed information for using Serverless Framework with Runway. -.. contents:: - :depth: 4 .. _sls-skip-npm-ci: @@ -40,7 +38,7 @@ The value of this option is recursively merged into a resolved clone of the modu To create this resolved clone, Runway uses "`serverless print`_" (including `args `_) to resolve the module's Serverless configuration file and output the contents to a temporary file. The temporary file is deleted after each execution of Runway. -This functionality can be especially useful when used alongside :ref:`remote module paths ` such as a module from a :ref:`git repository ` to change values on the fly without needing to modify the source for small differences in each environment. +This functionality can be especially useful when used alongside :ref:`remote module paths ` such as a module from a :ref:`git repository ` to change values on the fly without needing to modify the source for small differences in each environment. .. rubric:: Example .. code-block:: yaml diff --git a/docs/source/serverless/configuration.rst b/docs/source/serverless/configuration.rst index ea05ab32c..f7acae16b 100644 --- a/docs/source/serverless/configuration.rst +++ b/docs/source/serverless/configuration.rst @@ -6,8 +6,6 @@ Configuration Standard `Serverless Framework `__ rules apply but, we have some added prerequisites, recommendations, and caveats. -.. contents:: - :depth: 4 ************* @@ -27,7 +25,7 @@ Options Options specific to Serverless Framework modules. .. data:: args - :type: Optional[List[str]] + :type: list[str] :value: [] :noindex: @@ -45,7 +43,7 @@ Options specific to Serverless Framework modules. .. versionadded:: 1.4.0 .. data:: extend_serverless_yml - :type: Optional[Dict[str, Any]] + :type: dict[str, Any] :value: {} :noindex: @@ -64,7 +62,7 @@ Options specific to Serverless Framework modules. .. versionadded:: 1.8.0 .. data:: promotezip - :type: Optional[Dict[str, str]] + :type: dict[str, str] :value: {} :noindex: @@ -104,10 +102,10 @@ Refer to the `Serverless Framework Documentation ` has a 1-to-1 mapping to Serverless's **stage**. +Runway's concept of a :term:`Deploy Environment` has a 1-to-1 mapping to Serverless's **stage**. For example, if the deploy environment is **dev**, Serverless will be run with ``--stage dev``. -Each stage requires either its own variables file (even if empty for a particular stage) following a specific `File Naming`_ scheme and/or a configured ``environment`` for the module or deployment (see :ref:`Runway Config File ` for details). +Each stage requires either its own variables file (even if empty for a particular stage) following a specific `File Naming`_ scheme and/or a configured ``environment`` for the module or deployment (see :ref:`runway_config:Runway Config File` for details). 
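To make the stage mapping concrete, here is a minimal sketch of the second approach (the module path and region are hypothetical): with the deploy environment **dev**, the module below is enabled via its ``environments`` definition and Serverless Framework is run with ``--stage dev``, without needing a per-stage variables file:

.. code-block:: yaml

    deployments:
      - modules:
          - path: myapp.sls
            environments:
              dev: true
        regions:
          - us-east-1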
 File Naming
 ===========
diff --git a/docs/source/serverless/directory_structure.rst b/docs/source/serverless/directory_structure.rst
index e0a026f54..5130a9254 100644
--- a/docs/source/serverless/directory_structure.rst
+++ b/docs/source/serverless/directory_structure.rst
@@ -7,9 +7,6 @@ Directory Structure
 Example directory structures for a Serverless module.
 
-.. contents::
-   :depth: 4
-
 
 **************
 Python Example
diff --git a/docs/source/staticsite/advanced_features.rst b/docs/source/staticsite/advanced_features.rst
index 99430a4a2..1e06591d1 100644
--- a/docs/source/staticsite/advanced_features.rst
+++ b/docs/source/staticsite/advanced_features.rst
@@ -4,8 +4,7 @@ Advanced Features
 #################
 
-.. contents::
-   :depth: 4
+
 
 .. _`Auth@Edge`:
 
@@ -93,7 +92,7 @@ See :ref:`Static Site Examples ` to see how to do this in A
     The destination name of the file to create.
 
   .. data:: file
-    :type: Optional[str]
+    :type: str | None
     :value: None
     :noindex:
 
@@ -102,7 +101,7 @@ See :ref:`Static Site Examples ` to see how to do this in A
     This or ``content`` must be specified.
 
   .. data:: content_type
-    :type: Optional[str]
+    :type: str | None
     :value: None
     :noindex:
 
@@ -114,7 +113,7 @@ See :ref:`Static Site Examples ` to see how to do this in A
     * ``text/yaml`` to serialize ``content`` into YAML.
 
   .. data:: content
-    :type: Optional[Union[str, List[Any], Dict[str, Any]]]
+    :type: str | list[Any] | dict[str, Any] | None
     :value: None
     :noindex:
 
diff --git a/docs/source/staticsite/configuration.rst b/docs/source/staticsite/configuration.rst
index cb156efd0..ee6fb6236 100644
--- a/docs/source/staticsite/configuration.rst
+++ b/docs/source/staticsite/configuration.rst
@@ -5,20 +5,17 @@ Configuration
 #############
 
-Configuration options and parameters for :ref:`static site ` modules.
+Configuration options and parameters for :ref:`index:Static Site` :term:`Modules `.
 Example uses of the options and parameters can be found in the :ref:`Examples ` section.
 
-.. contents::
-   :depth: 4
-
 
 *******
 Options
 *******
 
 .. data:: build_output
-  :type: Optional[str]
+  :type: str | None
  :value: None
  :noindex:
 
@@ -31,7 +28,7 @@ Options
     build_output: dist
 
 .. data:: build_steps
-  :type: Optional[List[str]]
+  :type: list[str]
  :value: []
  :noindex:
 
@@ -46,7 +43,7 @@ Options
       - npm run build
 
 .. data:: extra_files
-  :type: Optional[List[Dict[str, Union[str, Dict[str, Any]]]]]
+  :type: list[dict[str, str | dict[str, Any]]]
  :value: []
  :noindex:
 
@@ -87,7 +84,7 @@ Options
   .. versionadded:: 1.9.0
 
 .. data:: pre_build_steps
-  :type: Optional[List[Dict[str, str]]]
+  :type: list[dict[str, str]]
  :value: []
  :noindex:
 
@@ -104,7 +101,7 @@ Options
           cwd: ../myothermodule
 
 .. data:: source_hashing
-  :type: Optional[Dict[str, str]]
+  :type: dict[str, str]
  :value: {}
  :noindex:
 
@@ -131,7 +128,7 @@ Parameters
 **********
 
 .. data:: cloudformation_service_role
-  :type: Optional[str]
+  :type: str | None
  :value: None
  :noindex:
 
@@ -161,7 +158,7 @@ Parameters
 .. _staticsite_acmcert_arn:
 
 .. data:: staticsite_acmcert_arn
-  :type: Optional[str]
+  :type: str | None
  :value: None
  :noindex:
 
@@ -175,7 +172,7 @@ Parameters
     staticsite_acmcert_arn: arn:aws:acm:::certificate/
 
 .. data:: staticsite_aliases
-  :type: Optional[str]
+  :type: str | None
  :value: None
  :noindex:
 
@@ -193,7 +190,7 @@ Parameters
 .. _staticsite_auth_at_edge:
 
 .. data:: staticsite_auth_at_edge
-  :type: Optional[bool]
+  :type: bool
  :value: False
  :noindex:
 
@@ -211,7 +208,7 @@ Parameters
 
 .. _staticsite_cf_disable:
 
..
data:: staticsite_cf_disable - :type: Optional[bool] + :type: bool :value: False :noindex: @@ -229,7 +226,7 @@ Parameters .. versionadded:: 1.5.0 .. data:: staticsite_compress - :type: Optional[bool] + :type: bool :value: True :noindex: @@ -242,7 +239,7 @@ Parameters staticsite_compress: false .. data:: staticsite_cookie_settings - :type: Optional[Dict[str, str]] + :type: dict[str, str] | None :value: {"idToken": "Path=/; Secure; SameSite=Lax", "accessToken": "Path=/; Secure; SameSite=Lax", "refreshToken": "Path=/; Secure; SameSite=Lax", "nonce": "Path=/; Secure; HttpOnly; Max-Age=1800; SameSite=Lax"} :noindex: @@ -265,7 +262,7 @@ Parameters .. _staticsite_create_user_pool: .. data:: staticsite_create_user_pool - :type: Optional[bool] + :type: bool :value: False :noindex: @@ -284,7 +281,7 @@ Parameters .. _staticsite_custom_error_responses: .. data:: staticsite_custom_error_responses - :type: Optional[List[Dict[str, Union[int, str]]]] + :type: list[dict[str, int | str]] :value: [] :noindex: @@ -300,7 +297,7 @@ Parameters ResponsePagePath: /index.html .. data:: staticsite_enable_cf_logging - :type: Optional[bool] + :type: bool :value: True :noindex: @@ -313,7 +310,7 @@ Parameters staticsite_enable_cf_logging: true .. data:: staticsite_http_headers - :type: Optional[Dict[str, str]] + :type: dict[str, str] | None :value: {"Content-Security-Policy": "default-src https: 'unsafe-eval' 'unsafe-inline'; font-src 'self' 'unsafe-inline' 'unsafe-eval' data: https:; object-src 'none'; connect-src 'self' https://*.amazonaws.com https://*.amazoncognito.com", "Strict-Transport-Security": "max-age=31536000; includeSubdomains; preload", "Referrer-Policy": "same-origin", "X-XSS-Protection": "1; mode=block", "X-Frame-Options": "DENY", "X-Content-Type-Options": "nosniff"} :noindex: @@ -340,7 +337,7 @@ Parameters .. versionadded:: 1.5.0 .. data:: staticsite_lambda_function_associations - :type: Optional[List[Dict[str, str]]] + :type: list[dict[str, str]] :value: [] :noindex: @@ -356,7 +353,7 @@ Parameters arn: arn:aws:lambda:::function:: .. data:: staticsite_non_spa - :type: Optional[bool] + :type: bool :value: False :noindex: @@ -377,7 +374,7 @@ Parameters .. versionadded:: 1.5.0 .. data:: staticsite_oauth_scopes - :type: Optional[List[str]] + :type: list[str] | None :value: ["phone", "email", "profile", "openid", "aws.cognito.signin.user.admin"] :noindex: @@ -401,7 +398,7 @@ Parameters .. versionadded:: 1.5.0 .. data:: staticsite_redirect_path_auth_refresh - :type: Optional[str] + :type: str | None :value: "/refreshauth" :noindex: @@ -418,7 +415,7 @@ Parameters .. versionadded:: 1.5.0 .. data:: staticsite_redirect_path_sign_in - :type: Optional[str] + :type: str | None :value: "/parseauth" :noindex: @@ -436,7 +433,7 @@ Parameters .. versionadded:: 1.5.0 .. data:: staticsite_redirect_path_sign_out - :type: Optional[str] + :type: str | None :value: "/" :noindex: @@ -456,7 +453,7 @@ Parameters .. _staticsite_rewrite_directory_index: .. data:: staticsite_rewrite_directory_index - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -469,7 +466,7 @@ Parameters staticsite_rewrite_directory_index: index.html .. data:: staticsite_role_boundary_arn - :type: Optional[str] + :type: str | None :value: None :noindex: @@ -485,7 +482,7 @@ Parameters .. versionadded:: 1.8.0 .. data:: staticsite_sign_out_url - :type: Optional[str] + :type: str | None :value: "/signout" :noindex: @@ -502,7 +499,7 @@ Parameters .. versionadded:: 1.5.0 .. 
data:: staticsite_supported_identity_providers
-  :type: Optional[str]
+  :type: str | None
  :value: "COGNITO"
  :noindex:
 
@@ -519,7 +516,7 @@ Parameters
   .. versionadded:: 1.5.0
 
 .. data:: staticsite_user_pool_arn
-  :type: Optional[str]
+  :type: str | None
  :value: None
  :noindex:
 
@@ -536,7 +533,7 @@ Parameters
   .. versionadded:: 1.5.0
 
 .. data:: staticsite_additional_redirect_domains
-  :type: Optional[str]
+  :type: str | None
  :value: None
  :noindex:
 
@@ -555,7 +552,7 @@ Parameters
   .. versionadded:: 1.14.0
 
 .. data:: staticsite_web_acl
-  :type: Optional[str]
+  :type: str | None
  :value: None
  :noindex:
 
@@ -568,7 +565,7 @@ Parameters
     staticsite_web_acl: arn:aws:waf:::certificate/
 
 .. data:: staticsite_required_group
-  :type: Optional[str]
+  :type: str | None
  :value: None
  :noindex:
 
diff --git a/docs/source/staticsite/directory_structure.rst b/docs/source/staticsite/directory_structure.rst
index 5cec8cf7f..371ca98e7 100644
--- a/docs/source/staticsite/directory_structure.rst
+++ b/docs/source/staticsite/directory_structure.rst
@@ -4,12 +4,9 @@ Directory Structure
 ###################
 
-Example directory structures for a ref:`static site ` module.
+Example directory structures for a :ref:`index:Static Site` :term:`Module`.
 
-.. contents::
-   :depth: 4
-
 
 ***********
 Angular SPA
diff --git a/docs/source/staticsite/examples.rst b/docs/source/staticsite/examples.rst
index 185a47aa6..696b746ca 100644
--- a/docs/source/staticsite/examples.rst
+++ b/docs/source/staticsite/examples.rst
@@ -4,12 +4,9 @@ Examples
 ########
 
-Example uses of the :ref:`static site ` module
+Example uses of the :ref:`index:Static Site` :term:`Module`
 
-.. contents::
-   :depth: 4
-
 
 ***********
 Angular SPA
diff --git a/docs/source/terminology.rst b/docs/source/terminology.rst
index 9e190cfe7..c89756106 100644
--- a/docs/source/terminology.rst
+++ b/docs/source/terminology.rst
@@ -1,132 +1,70 @@
-.. _blueprints: terminology.html#blueprint
-.. _CloudFormation: https://aws.amazon.com/cloudformation/
-.. _CloudFormation Parameters: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/parameters-section-structure.html
-.. _troposphere: https://github.com/cloudtools/troposphere
 
 ###########
 Terminology
 ###########
 
+.. glossary::
+  Blueprint
+    A Python class that is responsible for creating a :link:`CloudFormation` template using :link:`troposphere`.
+    :term:`Blueprints ` are deployed using :term:`CFNgin`.
 
-******
-Runway
-******
-
-.. _term-deploy-env:
-
-Deploy Environment
-==================
-
-Deploy environments are used for selecting the options/variables/parameters to be used with each Module_.
-The deploy environment is derived from the current directory (if its not a git repo), active git branch, or environment variable (``DEPLOY_ENVIRONMENT``).
-Standard deploy environments would be something like prod, dev, and test.
-
-When using a git branch, Runway expects the branch to be prefixed with **ENV-**.
-If this is found, Runway knows that it should always use the value that follows the prefix.
-If it's the **master** branch, Runway will use the deploy environment name of *common*.
-If the branch name does not follow either of these schemas and Runway is being run interactively from the CLI, it will prompt of confirmation of the deploy environment that should be used.
-
-When using a directory, Runway expects the directory's name to be prefixed with **ENV-**.
-If this is found, Runway knows that it should always use the value that follows the prefix.
-
-
-Deployment
-==========
-
-A :ref:`deployment` contains a list of `modules <#module>`_ and options for
-all the modules_ in the deployment_.
-A :ref:`runway-config` can contain multiple :ref:`deployments` and a deployment_ can contain multiple modules_.
-
-
-Lookup (Runway)
-===============
-
-A method for expanding values in the :ref:`runway-config` file when processing a deployment/module.
-These are only supported in select areas of the :ref:`runway-config` (see the config docs for more details).
-
-
-Module
-======
-
-A :ref:`module` is a directory containing a single infrastructure-as-code tool configuration of an application, a component, or some infrastructure (eg. a set of `CloudFormation`_ templates).
-It is defined in a `deployment`_ by path.
-Modules can also contain granular options that only pertain to it based on its :attr:`module.type`.
-
-
-.. _term-param:
-
-Parameters
-==========
-
-A mapping of ``key: value`` that is passed to a module.
-Through the use of a `Lookup (Runway)`_, the value can be changed per region or deploy environment.
-The ``value`` can be any data type but, support for complex data types depends on the :attr:`module.type`.
-
-
--------------------------------------------------------------------------------
-
-
-***************
-Runway's CFngin
-***************
-
-
-.. _term-blueprint:
-
-Blueprint
-=========
-
-A python class that is responsible for creating a CloudFormation template.
-Usually this is built using troposphere_.
-
-
-context
-=======
-
-Context is responsible for translating the values passed in via the
-command line and specified in the :class:`~cfngin.config` to :class:`stacks `.
-
+  CFNgin
+    Runway's CloudFormation engine used to deploy :link:`CloudFormation` Templates (JSON or YAML) and :term:`Blueprints ` written using :link:`troposphere`.
 
-.. _term-graph:
+  Deploy Environment
+    :term:`Deploy Environments ` are used for selecting the options/variables/parameters to be used with each :term:`Module`.
+    The :term:`Deploy Environment` is derived from the current directory (if it's not a git repo), active git branch, or environment variable (``DEPLOY_ENVIRONMENT``).
+    Standard :term:`Deploy Environments ` would be something like prod, dev, and test.
 
-graph
-=====
+    When using a git branch, Runway expects the branch to be prefixed with **ENV-**.
+    If this is found, Runway knows that it should always use the value that follows the prefix.
+    If it's the **master** branch, Runway will use the :term:`Deploy Environment` name of *common*.
+    If the branch name does not follow either of these schemas and Runway is being run interactively from the CLI, it will prompt for confirmation of the :term:`Deploy Environment` that should be used.
 
-A mapping of **object name** to **set/list of dependencies**.
+    When using a directory, Runway expects the directory's name to be prefixed with **ENV-**.
+    If this is found, Runway knows that it should always use the value that follows the prefix.
 
-A graph is constructed for each execution of CFNgin from the contents of the
-:class:`~cfngin.config` file.
+  Deployment
+    A :ref:`Deployment ` contains a list of :term:`Modules ` and options for all the :term:`Modules ` in the :term:`Deployment`.
+    A :ref:`runway_config:Runway Config File` can contain multiple :ref:`Deployments ` and a :term:`Deployment` can contain multiple :term:`Modules `.
 
-..
rubric:: Example
+  Lookup
+    In the context of Runway, a :term:`Lookup` is a method for expanding values in the :ref:`runway_config:Runway Config File` when processing a :term:`Deployment`/:term:`Module`.
+    These are only supported in select areas of the :ref:`runway_config:Runway Config File` (see the config docs for more details).
 
-.. code-block:: json
+    In the context of :term:`CFNgin`, a :term:`Lookup` is a method for expanding values in the :class:`~cfngin.config` at runtime.
 
-   {
-     "stack1": [],
-     "stack2": [
-       "stack1"
-     ]
-   }
+  Module
+    A :ref:`Module ` is a directory containing a single Infrastructure-as-Code tool configuration of an application, a component, or some infrastructure (e.g. a set of :link:`CloudFormation` Templates).
+    It is defined in a :term:`Deployment` by path.
+    :term:`Modules ` can also contain granular options that only pertain to them based on their :attr:`module.type`.
 
-- **stack1** depends on nothing.
-- **stack2** depends on **stack1**
+  Output
+    A :link:`CloudFormation` Template concept.
+    :class:`Stacks ` can output values, allowing easy access to those values.
+    Often used to export the unique IDs of resources that Templates create.
+    :term:`CFNgin` makes it easy to pull :term:`Outputs ` from one :class:`~cfngin.stack` and then use them in the :attr:`~cfngin.stack.variables` of another :class:`~cfngin.stack`.
 
-lookup
-======
+  Parameters
+    A mapping of ``key: value`` that is passed to a :term:`Module`.
+    Through the use of a :term:`Lookup`, the value can be changed per region or :term:`Deploy Environment`.
+    The ``value`` can be any data type, but support for complex data types depends on the :attr:`module.type`.
 
-A method for expanding values in the :class:`~cfngin.config` at runtime. By default
-lookups are used to reference Output values from other :class:`stacks ` within the
-same :attr:`~cfngin.config.namespace`.
+  graph
+    A mapping of **object name** to **set/list of dependencies**.
+    A graph is constructed for each execution of :term:`CFNgin` from the contents of a :class:`~cfngin.config` file.
 
-.. _term-outputs:
+    .. rubric:: Example
 
-output
-======
+    .. code-block:: json
 
-A CloudFormation Template concept.
-:class:`Stacks ` can output values, allowing easy access to those values.
-Often used to export the unique ID's of resources that templates create.
+      {
+        "stack1": [],
+        "stack2": [
+          "stack1"
+        ]
+      }
 
-CFNgin makes it simple to pull outputs from one :class:`~cfngin.stack` and then use them in the :attr:`~cfngin.stack.variables` of another :class:`~cfngin.stack`.
+    - **stack1** depends on nothing.
+    - **stack2** depends on **stack1**
diff --git a/docs/source/terraform/advanced_features.rst b/docs/source/terraform/advanced_features.rst
index 31106ca83..f4a43d3ed 100644
--- a/docs/source/terraform/advanced_features.rst
+++ b/docs/source/terraform/advanced_features.rst
@@ -7,9 +7,6 @@ Advanced Features
 Advanced features and detailed information for using Terraform with Runway.
 
-.. contents::
-   :depth: 4
-
 
 .. _tf-backend:
 
@@ -35,7 +32,7 @@ However, it's generally preferable to separate the backend configuration out fro
 This form of configuration is known as `partial configuration`_ and allows for dynamic or secret values to be passed in at runtime.
 Below are examples of how to implement `partial configuration`_ with Runway.
 
-All examples provided showcase the use of the s3 backend type as it is the easiest to use when going from zero to deployed (try :ref:`runway gen-sample cfngin ` for quickstart Terraform backend infrastructure).
+All examples provided showcase the use of the s3 backend type as it is the easiest to use when going from zero to deployed (try :ref:`runway gen-sample cfngin ` for quickstart Terraform backend infrastructure).
 However, Runway supports the use of any `backend type `__ (refer to Terraform's documentation for proper `partial configuration`_ instructions).
 
 .. seealso::
diff --git a/docs/source/terraform/configuration.rst b/docs/source/terraform/configuration.rst
index 97b265a64..8e7225e7d 100644
--- a/docs/source/terraform/configuration.rst
+++ b/docs/source/terraform/configuration.rst
@@ -4,8 +4,6 @@ Configuration
 #############
 
-.. contents::
-   :depth: 4
 
 
 *******
 Options
 *******
 
 Options specific to Terraform Modules.
 
 .. data:: args
-  :type: Optional[Union[Dict[str, List[str]], List[str]]]
+  :type: dict[str, list[str]] | list[str] | None
  :value: None
  :noindex:
 
@@ -32,7 +30,7 @@ Options specific to Terraform Modules.
   .. versionadded:: 1.8.1
 
 .. data:: terraform_backend_config
-  :type: Optional[Dict[str, str]]
+  :type: dict[str, str] | None
  :value: {}
  :noindex:
 
@@ -52,7 +50,7 @@ Options specific to Terraform Modules.
     Added support for any *key: value*.
 
 .. data:: terraform_version
-  :type: Optional[str]
+  :type: str | None
  :value: None
  :noindex:
 
@@ -66,7 +64,7 @@ Options specific to Terraform Modules.
     terraform_version: 0.11.13
 
 .. data:: terraform_write_auto_tfvars
-  :type: Optional[bool]
+  :type: bool
  :value: False
  :noindex:
 
diff --git a/docs/source/upgrades.rst b/docs/source/upgrades.rst
index 2ad3aeb30..3cf989f9d 100644
--- a/docs/source/upgrades.rst
+++ b/docs/source/upgrades.rst
@@ -4,11 +4,10 @@ Upgrades
 ########
 
-.. contents::
-   :depth: 4
-
 During a Runway upgrade (especially coming from a :code:`0.x` version) you may be required to make changes to your configuration or modules.
 This page will describe common issues when upgrading and how to resolve them.
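One recurring change, sketched below with hypothetical values based on the :attr:`deployment.environments` and :attr:`deployment.parameters` behavior described earlier: configs written before the ``parameters`` field was added in 1.4.0 often passed values through ``environments``, which should now be split so that ``environments`` only enables/disables a deploy environment while ``parameters`` carries the values:

.. code-block:: yaml

    deployments:
      - modules:
          - path: sampleapp.cfn
        environments:
          dev: true
        parameters:
          namespace: example-dev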
+ + ******************************* Updating the Runway Config File ******************************* diff --git a/infrastructure/blueprints/admin_role.py b/infrastructure/blueprints/admin_role.py index d98cbf31b..69f7d3fc4 100644 --- a/infrastructure/blueprints/admin_role.py +++ b/infrastructure/blueprints/admin_role.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, ClassVar, Dict, Optional +from typing import TYPE_CHECKING, ClassVar import awacs.sts from awacs.aws import Allow, AWSPrincipal, PolicyDocument, Statement @@ -19,7 +19,7 @@ class AdminRole(Blueprint): """Blueprint for an admin role.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "CrossAccountAccessAccountIds": {"type": list, "default": []}, "PermissionsBoundary": {"type": str}, "RoleName": {"type": str, "default": ""}, @@ -34,9 +34,7 @@ def assume_role_policy(self) -> PolicyDocument: Statement( Action=[awacs.sts.AssumeRole], Effect=Allow, - Principal=AWSPrincipal( - self.variables["CrossAccountAccessAccountIds"] - ), + Principal=AWSPrincipal(self.variables["CrossAccountAccessAccountIds"]), ) ) return policy_doc @@ -47,7 +45,7 @@ def namespace(self) -> str: return self.context.namespace @cached_property - def role_name(self) -> Optional[str]: + def role_name(self) -> str | None: """Name of the role being created.""" val = self.variables["RoleName"] if val == "": diff --git a/infrastructure/blueprints/admin_user.py b/infrastructure/blueprints/admin_user.py index cecd3bb9a..c3c8315b9 100644 --- a/infrastructure/blueprints/admin_user.py +++ b/infrastructure/blueprints/admin_user.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, ClassVar, Dict, Optional +from typing import TYPE_CHECKING, ClassVar from troposphere import NoValue from troposphere.iam import User @@ -17,7 +17,7 @@ class AdminUser(Blueprint): """Blueprint for an admin user.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "PermissionsBoundary": {"type": str}, "UserName": {"type": str, "default": ""}, } @@ -42,7 +42,7 @@ def user(self) -> User: return user @cached_property - def username(self) -> Optional[str]: + def username(self) -> str | None: """Name of the user being created.""" val = self.variables["UserName"] if val == "": @@ -53,4 +53,4 @@ def create_template(self) -> None: """Create a template from the Blueprint.""" self.template.set_description("Admin user") self.template.set_version("2010-09-09") - self.user # pylint: disable=pointless-statement + self.user # noqa: B018 diff --git a/infrastructure/blueprints/cfngin_bucket.py b/infrastructure/blueprints/cfngin_bucket.py index 20718958c..ce41af817 100644 --- a/infrastructure/blueprints/cfngin_bucket.py +++ b/infrastructure/blueprints/cfngin_bucket.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, ClassVar, Dict +from typing import TYPE_CHECKING, ClassVar from troposphere import And, Equals, If, Not, NoValue, s3 @@ -17,7 +17,7 @@ class CfnginBucket(Blueprint): """Blueprint for a CFNgin Bucket.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "BucketName": { "type": CFNString, "description": "Name for the S3 bucket", @@ -63,11 +63,7 @@ def create_template(self) -> None: BucketName=self.bucket_name, DeletionPolicy=self.variables["DeletionPolicy"], 
LifecycleConfiguration=s3.LifecycleConfiguration( - Rules=[ - s3.LifecycleRule( - NoncurrentVersionExpirationInDays=30, Status="Enabled" - ) - ] + Rules=[s3.LifecycleRule(NoncurrentVersionExpirationInDays=30, Status="Enabled")] ), VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"), ) diff --git a/infrastructure/blueprints/prevent_privilege_escalation.py b/infrastructure/blueprints/prevent_privilege_escalation.py index 5cd3420e6..8ba913db2 100644 --- a/infrastructure/blueprints/prevent_privilege_escalation.py +++ b/infrastructure/blueprints/prevent_privilege_escalation.py @@ -6,7 +6,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, ClassVar, Dict, List, Union +from typing import TYPE_CHECKING, ClassVar import awacs.iam import awacs.sts @@ -35,7 +35,7 @@ class AdminPreventPrivilegeEscalation(Blueprint): DESCRIPTION: ClassVar[str] = "Permission boundary for admin users." POLICY_NAME: ClassVar[str] = "AdminPreventPrivilegeEscalation" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "ApprovedPermissionBoundaries": { "default": [], "description": "List of policy names (not ARNs) that are approved to " @@ -55,42 +55,34 @@ def namespace(self) -> str: return self.context.namespace @cached_property - def approved_boundary_policies(self) -> List[Sub]: + def approved_boundary_policies(self) -> list[Sub]: """List of approved permission boundary policies.""" - tmp = [self.policy_arn] - for policy_name in self.variables["ApprovedPermissionBoundaries"]: - tmp.append( - Sub( - f"arn:${{AWS::Partition}}:iam::${{AWS::AccountId}}:policy/{policy_name}" - ) - ) - return tmp + return [ + self.policy_arn, + *[ + Sub(f"arn:${{AWS::Partition}}:iam::${{AWS::AccountId}}:policy/{policy_name}") + for policy_name in self.variables["ApprovedPermissionBoundaries"] + ], + ] @cached_property - def deny_assume_role_not_resources(self) -> List[Union[str, Sub]]: + def deny_assume_role_not_resources(self) -> list[str | Sub]: """List of IAM Role ARNs that can be assumed.""" - tmp: List[Union[str, Sub]] = [ - Sub( - f"arn:${{AWS::Partition}}:iam::${{AWS::AccountId}}:role/{self.namespace}-*" - ) + tmp: list[str | Sub] = [ + Sub(f"arn:${{AWS::Partition}}:iam::${{AWS::AccountId}}:role/{self.namespace}-*") ] - for arn in self.variables["DenyAssumeRoleNotResources"]: - tmp.append(arn) + tmp.extend(self.variables["DenyAssumeRoleNotResources"]) return tmp @property def policy_arn(self) -> Sub: """ARN of the IAM policy that will be created.""" - return Sub( - f"arn:${{AWS::Partition}}:iam::${{AWS::AccountId}}:policy/{self.POLICY_NAME}" - ) + return Sub(f"arn:${{AWS::Partition}}:iam::${{AWS::AccountId}}:policy/{self.POLICY_NAME}") @cached_property def statement_allow_admin_access(self) -> Statement: """Statement to allow admin access.""" - return Statement( - Action=[Action("*")], Effect=Allow, Resource=["*"], Sid="AllowAdminAccess" - ) + return Statement(Action=[Action("*")], Effect=Allow, Resource=["*"], Sid="AllowAdminAccess") @cached_property def statement_deny_alter_boundary_policy(self) -> Statement: @@ -143,9 +135,7 @@ def statement_deny_create_without_boundary(self) -> Statement: return Statement( Action=[awacs.iam.CreateRole, awacs.iam.CreateUser], Condition=Condition( - StringNotEquals( - {"iam:PermissionsBoundary": self.approved_boundary_policies} - ) + StringNotEquals({"iam:PermissionsBoundary": self.approved_boundary_policies}) ), Effect=Deny, Resource=[ @@ -162,14 +152,8 @@ def 
statement_deny_onica_sso(self) -> Statement: Action=[Action("*")], Effect=Deny, Resource=[ - Sub( - "arn:${AWS::Partition}:cloudformation:*:${AWS::AccountId}:stack/" - "onica-sso" - ), - Sub( - "arn:${AWS::Partition}:cloudformation:*:${AWS::AccountId}:stack/" - "onica-sso-*" - ), + Sub("arn:${AWS::Partition}:cloudformation:*:${AWS::AccountId}:stack/onica-sso"), + Sub("arn:${AWS::Partition}:cloudformation:*:${AWS::AccountId}:stack/onica-sso-*"), Sub("arn:${AWS::Partition}:iam::${AWS::AccountId}:policy/onica-sso"), Sub("arn:${AWS::Partition}:iam::${AWS::AccountId}:policy/onica-sso-*"), Sub("arn:${AWS::Partition}:iam::${AWS::AccountId}:role/onica-sso"), @@ -186,9 +170,7 @@ def statement_deny_put_boundary(self) -> Statement: awacs.iam.PutUserPermissionsBoundary, ], Condition=Condition( - StringNotEquals( - {"iam:PermissionsBoundary": self.approved_boundary_policies} - ) + StringNotEquals({"iam:PermissionsBoundary": self.approved_boundary_policies}) ), Effect=Deny, Resource=[ @@ -206,16 +188,14 @@ def statement_deny_remove_boundary_policy(self) -> Statement: awacs.iam.DeleteRolePermissionsBoundary, awacs.iam.DeleteUserPermissionsBoundary, ], - Condition=Condition( - StringEquals({"iam:PermissionsBoundary": self.policy_arn}) - ), + Condition=Condition(StringEquals({"iam:PermissionsBoundary": self.policy_arn})), Effect=Deny, Resource=["*"], Sid="DenyRemovalOfBoundaryFromUserOrRole", ) @cached_property - def statements(self) -> List[Statement]: + def statements(self) -> list[Statement]: """List of statements to add to the policy.""" return [ self.statement_allow_admin_access, diff --git a/infrastructure/blueprints/test_runner_boundary.py b/infrastructure/blueprints/test_runner_boundary.py index 4c774e461..d1c811931 100644 --- a/infrastructure/blueprints/test_runner_boundary.py +++ b/infrastructure/blueprints/test_runner_boundary.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import ClassVar, List +from typing import ClassVar import awacs.iam import awacs.s3 @@ -123,10 +123,7 @@ def statement_deny_namespace(self) -> Statement: Action("cloudformation", "List*"), ], Resource=[ - Sub( - "arn:aws:cloudformation:*:${AWS::AccountId}:stack/" - f"{self.namespace}-*" - ), + Sub(f"arn:aws:cloudformation:*:${{AWS::AccountId}}:stack/{self.namespace}-*"), f"arn:aws:s3:::{self.namespace}", f"arn:aws:s3:::{self.namespace}/*", f"arn:aws:s3:::{self.namespace}-*", @@ -135,9 +132,10 @@ def statement_deny_namespace(self) -> Statement: ) @cached_property - def statements(self) -> List[Statement]: + def statements(self) -> list[Statement]: """List of statements to add to the policy.""" - return super().statements + [ + return [ + *super().statements, self.statement_deny_change_cfngin_bucket, self.statement_deny_cloudtrail, self.statement_deny_iam, diff --git a/infrastructure/blueprints/test_runner_user.py b/infrastructure/blueprints/test_runner_user.py index abf6ee80b..0d8e81779 100644 --- a/infrastructure/blueprints/test_runner_user.py +++ b/infrastructure/blueprints/test_runner_user.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, ClassVar, Dict +from typing import TYPE_CHECKING, ClassVar import awacs.sts from awacs.aws import Deny, PolicyDocument, Statement @@ -17,7 +17,7 @@ class TestRunnerUser(AdminUser): """Blueprint for a test runner user.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "DenyAssumeRoleNotResources": {"type": list, "default": []}, "PermissionsBoundary": 
{"type": str}, "UserName": {"type": str, "default": ""}, @@ -34,8 +34,7 @@ def create_template(self) -> None: Statement( Action=[awacs.sts.AssumeRole], Effect=Deny, - NotResource=self.variables["DenyAssumeRoleNotResources"] - or ["*"], + NotResource=self.variables["DenyAssumeRoleNotResources"] or ["*"], ) ], Version="2012-10-17", diff --git a/infrastructure/public/common/bucket-and-user.cdk/package.json b/infrastructure/public/common/bucket-and-user.cdk/package.json index b883231ef..7e24b9189 100644 --- a/infrastructure/public/common/bucket-and-user.cdk/package.json +++ b/infrastructure/public/common/bucket-and-user.cdk/package.json @@ -1,26 +1,26 @@ { - "name": "myapp", - "version": "1.0.0", - "scripts": { - "build": "tsc", - "lint": "tslint -c tslint.json 'bin/**/*.ts' 'lib/**/*.ts'", - "watch": "tsc -w", - "cdk": "cdk" - }, - "devDependencies": { - "@types/node": "8.10.40", - "@types/source-map-support": "^0.5.0", - "aws-cdk": "^2.101.1", - "aws-sdk": "^2.1511.0", - "prompt": "^1.0.0", - "ts-node": "^8.1.0", - "tslint": "^5.20.0", - "typescript": "^3.3.3333" - }, - "dependencies": { - "@aws-cdk/aws-iam": "^1.204.0", - "@aws-cdk/aws-s3": "^1.204.0", - "@aws-cdk/core": "^1.15.0", - "source-map-support": "^0.5.9" - } + "dependencies": { + "@aws-cdk/aws-iam": "^1.204.0", + "@aws-cdk/aws-s3": "^1.204.0", + "@aws-cdk/core": "^1.15.0", + "source-map-support": "^0.5.9" + }, + "devDependencies": { + "@types/node": "8.10.40", + "@types/source-map-support": "^0.5.0", + "aws-cdk": "^2.101.1", + "aws-sdk": "^2.1511.0", + "prompt": "^1.0.0", + "ts-node": "^8.1.0", + "tslint": "^5.20.0", + "typescript": "^3.3.3333" + }, + "name": "myapp", + "scripts": { + "build": "tsc", + "cdk": "cdk", + "lint": "tslint -c tslint.json 'bin/**/*.ts' 'lib/**/*.ts'", + "watch": "tsc -w" + }, + "version": "1.0.0" } diff --git a/npm/.npmignore b/npm/.npmignore deleted file mode 100644 index da2927653..000000000 --- a/npm/.npmignore +++ /dev/null @@ -1,24 +0,0 @@ -**/.egg -**/.travis -**/.vscode -**/codebuild -**/docs -**/integration_tests -**/build -**/dist -**/quickstarts -**/scripts -**/src -**/test -**/tf_test -**/.env -**/.gitignore -**/.pylintrv -**/.travis.yml -**/buildspec.yml -**/Makefile -**/MANAGEST.ini -**/Pipfile* -**/README.rst -**/runway.spec -**/setup* diff --git a/npm/postinstall.js b/npm/postinstall.js deleted file mode 100755 index e10cf8e05..000000000 --- a/npm/postinstall.js +++ /dev/null @@ -1,102 +0,0 @@ -#!/usr/bin/env node -const os = require('os'); -const fs = require('fs'); -const path = require('path'); -const tar = require('tar'); - -// e.g. '../..' for 'runway'; '../../..' for `@onica/runway', etc -let pathTraversal = '..' -for (var i = 0; i < process.env.npm_package_name.split("/").length; i++) { - pathTraversal += '/..' 
-} - -const basepath = `${path.resolve(process.cwd(), pathTraversal)}/node_modules`; // goes to the top level node_modules -const moduleDir = `${basepath}/${process.env.npm_package_name}/src`; -let osName; -let binPath; - -function symLink(target, dest_path, callback) { - return fs.symlink(target, dest_path, 'file', (err, data) => { - // error is thrown if the file already exists - if (err && err.code === 'EEXIST') { - fs.unlink(dest_path, (err, data) => { - if (err) { - console.log(err); - throw err; - } else { - return symLink(target, dest_path, callback); - } - }); - } else { - return callback(err, data); - } - }); -} - -// translate os name used during binary build -switch (os.platform()) { - case 'darwin': - osName = 'osx'; - break; - case 'win32': - osName = 'windows'; - break; - default: - osName = os.platform(); -} - -fs.mkdir(`${moduleDir}/runway`, { recursive: true }, (err, data) => { - if (err) throw err; - - // unzip the tar for os version to ./src/runway - tar.x({ - cwd: `${moduleDir}/runway`, - file: `${moduleDir}/${osName}/runway.tar.gz`, - gzip: true, - unlink: true - }, (err, data) => { - if (err) throw err; - - if (os.platform() !== 'win32') { - // determine correct bin path to use based on global/local install - if (process.env.npm_config_global) { - binPath = `${process.env.NVM_BIN || '/usr/local/bin'}/runway`; - } else { - try { - fs.mkdirSync(`${basepath}/.bin`, { recursive: true }); - } catch (err) { - // shouldn't need to catch an EEXIST error with the recursive option - // set on mkdirSync, but it still can occur (e.g. on older - // versions of nodejs without the recursive option) - // https://github.com/nodejs/node/issues/27293 - if (err && err.code !== 'EEXIST') { - throw err; - } - } - binPath = `${basepath}/.bin/runway`; - } - // create symlink in bin to the appropriate runway binary - symLink(`${moduleDir}/runway/runway-cli`, binPath, (err, data) => { - if (err) { - if (err.code === 'EACCES') { - console.log('User does not have sufficient privileges to install. 
Please try again with sudo.') - } - throw err; - } - }); - } else { - // determine correct bin path to use based on global/local install - if (process.env.npm_config_global) { - binPath = path.resolve(process.env.APPDATA, './npm/runway.bat'); - } else { - fs.mkdirSync(`${basepath}/.bin`, { recursive: true }); - binPath = `${basepath}/.bin/runway.bat`; - } - // symlink does not work for windows so we need to use a bat file - // this will overwrite the file if it already exists so no fancy error handling needed - fs.writeFile(binPath, `@"${moduleDir}/runway/runway-cli.exe" %*`, (err, data) => { - if (err) throw err; - }) - } - }); -}); diff --git a/npm/preuninstall.js b/npm/preuninstall.js deleted file mode 100644 index 77ffb3ab5..000000000 --- a/npm/preuninstall.js +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env node -const os = require('os'); -const fs = require('fs'); -const path = require('path'); - -const basepath = `${path.resolve(process.cwd(), '../..')}/node_modules`; // goes to the top level node_modules - -if (os.platform() === 'win32') { - if (process.env.npm_config_global) { - binPath = path.resolve(process.env.APPDATA, './npm/runway.bat'); - } else { - binPath = `${basepath}/.bin/runway.bat`; - } -} else { - if (process.env.npm_config_global) { - binPath = `${process.env.NVM_BIN || '/usr/local/bin'}/runway`; - } else { - binPath = `${basepath}/.bin/runway`; - } -} - -// remove symlink/exe from bin created by postinstall script -fs.unlink(binPath, (err, data) => { - if (err) { - // ignore file/dir missing - if (err.code !== 'ENOENT') { - throw err; - } - } -}); diff --git a/package-lock.json b/package-lock.json index d8244be44..a555975dc 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,53 +1,43 @@ { "name": "runway", - "version": "2.0.0-dev", + "version": "0.0.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "runway", - "version": "2.0.0-dev", - "hasInstallScript": true, - "license": "Apache-2.0", - "os": [ - "darwin", - "linux", - "win32" - ], - "dependencies": { - "tar": "^7.2.0" - }, + "version": "0.0.0", "devDependencies": { - "cspell": "^8.10.0", - "pyright": "^1.1.223" + "cspell": "^8.14.2", + "pyright": "^1.1.377" } }, "node_modules/@cspell/cspell-bundled-dicts": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/cspell-bundled-dicts/-/cspell-bundled-dicts-8.10.0.tgz", - "integrity": "sha512-phqOVx1ArqfCVzuE0qHOEKCz0xVRlBzbQHCwab0twu1RmOK6ShLzQoAZqtEFK1Rm1fCguHJmQyuP+34lGt6nfQ==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/cspell-bundled-dicts/-/cspell-bundled-dicts-8.14.2.tgz", + "integrity": "sha512-Kv2Utj/RTSxfufGXkkoTZ/3ErCsYWpCijtDFr/FwSsM7mC0PzLpdlcD9xjtgrJO5Kwp7T47iTG21U4Mwddyi8Q==", "dev": true, "dependencies": { "@cspell/dict-ada": "^4.0.2", - "@cspell/dict-aws": "^4.0.2", + "@cspell/dict-aws": "^4.0.3", "@cspell/dict-bash": "^4.1.3", - "@cspell/dict-companies": "^3.1.2", - "@cspell/dict-cpp": "^5.1.10", + "@cspell/dict-companies": "^3.1.4", + "@cspell/dict-cpp": "^5.1.12", "@cspell/dict-cryptocurrencies": "^5.0.0", "@cspell/dict-csharp": "^4.0.2", - "@cspell/dict-css": "^4.0.12", + "@cspell/dict-css": "^4.0.13", "@cspell/dict-dart": "^2.0.3", "@cspell/dict-django": "^4.1.0", "@cspell/dict-docker": "^1.1.7", "@cspell/dict-dotnet": "^5.0.2", "@cspell/dict-elixir": "^4.0.3", "@cspell/dict-en_us": "^4.3.23", - "@cspell/dict-en-common-misspellings": "^2.0.3", + "@cspell/dict-en-common-misspellings": "^2.0.4", "@cspell/dict-en-gb": "1.1.33", "@cspell/dict-filetypes": "^3.0.4", 
"@cspell/dict-fonts": "^4.0.0", "@cspell/dict-fsharp": "^1.0.1", - "@cspell/dict-fullstack": "^3.1.8", + "@cspell/dict-fullstack": "^3.2.0", "@cspell/dict-gaming-terms": "^1.0.5", "@cspell/dict-git": "^3.0.0", "@cspell/dict-golang": "^6.0.9", @@ -57,28 +47,28 @@ "@cspell/dict-html-symbol-entities": "^4.0.0", "@cspell/dict-java": "^5.0.7", "@cspell/dict-julia": "^1.0.1", - "@cspell/dict-k8s": "^1.0.5", + "@cspell/dict-k8s": "^1.0.6", "@cspell/dict-latex": "^4.0.0", "@cspell/dict-lorem-ipsum": "^4.0.0", "@cspell/dict-lua": "^4.0.3", "@cspell/dict-makefile": "^1.0.0", "@cspell/dict-monkeyc": "^1.0.6", "@cspell/dict-node": "^5.0.1", - "@cspell/dict-npm": "^5.0.16", + "@cspell/dict-npm": "^5.0.18", "@cspell/dict-php": "^4.0.8", - "@cspell/dict-powershell": "^5.0.4", + "@cspell/dict-powershell": "^5.0.5", "@cspell/dict-public-licenses": "^2.0.7", - "@cspell/dict-python": "^4.2.1", + "@cspell/dict-python": "^4.2.4", "@cspell/dict-r": "^2.0.1", "@cspell/dict-ruby": "^5.0.2", - "@cspell/dict-rust": "^4.0.4", - "@cspell/dict-scala": "^5.0.2", - "@cspell/dict-software-terms": "^3.4.9", - "@cspell/dict-sql": "^2.1.3", + "@cspell/dict-rust": "^4.0.5", + "@cspell/dict-scala": "^5.0.3", + "@cspell/dict-software-terms": "^4.0.6", + "@cspell/dict-sql": "^2.1.5", "@cspell/dict-svelte": "^1.0.2", "@cspell/dict-swift": "^2.0.1", "@cspell/dict-terraform": "^1.0.0", - "@cspell/dict-typescript": "^3.1.5", + "@cspell/dict-typescript": "^3.1.6", "@cspell/dict-vue": "^3.0.0" }, "engines": { @@ -86,30 +76,30 @@ } }, "node_modules/@cspell/cspell-json-reporter": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/cspell-json-reporter/-/cspell-json-reporter-8.10.0.tgz", - "integrity": "sha512-hGMj4TmqqKdfeWQa1kIvFLKyebPTzFUdUugiy4iD5CuUcruIWeXT1XzCpCY726MjLFrZ34uW01MglG2Ptsn3Qg==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/cspell-json-reporter/-/cspell-json-reporter-8.14.2.tgz", + "integrity": "sha512-TZavcnNIZKX1xC/GNj80RgFVKHCT4pHT0qm9jCsQFH2QJfyCrUlkEvotKGSQ04lAyCwWg6Enq95qhouF8YbKUQ==", "dev": true, "dependencies": { - "@cspell/cspell-types": "8.10.0" + "@cspell/cspell-types": "8.14.2" }, "engines": { "node": ">=18" } }, "node_modules/@cspell/cspell-pipe": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/cspell-pipe/-/cspell-pipe-8.10.0.tgz", - "integrity": "sha512-qspmnz1d+0QgPwnBkoqBGY2GYtcA8uaQLCLhen8QOCybqhlHbn57hzeec8QZVDigJYZ/rVQbOceQ11QRK7IaMA==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/cspell-pipe/-/cspell-pipe-8.14.2.tgz", + "integrity": "sha512-aWMoXZAXEre0/M9AYWOW33YyOJZ06i4vvsEpWBDWpHpWQEmsR/7cMMgld8Pp3wlEjIUclUAKTYmrZ61PFWU/og==", "dev": true, "engines": { "node": ">=18" } }, "node_modules/@cspell/cspell-resolver": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/cspell-resolver/-/cspell-resolver-8.10.0.tgz", - "integrity": "sha512-ln8k05sR3pVaEYTm8CJ9fLXagdiUcy3c1sC956PJ3MJAq4y2RDedPOD306umF5EjmSasbM1fgbJ8T7L2nAgeJQ==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/cspell-resolver/-/cspell-resolver-8.14.2.tgz", + "integrity": "sha512-pSyBsAvslaN0dx0pHdvECJEuFDDBJGAD6G8U4BVbIyj2OPk0Ox0HrZIj6csYxxoJERAgNO/q7yCPwa4j9NNFXg==", "dev": true, "dependencies": { "global-directory": "^4.0.1" @@ -119,18 +109,18 @@ } }, "node_modules/@cspell/cspell-service-bus": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/cspell-service-bus/-/cspell-service-bus-8.10.0.tgz", - "integrity": 
"sha512-zxW2DDzeA7O7z92s+H2gAnqe0oOy0AxWAXv0orJnV4XAUJEnozgL+PC41l91PLKbYNVxpaXi/KKz4AAUIqI3AQ==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/cspell-service-bus/-/cspell-service-bus-8.14.2.tgz", + "integrity": "sha512-WUF7xf3YgXYIqjmBwLcVugYIrYL4WfXchgSo9rmbbnOcAArzsK+HKfzb4AniZAJ1unxcIQ0JnVlRmnCAKPjjLg==", "dev": true, "engines": { "node": ">=18" } }, "node_modules/@cspell/cspell-types": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/cspell-types/-/cspell-types-8.10.0.tgz", - "integrity": "sha512-mCzg0fLa9r8YirQmPM5yGu6VFEk/gsNEsZjmYqkDpzMy2plEpcg2QkTu58juL3XroeA7dhWn7pDCEhUGxt7eIg==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/cspell-types/-/cspell-types-8.14.2.tgz", + "integrity": "sha512-MRY8MjBNOKGMDSkxAKueYAgVL43miO+lDcLCBBP+7cNXqHiUFMIZteONcGp3kJT0dWS04dN6lKAXvaNF0aWcng==", "dev": true, "engines": { "node": ">=18" @@ -143,9 +133,9 @@ "dev": true }, "node_modules/@cspell/dict-aws": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/@cspell/dict-aws/-/dict-aws-4.0.2.tgz", - "integrity": "sha512-aNGHWSV7dRLTIn8WJemzLoMF62qOaiUQlgnsCwH5fRCD/00gsWCwg106pnbkmK4AyabyxzneOV4dfecDJWkSxw==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@cspell/dict-aws/-/dict-aws-4.0.3.tgz", + "integrity": "sha512-0C0RQ4EM29fH0tIYv+EgDQEum0QI6OrmjENC9u98pB8UcnYxGG/SqinuPxo+TgcEuInj0Q73MsBpJ1l5xUnrsw==", "dev": true }, "node_modules/@cspell/dict-bash": { @@ -155,15 +145,15 @@ "dev": true }, "node_modules/@cspell/dict-companies": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@cspell/dict-companies/-/dict-companies-3.1.2.tgz", - "integrity": "sha512-OwR5i1xbYuJX7FtHQySmTy3iJtPV1rZQ3jFCxFGwrA1xRQ4rtRcDQ+sTXBCIAoJHkXa84f9J3zsngOKmMGyS/w==", + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@cspell/dict-companies/-/dict-companies-3.1.4.tgz", + "integrity": "sha512-y9e0amzEK36EiiKx3VAA+SHQJPpf2Qv5cCt5eTUSggpTkiFkCh6gRKQ97rVlrKh5GJrqinDwYIJtTsxuh2vy2Q==", "dev": true }, "node_modules/@cspell/dict-cpp": { - "version": "5.1.10", - "resolved": "https://registry.npmjs.org/@cspell/dict-cpp/-/dict-cpp-5.1.10.tgz", - "integrity": "sha512-BmIF0sAz2BgGEOwzYIeEm9ALneDjd1tcTbFbo+A1Hcq3zOKP8yViSgxS9CEN30KOZIyph6Tldp531UPEpoEl0Q==", + "version": "5.1.13", + "resolved": "https://registry.npmjs.org/@cspell/dict-cpp/-/dict-cpp-5.1.13.tgz", + "integrity": "sha512-cgF438519dJHCD1ERMfyp8UDjwinu+njzHnzOvTuY9VUn30koT7xyLq8DTLebIoxOy5FOSMMiMK3vPBXZjzl1g==", "dev": true }, "node_modules/@cspell/dict-cryptocurrencies": { @@ -179,9 +169,9 @@ "dev": true }, "node_modules/@cspell/dict-css": { - "version": "4.0.12", - "resolved": "https://registry.npmjs.org/@cspell/dict-css/-/dict-css-4.0.12.tgz", - "integrity": "sha512-vGBgPM92MkHQF5/2jsWcnaahOZ+C6OE/fPvd5ScBP72oFY9tn5GLuomcyO0z8vWCr2e0nUSX1OGimPtcQAlvSw==", + "version": "4.0.13", + "resolved": "https://registry.npmjs.org/@cspell/dict-css/-/dict-css-4.0.13.tgz", + "integrity": "sha512-WfOQkqlAJTo8eIQeztaH0N0P+iF5hsJVKFuhy4jmARPISy8Efcv8QXk2/IVbmjJH0/ZV7dKRdnY5JFVXuVz37g==", "dev": true }, "node_modules/@cspell/dict-dart": { @@ -227,9 +217,9 @@ "dev": true }, "node_modules/@cspell/dict-en-common-misspellings": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@cspell/dict-en-common-misspellings/-/dict-en-common-misspellings-2.0.3.tgz", - "integrity": "sha512-8nF1z9nUiSgMyikL66HTbDO7jCGtB24TxKBasXIBwkBKMDZgA2M883iXdeByy6m1JJUcCGFkSftVYp2W0bUgjw==", + "version": "2.0.4", + "resolved": 
"https://registry.npmjs.org/@cspell/dict-en-common-misspellings/-/dict-en-common-misspellings-2.0.4.tgz", + "integrity": "sha512-lvOiRjV/FG4pAGZL3PN2GCVHSTCE92cwhfLGGkOsQtxSmef6WCHfHwp9auafkBlX0yFQSKDfq6/TlpQbjbJBtQ==", "dev": true }, "node_modules/@cspell/dict-en-gb": { @@ -257,9 +247,9 @@ "dev": true }, "node_modules/@cspell/dict-fullstack": { - "version": "3.1.8", - "resolved": "https://registry.npmjs.org/@cspell/dict-fullstack/-/dict-fullstack-3.1.8.tgz", - "integrity": "sha512-YRlZupL7uqMCtEBK0bDP9BrcPnjDhz7m4GBqCc1EYqfXauHbLmDT8ELha7T/E7wsFKniHSjzwDZzhNXo2lusRQ==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@cspell/dict-fullstack/-/dict-fullstack-3.2.0.tgz", + "integrity": "sha512-sIGQwU6G3rLTo+nx0GKyirR5dQSFeTIzFTOrURw51ISf+jKG9a3OmvsVtc2OANfvEAOLOC9Wfd8WYhmsO8KRDQ==", "dev": true }, "node_modules/@cspell/dict-gaming-terms": { @@ -275,9 +265,9 @@ "dev": true }, "node_modules/@cspell/dict-golang": { - "version": "6.0.9", - "resolved": "https://registry.npmjs.org/@cspell/dict-golang/-/dict-golang-6.0.9.tgz", - "integrity": "sha512-etDt2WQauyEQDA+qPS5QtkYTb2I9l5IfQftAllVoB1aOrT6bxxpHvMEpJ0Hsn/vezxrCqa/BmtUbRxllIxIuSg==", + "version": "6.0.11", + "resolved": "https://registry.npmjs.org/@cspell/dict-golang/-/dict-golang-6.0.11.tgz", + "integrity": "sha512-BMFIDGh1HaFUe1cYBT1dotqyIQG2j3VkNntGQTBa/7i0aBnC5PBJDiAXnUeBHi0AVrz0hyAc7xtcK5KyKCEzwg==", "dev": true }, "node_modules/@cspell/dict-google": { @@ -317,9 +307,9 @@ "dev": true }, "node_modules/@cspell/dict-k8s": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@cspell/dict-k8s/-/dict-k8s-1.0.5.tgz", - "integrity": "sha512-Cj+/ZV4S+MKlwfocSJZqe/2UAd/sY8YtlZjbK25VN1nCnrsKrBjfkX29vclwSj1U9aJg4Z9jw/uMjoaKu9ZrpQ==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@cspell/dict-k8s/-/dict-k8s-1.0.6.tgz", + "integrity": "sha512-srhVDtwrd799uxMpsPOQqeDJY+gEocgZpoK06EFrb4GRYGhv7lXo9Fb+xQMyQytzOW9dw4DNOEck++nacDuymg==", "dev": true }, "node_modules/@cspell/dict-latex": { @@ -359,9 +349,9 @@ "dev": true }, "node_modules/@cspell/dict-npm": { - "version": "5.0.16", - "resolved": "https://registry.npmjs.org/@cspell/dict-npm/-/dict-npm-5.0.16.tgz", - "integrity": "sha512-ZWPnLAziEcSCvV0c8k9Qj88pfMu+wZwM5Qks87ShsfBgI8uLZ9tGHravA7gmjH1Gd7Bgxy2ulvXtSqIWPh1lew==", + "version": "5.0.18", + "resolved": "https://registry.npmjs.org/@cspell/dict-npm/-/dict-npm-5.0.18.tgz", + "integrity": "sha512-weMTyxWpzz19q4wv9n183BtFvdD5fCjtze+bFKpl+4rO/YlPhHL2cXLAeexJz/VDSBecwX4ybTZYoknd1h2J4w==", "dev": true }, "node_modules/@cspell/dict-php": { @@ -371,21 +361,21 @@ "dev": true }, "node_modules/@cspell/dict-powershell": { - "version": "5.0.4", - "resolved": "https://registry.npmjs.org/@cspell/dict-powershell/-/dict-powershell-5.0.4.tgz", - "integrity": "sha512-eosDShapDgBWN9ULF7+sRNdUtzRnUdsfEdBSchDm8FZA4HOqxUSZy3b/cX/Rdw0Fnw0AKgk0kzgXw7tS6vwJMQ==", + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/@cspell/dict-powershell/-/dict-powershell-5.0.5.tgz", + "integrity": "sha512-3JVyvMoDJesAATYGOxcUWPbQPUvpZmkinV3m8HL1w1RrjeMVXXuK7U1jhopSneBtLhkU+9HKFwgh9l9xL9mY2Q==", "dev": true }, "node_modules/@cspell/dict-public-licenses": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/@cspell/dict-public-licenses/-/dict-public-licenses-2.0.7.tgz", - "integrity": "sha512-KlBXuGcN3LE7tQi/GEqKiDewWGGuopiAD0zRK1QilOx5Co8XAvs044gk4MNIQftc8r0nHeUI+irJKLGcR36DIQ==", + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/@cspell/dict-public-licenses/-/dict-public-licenses-2.0.8.tgz", + "integrity": 
"sha512-Sup+tFS7cDV0fgpoKtUqEZ6+fA/H+XUgBiqQ/Fbs6vUE3WCjJHOIVsP+udHuyMH7iBfJ4UFYOYeORcY4EaKdMg==", "dev": true }, "node_modules/@cspell/dict-python": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@cspell/dict-python/-/dict-python-4.2.1.tgz", - "integrity": "sha512-9X2jRgyM0cxBoFQRo4Zc8oacyWnXi+0/bMI5FGibZNZV4y/o9UoFEr6agjU260/cXHTjIdkX233nN7eb7dtyRg==", + "version": "4.2.4", + "resolved": "https://registry.npmjs.org/@cspell/dict-python/-/dict-python-4.2.4.tgz", + "integrity": "sha512-sCtLBqMreb+8zRW2bXvFsfSnRUVU6IFm4mT6Dc4xbz0YajprbaPPh/kOUTw5IJRP8Uh+FFb7Xp2iH03CNWRq/A==", "dev": true, "dependencies": { "@cspell/dict-data-science": "^2.0.1" @@ -404,27 +394,27 @@ "dev": true }, "node_modules/@cspell/dict-rust": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/@cspell/dict-rust/-/dict-rust-4.0.4.tgz", - "integrity": "sha512-v9/LcZknt/Xq7m1jdTWiQEtmkVVKdE1etAfGL2sgcWpZYewEa459HeWndNA0gfzQrpWX9sYay18mt7pqClJEdA==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@cspell/dict-rust/-/dict-rust-4.0.5.tgz", + "integrity": "sha512-DIvlPRDemjKQy8rCqftAgGNZxY5Bg+Ps7qAIJjxkSjmMETyDgl0KTVuaJPt7EK4jJt6uCZ4ILy96npsHDPwoXA==", "dev": true }, "node_modules/@cspell/dict-scala": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/@cspell/dict-scala/-/dict-scala-5.0.2.tgz", - "integrity": "sha512-v97ClgidZt99JUm7OjhQugDHmhx4U8fcgunHvD/BsXWjXNj4cTr0m0YjofyZoL44WpICsNuFV9F/sv9OM5HUEw==", + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@cspell/dict-scala/-/dict-scala-5.0.3.tgz", + "integrity": "sha512-4yGb4AInT99rqprxVNT9TYb1YSpq58Owzq7zi3ZS5T0u899Y4VsxsBiOgHnQ/4W+ygi+sp+oqef8w8nABR2lkg==", "dev": true }, "node_modules/@cspell/dict-software-terms": { - "version": "3.4.9", - "resolved": "https://registry.npmjs.org/@cspell/dict-software-terms/-/dict-software-terms-3.4.9.tgz", - "integrity": "sha512-J2uNH3ScBPQijXyzLfxsC1CYgq36MWvbynJzQJ15ZazTsecC0pQHynm3/6VH4X/BphV2eXB0GRJT3yMicYLGCw==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/@cspell/dict-software-terms/-/dict-software-terms-4.0.8.tgz", + "integrity": "sha512-LmsIHkjWgVEg3Wrcomaj5Fp/m/l2Eiqj8Qhgcj8rAnui8DPqiKLSZKL3f1iRiWEMqbnzjpFpcmb1bOp8mcBWkA==", "dev": true }, "node_modules/@cspell/dict-sql": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/@cspell/dict-sql/-/dict-sql-2.1.3.tgz", - "integrity": "sha512-SEyTNKJrjqD6PAzZ9WpdSu6P7wgdNtGV2RV8Kpuw1x6bV+YsSptuClYG+JSdRExBTE6LwIe1bTklejUp3ZP8TQ==", + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@cspell/dict-sql/-/dict-sql-2.1.5.tgz", + "integrity": "sha512-FmxanytHXss7GAWAXmgaxl3icTCW7YxlimyOSPNfm+njqeUDjw3kEv4mFNDDObBJv8Ec5AWCbUDkWIpkE3IpKg==", "dev": true }, "node_modules/@cspell/dict-svelte": { @@ -446,9 +436,9 @@ "dev": true }, "node_modules/@cspell/dict-typescript": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/@cspell/dict-typescript/-/dict-typescript-3.1.5.tgz", - "integrity": "sha512-EkIwwNV/xqEoBPJml2S16RXj65h1kvly8dfDLgXerrKw6puybZdvAHerAph6/uPTYdtLcsPyJYkPt5ISOJYrtw==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/@cspell/dict-typescript/-/dict-typescript-3.1.6.tgz", + "integrity": "sha512-1beC6O4P/j23VuxX+i0+F7XqPVc3hhiAzGJHEKqnWf5cWAXQtg0xz3xQJ5MvYx2a7iLaSa+lu7+05vG9UHyu9Q==", "dev": true }, "node_modules/@cspell/dict-vue": { @@ -458,9 +448,9 @@ "dev": true }, "node_modules/@cspell/dynamic-import": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/dynamic-import/-/dynamic-import-8.10.0.tgz", - 
"integrity": "sha512-QdZWDZfDAFHHjcBW6otijlblCB3T2r8b5q2X0+XOjE2yd+uF78Ma8pEHrgzQ7sRAkVV9SehhHqdkxOfOkYorKQ==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/dynamic-import/-/dynamic-import-8.14.2.tgz", + "integrity": "sha512-5MbqtIligU7yPwHWU/5yFCgMvur4i1bRAF1Cy8y2dDtHsa204S/w/SaXs+51EFLp2eNbCiBisCBrwJFT7R1RxA==", "dev": true, "dependencies": { "import-meta-resolve": "^4.1.0" @@ -469,49 +459,31 @@ "node": ">=18.0" } }, - "node_modules/@cspell/strong-weak-map": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/strong-weak-map/-/strong-weak-map-8.10.0.tgz", - "integrity": "sha512-V7lq4k1tebppBdzzqdgk6FHch/PG4kIWQ2k6b9JT6yqc7ewN75KwU0tSgIMoxoJFedRE2ZnUG404SAd7jWYxug==", + "node_modules/@cspell/filetypes": { + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/filetypes/-/filetypes-8.14.2.tgz", + "integrity": "sha512-ZevArA0mWeVTTqHicxCPZIAeCibpY3NwWK/x6d1Lgu7RPk/daoGAM546Q2SLChFu+r10tIH7pRG212A6Q9ihPA==", "dev": true, "engines": { "node": ">=18" } }, - "node_modules/@cspell/url": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/url/-/url-8.10.0.tgz", - "integrity": "sha512-U4+uMJKe3d+BwBjrzhNVxc1CUBVynlw5qeJkSdZJzqOdDFFA9yiKfLpHYmY5Sc/Iin8XAYuAd09Mxsc3E714Iw==", + "node_modules/@cspell/strong-weak-map": { + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/strong-weak-map/-/strong-weak-map-8.14.2.tgz", + "integrity": "sha512-7sRzJc392CQYNNrtdPEfOHJdRqsqf6nASCtbS5A9hL2UrdWQ4uN7r/D+Y1HpuizwY9eOkZvarcFfsYt5wE0Pug==", "dev": true, "engines": { - "node": ">=18.0" - } - }, - "node_modules/@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "engines": { - "node": ">=12" + "node": ">=18" } }, - "node_modules/@isaacs/fs-minipass": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", - "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", - "dependencies": { - "minipass": "^7.0.4" - }, + "node_modules/@cspell/url": { + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/url/-/url-8.14.2.tgz", + "integrity": "sha512-YmWW+B/2XQcCynLpiAQF77Bitm5Cynw3/BICZkbdveKjJkUzEmXB+U2qWuwXOyU8xUYuwkP63YM8McnI567rUA==", + "dev": true, "engines": { - "node": ">=18.0.0" + "node": ">=18.0" } }, "node_modules/@nodelib/fs.scandir": { @@ -549,19 +521,11 @@ "node": ">= 8" } }, - "node_modules/@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "optional": true, - "engines": { - "node": ">=14" - } - }, "node_modules/ansi-regex": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, "engines": { "node": ">=12" }, @@ -569,36 +533,12 @@ "url": "https://github.com/chalk/ansi-regex?sponsor=1" } }, - 
"node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, "node_modules/array-timsort": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/array-timsort/-/array-timsort-1.0.3.tgz", "integrity": "sha512-/+3GRL7dDAGEfM6TseQk/U+mi18TU2Ms9I3UlLdUMhz2hbvGNTKdj9xniwXfUqgYhHxRx0+8UnKkvlNwVU+cWQ==", "dev": true }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" - }, - "node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, "node_modules/braces": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", @@ -647,14 +587,6 @@ "url": "https://github.com/chalk/chalk-template?sponsor=1" } }, - "node_modules/chownr": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", - "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", - "engines": { - "node": ">=18" - } - }, "node_modules/clear-module": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/clear-module/-/clear-module-4.1.2.tgz", @@ -671,22 +603,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, "node_modules/commander": { "version": "12.1.0", "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", @@ -697,9 +613,9 @@ } }, "node_modules/comment-json": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/comment-json/-/comment-json-4.2.3.tgz", - "integrity": "sha512-SsxdiOf064DWoZLH799Ata6u7iV658A11PlWtZATDlXPpKGJnbJZ5Z24ybixAi+LUUqJ/GKowAejtC5GFUG7Tw==", + "version": "4.2.5", + "resolved": "https://registry.npmjs.org/comment-json/-/comment-json-4.2.5.tgz", + "integrity": "sha512-bKw/r35jR3HGt5PEPm1ljsQQGyCrR8sFGNiN5L+ykDHdpO8Smxkrkla9Yi6NkQyUrb8V54PGhfMs6NrIwtxtdw==", "dev": true, "dependencies": { "array-timsort": "^1.0.3", @@ -718,43 +634,31 @@ "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", "dev": true }, - "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": 
"sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/cspell": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/cspell/-/cspell-8.10.0.tgz", - "integrity": "sha512-7HzPH84a5IzDQZB8qgFsOl/5w0NECG193MfR1aLkczv1v/13aGsQGiG33kXFufCuTyVYa5CrcwXaPXDRpWZ13Q==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell/-/cspell-8.14.2.tgz", + "integrity": "sha512-ii/W7fwO4chNQVYl1C/8k7RW8EXzLb69rvg08p8mSJx8B2UasVJ9tuJpTH2Spo1jX6N3H0dKPWUbd1fAmdAhPg==", "dev": true, "dependencies": { - "@cspell/cspell-json-reporter": "8.10.0", - "@cspell/cspell-pipe": "8.10.0", - "@cspell/cspell-types": "8.10.0", - "@cspell/dynamic-import": "8.10.0", + "@cspell/cspell-json-reporter": "8.14.2", + "@cspell/cspell-pipe": "8.14.2", + "@cspell/cspell-types": "8.14.2", + "@cspell/dynamic-import": "8.14.2", + "@cspell/url": "8.14.2", "chalk": "^5.3.0", "chalk-template": "^1.1.0", "commander": "^12.1.0", - "cspell-gitignore": "8.10.0", - "cspell-glob": "8.10.0", - "cspell-io": "8.10.0", - "cspell-lib": "8.10.0", + "cspell-dictionary": "8.14.2", + "cspell-gitignore": "8.14.2", + "cspell-glob": "8.14.2", + "cspell-io": "8.14.2", + "cspell-lib": "8.14.2", "fast-glob": "^3.3.2", "fast-json-stable-stringify": "^2.1.0", - "file-entry-cache": "^8.0.0", + "file-entry-cache": "^9.0.0", "get-stdin": "^9.0.0", - "semver": "^7.6.2", - "strip-ansi": "^7.1.0", - "vscode-uri": "^3.0.8" + "semver": "^7.6.3", + "strip-ansi": "^7.1.0" }, "bin": { "cspell": "bin.mjs", @@ -768,44 +672,43 @@ } }, "node_modules/cspell-config-lib": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/cspell-config-lib/-/cspell-config-lib-8.10.0.tgz", - "integrity": "sha512-3rssZH65y4dYIQN0ZgTkTvqThIRVzn18hw7Gx1ZjTFdVMsyc0fRSqtCSOWgi8P5U+GPeyQ3ylvv2RxrTKWaNxw==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell-config-lib/-/cspell-config-lib-8.14.2.tgz", + "integrity": "sha512-yHP1BdcH5dbjb8qiZr6+bxEnJ+rxTULQ00wBz3eBPWCghJywEAYYvMWoYuxVtPpndlkKYC1wJAHsyNkweQyepA==", "dev": true, "dependencies": { - "@cspell/cspell-types": "8.10.0", - "comment-json": "^4.2.3", - "yaml": "^2.4.5" + "@cspell/cspell-types": "8.14.2", + "comment-json": "^4.2.5", + "yaml": "^2.5.0" }, "engines": { "node": ">=18" } }, "node_modules/cspell-dictionary": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/cspell-dictionary/-/cspell-dictionary-8.10.0.tgz", - "integrity": "sha512-mjWrT5pbcnS7LmQpLPZJxa2ohP1wEy6VegQc922AZIvnxvYJ7ZXX/UrUdmQ/ggjKp3bDPf+si1rAcN7oHUAcDA==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell-dictionary/-/cspell-dictionary-8.14.2.tgz", + "integrity": "sha512-gWuAvf6queGGUvGbfAxxUq55cZ0OevWPbjnCrSB0PpJ4tqdFd8dLcvVrIKzoE2sBXKPw2NDkmoEngs6iGavC0w==", "dev": true, "dependencies": { - "@cspell/cspell-pipe": "8.10.0", - "@cspell/cspell-types": "8.10.0", - "cspell-trie-lib": "8.10.0", - "fast-equals": "^5.0.1", - "gensequence": "^7.0.0" + "@cspell/cspell-pipe": "8.14.2", + "@cspell/cspell-types": "8.14.2", + "cspell-trie-lib": "8.14.2", + "fast-equals": "^5.0.1" }, "engines": { "node": ">=18" } }, "node_modules/cspell-gitignore": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/cspell-gitignore/-/cspell-gitignore-8.10.0.tgz", - "integrity": "sha512-FNbrYEFoQi8kHQVLJrGWH6c6Mh/ccmziOBW7KMAWt+sgigMtslu8OokbVUJYdt6R3ESNaGflOW9eVhbVfc6llw==", + "version": 
"8.14.2", + "resolved": "https://registry.npmjs.org/cspell-gitignore/-/cspell-gitignore-8.14.2.tgz", + "integrity": "sha512-lrO/49NaKBpkR7vFxv4OOY+oHmsG5+gNQejrBBWD9Nv9vvjJtz/G36X/rcN6M6tFcQQMWwa01kf04nxz8Ejuhg==", "dev": true, "dependencies": { - "@cspell/url": "8.10.0", - "cspell-glob": "8.10.0", - "cspell-io": "8.10.0", + "@cspell/url": "8.14.2", + "cspell-glob": "8.14.2", + "cspell-io": "8.14.2", "find-up-simple": "^1.0.0" }, "bin": { @@ -816,12 +719,12 @@ } }, "node_modules/cspell-glob": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/cspell-glob/-/cspell-glob-8.10.0.tgz", - "integrity": "sha512-d/q5PZYY+dgMEbmpnkDcs8FjUiR5e5IsCMiiDzhTRslswRPNXwZq9tUKhrGod/hbNH9M28fxnLEHZJFBy91wRQ==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell-glob/-/cspell-glob-8.14.2.tgz", + "integrity": "sha512-9Q1Kgoo1ev3fKTpp9y5n8M4RLxd8B0f5o4y5FQe4dBU0j/bt+/YDrLZNWDm77JViV606XQ6fimG1FTTq6pT9/g==", "dev": true, "dependencies": { - "@cspell/url": "8.10.0", + "@cspell/url": "8.14.2", "micromatch": "^4.0.7" }, "engines": { @@ -829,13 +732,13 @@ } }, "node_modules/cspell-grammar": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/cspell-grammar/-/cspell-grammar-8.10.0.tgz", - "integrity": "sha512-bAz2Zcxvf0uex1EHXeWhg3K9ciHFIzcAidwMiDjiaf8/bX4VqOMDzYvv8NRaFdZ3WbaT6yO+jcsUg5kEmCjlvA==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell-grammar/-/cspell-grammar-8.14.2.tgz", + "integrity": "sha512-eYwceVP80FGYVJenE42ALnvEKOXaXjq4yVbb1Ni1umO/9qamLWNCQ1RP6rRACy5e/cXviAbhrQ5Mtw6n+pyPEQ==", "dev": true, "dependencies": { - "@cspell/cspell-pipe": "8.10.0", - "@cspell/cspell-types": "8.10.0" + "@cspell/cspell-pipe": "8.14.2", + "@cspell/cspell-types": "8.14.2" }, "bin": { "cspell-grammar": "bin.mjs" @@ -845,45 +748,46 @@ } }, "node_modules/cspell-io": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/cspell-io/-/cspell-io-8.10.0.tgz", - "integrity": "sha512-IQJE4nybgvkIotsRh3Xblv6PIkhOtusUrF8dAO2oc8zNRuBQwPnVvtP1w2/flWXTucTt5LOM7rHkzoEYMaX6cA==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell-io/-/cspell-io-8.14.2.tgz", + "integrity": "sha512-uaKpHiY3DAgfdzgKMQml6U8F8o9udMuYxGqYa5FVfN7D5Ap7B2edQzSLTUYwxrFEn4skSfp6XY73+nzJvxzH4Q==", "dev": true, "dependencies": { - "@cspell/cspell-service-bus": "8.10.0", - "@cspell/url": "8.10.0" + "@cspell/cspell-service-bus": "8.14.2", + "@cspell/url": "8.14.2" }, "engines": { "node": ">=18" } }, "node_modules/cspell-lib": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/cspell-lib/-/cspell-lib-8.10.0.tgz", - "integrity": "sha512-QL1sKLBjIfBjpmgsrhv1NXzW35asS+XqeK/F6IMujri7K2aUhd7zTrh75tyIuSQ7ZoI4zzPvqwbQvZeRnAQd1Q==", - "dev": true, - "dependencies": { - "@cspell/cspell-bundled-dicts": "8.10.0", - "@cspell/cspell-pipe": "8.10.0", - "@cspell/cspell-resolver": "8.10.0", - "@cspell/cspell-types": "8.10.0", - "@cspell/dynamic-import": "8.10.0", - "@cspell/strong-weak-map": "8.10.0", - "@cspell/url": "8.10.0", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell-lib/-/cspell-lib-8.14.2.tgz", + "integrity": "sha512-d2oiIXHXnADmnhIuFLOdNE63L7OUfzgpLbYaqAWbkImCUDkevfGrOgnX8TJ03fUgZID4nvQ+3kgu/n2j4eLZjQ==", + "dev": true, + "dependencies": { + "@cspell/cspell-bundled-dicts": "8.14.2", + "@cspell/cspell-pipe": "8.14.2", + "@cspell/cspell-resolver": "8.14.2", + "@cspell/cspell-types": "8.14.2", + "@cspell/dynamic-import": "8.14.2", + "@cspell/filetypes": "8.14.2", + "@cspell/strong-weak-map": "8.14.2", + "@cspell/url": "8.14.2", 
"clear-module": "^4.1.2", - "comment-json": "^4.2.3", - "cspell-config-lib": "8.10.0", - "cspell-dictionary": "8.10.0", - "cspell-glob": "8.10.0", - "cspell-grammar": "8.10.0", - "cspell-io": "8.10.0", - "cspell-trie-lib": "8.10.0", + "comment-json": "^4.2.5", + "cspell-config-lib": "8.14.2", + "cspell-dictionary": "8.14.2", + "cspell-glob": "8.14.2", + "cspell-grammar": "8.14.2", + "cspell-io": "8.14.2", + "cspell-trie-lib": "8.14.2", "env-paths": "^3.0.0", "fast-equals": "^5.0.1", "gensequence": "^7.0.0", "import-fresh": "^3.3.0", "resolve-from": "^5.0.0", - "vscode-languageserver-textdocument": "^1.0.11", + "vscode-languageserver-textdocument": "^1.0.12", "vscode-uri": "^3.0.8", "xdg-basedir": "^5.1.0" }, @@ -892,29 +796,19 @@ } }, "node_modules/cspell-trie-lib": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/cspell-trie-lib/-/cspell-trie-lib-8.10.0.tgz", - "integrity": "sha512-B8TlC37vnM5kEUs144EiHxVinVEh5/u2oBhJv9NZT1yNab+Qp6/k3sPzUIlrjtzzEpKeuCJnZVqgx4cKZmDGqw==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell-trie-lib/-/cspell-trie-lib-8.14.2.tgz", + "integrity": "sha512-rZMbaEBGoyy4/zxKECaMyVyGLbuUxYmZ5jlEgiA3xPtEdWwJ4iWRTo5G6dWbQsXoxPYdAXXZ0/q0GQ2y6Jt0kw==", "dev": true, "dependencies": { - "@cspell/cspell-pipe": "8.10.0", - "@cspell/cspell-types": "8.10.0", + "@cspell/cspell-pipe": "8.14.2", + "@cspell/cspell-types": "8.14.2", "gensequence": "^7.0.0" }, "engines": { "node": ">=18" } }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" - }, - "node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" - }, "node_modules/env-paths": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-3.0.0.tgz", @@ -981,15 +875,15 @@ } }, "node_modules/file-entry-cache": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", - "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-9.0.0.tgz", + "integrity": "sha512-6MgEugi8p2tiUhqO7GnPsmbCCzj0YRCwwaTbpGRyKZesjRSzkqkAE9fPp7V2yMs5hwfgbQLgdvSSkGNg1s5Uvw==", "dev": true, "dependencies": { - "flat-cache": "^4.0.0" + "flat-cache": "^5.0.0" }, "engines": { - "node": ">=16.0.0" + "node": ">=18" } }, "node_modules/fill-range": { @@ -1017,38 +911,37 @@ } }, "node_modules/flat-cache": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.0.tgz", - "integrity": "sha512-EryKbCE/wxpxKniQlyas6PY1I9vwtF3uCBweX+N8KYTCn3Y12RTGtQAJ/bd5pl7kxUAc8v/R3Ake/N17OZiFqA==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-5.0.0.tgz", + "integrity": "sha512-JrqFmyUl2PnPi1OvLyTVHnQvwQ0S+e6lGSwu8OkAZlSaNIZciTY2H/cOOROxsBA1m/LZNHDsqAgDZt6akWcjsQ==", "dev": true, "dependencies": { - "flatted": "^3.2.9", - "keyv": "^4.5.4", - "rimraf": "^5.0.5" + "flatted": "^3.3.1", + "keyv": "^4.5.4" }, "engines": { - "node": ">=16" + "node": ">=18" } }, "node_modules/flatted": { - "version": "3.2.9", - "resolved": 
"https://registry.npmjs.org/flatted/-/flatted-3.2.9.tgz", - "integrity": "sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz", + "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==", "dev": true }, - "node_modules/foreground-child": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", - "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", - "dependencies": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, "node_modules/gensequence": { @@ -1072,27 +965,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/glob": { - "version": "10.3.10", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.10.tgz", - "integrity": "sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g==", - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^2.3.5", - "minimatch": "^9.0.1", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", - "path-scurry": "^1.10.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/glob-parent": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", @@ -1194,14 +1066,6 @@ "node": ">=0.10.0" } }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "engines": { - "node": ">=8" - } - }, "node_modules/is-glob": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", @@ -1223,28 +1087,6 @@ "node": ">=0.12.0" } }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" - }, - "node_modules/jackspeak": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz", - "integrity": "sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==", - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } - }, "node_modules/json-buffer": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", @@ -1282,54 +1124,6 @@ "node": ">=8.6" } }, - "node_modules/minimatch": { - "version": "9.0.3", - "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/minizlib": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.1.tgz", - "integrity": "sha512-umcy022ILvb5/3Djuu8LWeqUa8D68JaBzlttKeMWen48SjabqS3iY5w/vzeMzMUNhLDifyhbOwKDSznB1vvrwg==", - "dependencies": { - "minipass": "^7.0.4", - "rimraf": "^5.0.5" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/mkdirp": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", - "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", - "bin": { - "mkdirp": "dist/cjs/src/bin.js" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/parent-module": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-2.0.0.tgz", @@ -1342,37 +1136,6 @@ "node": ">=8" } }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-scurry": { - "version": "1.10.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.1.tgz", - "integrity": "sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ==", - "dependencies": { - "lru-cache": "^9.1.1 || ^10.0.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/path-scurry/node_modules/lru-cache": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.1.0.tgz", - "integrity": "sha512-/1clY/ui8CzjKFyjdvwPWJUYKiFVXG2I2cY0ssG7h4+hwk+XOIX7ZSG9Q7TW8TW3Kp3BUSqgFWBLgL4PJ+Blag==", - "engines": { - "node": "14 || >=16.14" - } - }, "node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", @@ -1386,16 +1149,19 @@ } }, "node_modules/pyright": { - "version": "1.1.223", - "resolved": "https://registry.npmjs.org/pyright/-/pyright-1.1.223.tgz", - "integrity": "sha512-EGLKMQRmSkKNe/eDy6MEkqq5lF91C5MCMYxvmCgKGUjZHIa+HByJIiRY2cOTm7g2CAv0eE7Ift701VxLM5Q0iQ==", + "version": "1.1.377", + "resolved": "https://registry.npmjs.org/pyright/-/pyright-1.1.377.tgz", + "integrity": "sha512-y6ENYuyZXTczPnPWZnqx78pE+ZgyIotEas2M/LFRTq3EfbgVk84EcvuSKLIy2DJeDKjKDxVP/LVmDNHabljD3g==", "dev": true, "bin": { "pyright": "index.js", "pyright-langserver": "langserver.index.js" }, "engines": { - "node": ">=12.0.0" + "node": ">=14.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" } }, "node_modules/queue-microtask": { @@ -1446,23 +1212,6 @@ "node": ">=0.10.0" } }, - 
"node_modules/rimraf": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.5.tgz", - "integrity": "sha512-CqDakW+hMe/Bz202FPEymy68P+G50RfMQK+Qo5YUqc9SPipvbGjCGKd0RSKEelbsfQuw3g5NZDSrlZZAJurH1A==", - "dependencies": { - "glob": "^10.3.7" - }, - "bin": { - "rimraf": "dist/esm/bin.mjs" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -1487,9 +1236,9 @@ } }, "node_modules/semver": { - "version": "7.6.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", - "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "dev": true, "bin": { "semver": "bin/semver.js" @@ -1498,94 +1247,11 @@ "node": ">=10" } }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" - } - }, - "node_modules/signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": 
"sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "node_modules/string-width-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/strip-ansi": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, "dependencies": { "ansi-regex": "^6.0.1" }, @@ -1596,50 +1262,6 @@ "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, - "node_modules/strip-ansi-cjs": { - "name": "strip-ansi", - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/tar": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.0.tgz", - "integrity": "sha512-XQs0S8fuAkQWuqhDeCdMlJXDX80D7EOVLDPVFkna9yQfzS+PHKgfxcei0jf6/+QAWcjqrnC8uM3fSAnrQl+XYg==", - "dependencies": { - "@isaacs/fs-minipass": "^4.0.0", - "chownr": "^3.0.0", - "minipass": "^7.1.2", - "minizlib": "^3.0.1", - "mkdirp": "^3.0.1", - "yallist": "^5.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/tar/node_modules/yallist": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", - "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", - "engines": { - "node": ">=18" - } - }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -1653,9 +1275,9 @@ } }, "node_modules/vscode-languageserver-textdocument": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.11.tgz", - "integrity": "sha512-X+8T3GoiwTVlJbicx/sIAF+yuJAqz8VvwJyoMVhwEMoEKE/fkDmrqUgDMyBECcM2A2frVZIUj5HI/ErRXCfOeA==", + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz", + "integrity": "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==", "dev": true }, "node_modules/vscode-uri": { @@ -1664,104 +1286,6 @@ "integrity": "sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==", "dev": true }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": 
">= 8" - } - }, - "node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "node_modules/wrap-ansi-cjs/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/xdg-basedir": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-5.1.0.tgz", @@ -1775,9 +1299,9 @@ } }, "node_modules/yaml": { - "version": "2.4.5", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.5.tgz", - "integrity": "sha512-aBx2bnqDzVOyNKfsysjA2ms5ZlnjSAW2eG3/L5G/CSujfjLJTJsEw1bGw8kCf04KodQWk1pxlGnZ56CRxiawmg==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.5.0.tgz", + "integrity": "sha512-2wWLbGbYDiSqqIKoPjar3MPgB94ErzCtrNE1FdqGuaO0pi2JGjmE8aW8TDZwzU7vuxcGRdL/4gPQwQ7hD5AMSw==", "dev": true, "bin": { "yaml": "bin.mjs" @@ -1789,31 +1313,31 @@ }, "dependencies": { "@cspell/cspell-bundled-dicts": { - "version": "8.10.0", - 
"resolved": "https://registry.npmjs.org/@cspell/cspell-bundled-dicts/-/cspell-bundled-dicts-8.10.0.tgz", - "integrity": "sha512-phqOVx1ArqfCVzuE0qHOEKCz0xVRlBzbQHCwab0twu1RmOK6ShLzQoAZqtEFK1Rm1fCguHJmQyuP+34lGt6nfQ==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/cspell-bundled-dicts/-/cspell-bundled-dicts-8.14.2.tgz", + "integrity": "sha512-Kv2Utj/RTSxfufGXkkoTZ/3ErCsYWpCijtDFr/FwSsM7mC0PzLpdlcD9xjtgrJO5Kwp7T47iTG21U4Mwddyi8Q==", "dev": true, "requires": { "@cspell/dict-ada": "^4.0.2", - "@cspell/dict-aws": "^4.0.2", + "@cspell/dict-aws": "^4.0.3", "@cspell/dict-bash": "^4.1.3", - "@cspell/dict-companies": "^3.1.2", - "@cspell/dict-cpp": "^5.1.10", + "@cspell/dict-companies": "^3.1.4", + "@cspell/dict-cpp": "^5.1.12", "@cspell/dict-cryptocurrencies": "^5.0.0", "@cspell/dict-csharp": "^4.0.2", - "@cspell/dict-css": "^4.0.12", + "@cspell/dict-css": "^4.0.13", "@cspell/dict-dart": "^2.0.3", "@cspell/dict-django": "^4.1.0", "@cspell/dict-docker": "^1.1.7", "@cspell/dict-dotnet": "^5.0.2", "@cspell/dict-elixir": "^4.0.3", "@cspell/dict-en_us": "^4.3.23", - "@cspell/dict-en-common-misspellings": "^2.0.3", + "@cspell/dict-en-common-misspellings": "^2.0.4", "@cspell/dict-en-gb": "1.1.33", "@cspell/dict-filetypes": "^3.0.4", "@cspell/dict-fonts": "^4.0.0", "@cspell/dict-fsharp": "^1.0.1", - "@cspell/dict-fullstack": "^3.1.8", + "@cspell/dict-fullstack": "^3.2.0", "@cspell/dict-gaming-terms": "^1.0.5", "@cspell/dict-git": "^3.0.0", "@cspell/dict-golang": "^6.0.9", @@ -1823,65 +1347,65 @@ "@cspell/dict-html-symbol-entities": "^4.0.0", "@cspell/dict-java": "^5.0.7", "@cspell/dict-julia": "^1.0.1", - "@cspell/dict-k8s": "^1.0.5", + "@cspell/dict-k8s": "^1.0.6", "@cspell/dict-latex": "^4.0.0", "@cspell/dict-lorem-ipsum": "^4.0.0", "@cspell/dict-lua": "^4.0.3", "@cspell/dict-makefile": "^1.0.0", "@cspell/dict-monkeyc": "^1.0.6", "@cspell/dict-node": "^5.0.1", - "@cspell/dict-npm": "^5.0.16", + "@cspell/dict-npm": "^5.0.18", "@cspell/dict-php": "^4.0.8", - "@cspell/dict-powershell": "^5.0.4", + "@cspell/dict-powershell": "^5.0.5", "@cspell/dict-public-licenses": "^2.0.7", - "@cspell/dict-python": "^4.2.1", + "@cspell/dict-python": "^4.2.4", "@cspell/dict-r": "^2.0.1", "@cspell/dict-ruby": "^5.0.2", - "@cspell/dict-rust": "^4.0.4", - "@cspell/dict-scala": "^5.0.2", - "@cspell/dict-software-terms": "^3.4.9", - "@cspell/dict-sql": "^2.1.3", + "@cspell/dict-rust": "^4.0.5", + "@cspell/dict-scala": "^5.0.3", + "@cspell/dict-software-terms": "^4.0.6", + "@cspell/dict-sql": "^2.1.5", "@cspell/dict-svelte": "^1.0.2", "@cspell/dict-swift": "^2.0.1", "@cspell/dict-terraform": "^1.0.0", - "@cspell/dict-typescript": "^3.1.5", + "@cspell/dict-typescript": "^3.1.6", "@cspell/dict-vue": "^3.0.0" } }, "@cspell/cspell-json-reporter": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/cspell-json-reporter/-/cspell-json-reporter-8.10.0.tgz", - "integrity": "sha512-hGMj4TmqqKdfeWQa1kIvFLKyebPTzFUdUugiy4iD5CuUcruIWeXT1XzCpCY726MjLFrZ34uW01MglG2Ptsn3Qg==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/cspell-json-reporter/-/cspell-json-reporter-8.14.2.tgz", + "integrity": "sha512-TZavcnNIZKX1xC/GNj80RgFVKHCT4pHT0qm9jCsQFH2QJfyCrUlkEvotKGSQ04lAyCwWg6Enq95qhouF8YbKUQ==", "dev": true, "requires": { - "@cspell/cspell-types": "8.10.0" + "@cspell/cspell-types": "8.14.2" } }, "@cspell/cspell-pipe": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/cspell-pipe/-/cspell-pipe-8.10.0.tgz", - "integrity": 
"sha512-qspmnz1d+0QgPwnBkoqBGY2GYtcA8uaQLCLhen8QOCybqhlHbn57hzeec8QZVDigJYZ/rVQbOceQ11QRK7IaMA==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/cspell-pipe/-/cspell-pipe-8.14.2.tgz", + "integrity": "sha512-aWMoXZAXEre0/M9AYWOW33YyOJZ06i4vvsEpWBDWpHpWQEmsR/7cMMgld8Pp3wlEjIUclUAKTYmrZ61PFWU/og==", "dev": true }, "@cspell/cspell-resolver": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/cspell-resolver/-/cspell-resolver-8.10.0.tgz", - "integrity": "sha512-ln8k05sR3pVaEYTm8CJ9fLXagdiUcy3c1sC956PJ3MJAq4y2RDedPOD306umF5EjmSasbM1fgbJ8T7L2nAgeJQ==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/cspell-resolver/-/cspell-resolver-8.14.2.tgz", + "integrity": "sha512-pSyBsAvslaN0dx0pHdvECJEuFDDBJGAD6G8U4BVbIyj2OPk0Ox0HrZIj6csYxxoJERAgNO/q7yCPwa4j9NNFXg==", "dev": true, "requires": { "global-directory": "^4.0.1" } }, "@cspell/cspell-service-bus": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/cspell-service-bus/-/cspell-service-bus-8.10.0.tgz", - "integrity": "sha512-zxW2DDzeA7O7z92s+H2gAnqe0oOy0AxWAXv0orJnV4XAUJEnozgL+PC41l91PLKbYNVxpaXi/KKz4AAUIqI3AQ==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/cspell-service-bus/-/cspell-service-bus-8.14.2.tgz", + "integrity": "sha512-WUF7xf3YgXYIqjmBwLcVugYIrYL4WfXchgSo9rmbbnOcAArzsK+HKfzb4AniZAJ1unxcIQ0JnVlRmnCAKPjjLg==", "dev": true }, "@cspell/cspell-types": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/cspell-types/-/cspell-types-8.10.0.tgz", - "integrity": "sha512-mCzg0fLa9r8YirQmPM5yGu6VFEk/gsNEsZjmYqkDpzMy2plEpcg2QkTu58juL3XroeA7dhWn7pDCEhUGxt7eIg==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/cspell-types/-/cspell-types-8.14.2.tgz", + "integrity": "sha512-MRY8MjBNOKGMDSkxAKueYAgVL43miO+lDcLCBBP+7cNXqHiUFMIZteONcGp3kJT0dWS04dN6lKAXvaNF0aWcng==", "dev": true }, "@cspell/dict-ada": { @@ -1891,9 +1415,9 @@ "dev": true }, "@cspell/dict-aws": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/@cspell/dict-aws/-/dict-aws-4.0.2.tgz", - "integrity": "sha512-aNGHWSV7dRLTIn8WJemzLoMF62qOaiUQlgnsCwH5fRCD/00gsWCwg106pnbkmK4AyabyxzneOV4dfecDJWkSxw==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@cspell/dict-aws/-/dict-aws-4.0.3.tgz", + "integrity": "sha512-0C0RQ4EM29fH0tIYv+EgDQEum0QI6OrmjENC9u98pB8UcnYxGG/SqinuPxo+TgcEuInj0Q73MsBpJ1l5xUnrsw==", "dev": true }, "@cspell/dict-bash": { @@ -1903,15 +1427,15 @@ "dev": true }, "@cspell/dict-companies": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@cspell/dict-companies/-/dict-companies-3.1.2.tgz", - "integrity": "sha512-OwR5i1xbYuJX7FtHQySmTy3iJtPV1rZQ3jFCxFGwrA1xRQ4rtRcDQ+sTXBCIAoJHkXa84f9J3zsngOKmMGyS/w==", + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@cspell/dict-companies/-/dict-companies-3.1.4.tgz", + "integrity": "sha512-y9e0amzEK36EiiKx3VAA+SHQJPpf2Qv5cCt5eTUSggpTkiFkCh6gRKQ97rVlrKh5GJrqinDwYIJtTsxuh2vy2Q==", "dev": true }, "@cspell/dict-cpp": { - "version": "5.1.10", - "resolved": "https://registry.npmjs.org/@cspell/dict-cpp/-/dict-cpp-5.1.10.tgz", - "integrity": "sha512-BmIF0sAz2BgGEOwzYIeEm9ALneDjd1tcTbFbo+A1Hcq3zOKP8yViSgxS9CEN30KOZIyph6Tldp531UPEpoEl0Q==", + "version": "5.1.13", + "resolved": "https://registry.npmjs.org/@cspell/dict-cpp/-/dict-cpp-5.1.13.tgz", + "integrity": "sha512-cgF438519dJHCD1ERMfyp8UDjwinu+njzHnzOvTuY9VUn30koT7xyLq8DTLebIoxOy5FOSMMiMK3vPBXZjzl1g==", "dev": true }, "@cspell/dict-cryptocurrencies": { @@ -1927,9 +1451,9 @@ 
"dev": true }, "@cspell/dict-css": { - "version": "4.0.12", - "resolved": "https://registry.npmjs.org/@cspell/dict-css/-/dict-css-4.0.12.tgz", - "integrity": "sha512-vGBgPM92MkHQF5/2jsWcnaahOZ+C6OE/fPvd5ScBP72oFY9tn5GLuomcyO0z8vWCr2e0nUSX1OGimPtcQAlvSw==", + "version": "4.0.13", + "resolved": "https://registry.npmjs.org/@cspell/dict-css/-/dict-css-4.0.13.tgz", + "integrity": "sha512-WfOQkqlAJTo8eIQeztaH0N0P+iF5hsJVKFuhy4jmARPISy8Efcv8QXk2/IVbmjJH0/ZV7dKRdnY5JFVXuVz37g==", "dev": true }, "@cspell/dict-dart": { @@ -1975,9 +1499,9 @@ "dev": true }, "@cspell/dict-en-common-misspellings": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@cspell/dict-en-common-misspellings/-/dict-en-common-misspellings-2.0.3.tgz", - "integrity": "sha512-8nF1z9nUiSgMyikL66HTbDO7jCGtB24TxKBasXIBwkBKMDZgA2M883iXdeByy6m1JJUcCGFkSftVYp2W0bUgjw==", + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@cspell/dict-en-common-misspellings/-/dict-en-common-misspellings-2.0.4.tgz", + "integrity": "sha512-lvOiRjV/FG4pAGZL3PN2GCVHSTCE92cwhfLGGkOsQtxSmef6WCHfHwp9auafkBlX0yFQSKDfq6/TlpQbjbJBtQ==", "dev": true }, "@cspell/dict-en-gb": { @@ -2005,9 +1529,9 @@ "dev": true }, "@cspell/dict-fullstack": { - "version": "3.1.8", - "resolved": "https://registry.npmjs.org/@cspell/dict-fullstack/-/dict-fullstack-3.1.8.tgz", - "integrity": "sha512-YRlZupL7uqMCtEBK0bDP9BrcPnjDhz7m4GBqCc1EYqfXauHbLmDT8ELha7T/E7wsFKniHSjzwDZzhNXo2lusRQ==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@cspell/dict-fullstack/-/dict-fullstack-3.2.0.tgz", + "integrity": "sha512-sIGQwU6G3rLTo+nx0GKyirR5dQSFeTIzFTOrURw51ISf+jKG9a3OmvsVtc2OANfvEAOLOC9Wfd8WYhmsO8KRDQ==", "dev": true }, "@cspell/dict-gaming-terms": { @@ -2023,9 +1547,9 @@ "dev": true }, "@cspell/dict-golang": { - "version": "6.0.9", - "resolved": "https://registry.npmjs.org/@cspell/dict-golang/-/dict-golang-6.0.9.tgz", - "integrity": "sha512-etDt2WQauyEQDA+qPS5QtkYTb2I9l5IfQftAllVoB1aOrT6bxxpHvMEpJ0Hsn/vezxrCqa/BmtUbRxllIxIuSg==", + "version": "6.0.11", + "resolved": "https://registry.npmjs.org/@cspell/dict-golang/-/dict-golang-6.0.11.tgz", + "integrity": "sha512-BMFIDGh1HaFUe1cYBT1dotqyIQG2j3VkNntGQTBa/7i0aBnC5PBJDiAXnUeBHi0AVrz0hyAc7xtcK5KyKCEzwg==", "dev": true }, "@cspell/dict-google": { @@ -2065,9 +1589,9 @@ "dev": true }, "@cspell/dict-k8s": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@cspell/dict-k8s/-/dict-k8s-1.0.5.tgz", - "integrity": "sha512-Cj+/ZV4S+MKlwfocSJZqe/2UAd/sY8YtlZjbK25VN1nCnrsKrBjfkX29vclwSj1U9aJg4Z9jw/uMjoaKu9ZrpQ==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@cspell/dict-k8s/-/dict-k8s-1.0.6.tgz", + "integrity": "sha512-srhVDtwrd799uxMpsPOQqeDJY+gEocgZpoK06EFrb4GRYGhv7lXo9Fb+xQMyQytzOW9dw4DNOEck++nacDuymg==", "dev": true }, "@cspell/dict-latex": { @@ -2107,9 +1631,9 @@ "dev": true }, "@cspell/dict-npm": { - "version": "5.0.16", - "resolved": "https://registry.npmjs.org/@cspell/dict-npm/-/dict-npm-5.0.16.tgz", - "integrity": "sha512-ZWPnLAziEcSCvV0c8k9Qj88pfMu+wZwM5Qks87ShsfBgI8uLZ9tGHravA7gmjH1Gd7Bgxy2ulvXtSqIWPh1lew==", + "version": "5.0.18", + "resolved": "https://registry.npmjs.org/@cspell/dict-npm/-/dict-npm-5.0.18.tgz", + "integrity": "sha512-weMTyxWpzz19q4wv9n183BtFvdD5fCjtze+bFKpl+4rO/YlPhHL2cXLAeexJz/VDSBecwX4ybTZYoknd1h2J4w==", "dev": true }, "@cspell/dict-php": { @@ -2119,21 +1643,21 @@ "dev": true }, "@cspell/dict-powershell": { - "version": "5.0.4", - "resolved": "https://registry.npmjs.org/@cspell/dict-powershell/-/dict-powershell-5.0.4.tgz", - "integrity": 
"sha512-eosDShapDgBWN9ULF7+sRNdUtzRnUdsfEdBSchDm8FZA4HOqxUSZy3b/cX/Rdw0Fnw0AKgk0kzgXw7tS6vwJMQ==", + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/@cspell/dict-powershell/-/dict-powershell-5.0.5.tgz", + "integrity": "sha512-3JVyvMoDJesAATYGOxcUWPbQPUvpZmkinV3m8HL1w1RrjeMVXXuK7U1jhopSneBtLhkU+9HKFwgh9l9xL9mY2Q==", "dev": true }, "@cspell/dict-public-licenses": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/@cspell/dict-public-licenses/-/dict-public-licenses-2.0.7.tgz", - "integrity": "sha512-KlBXuGcN3LE7tQi/GEqKiDewWGGuopiAD0zRK1QilOx5Co8XAvs044gk4MNIQftc8r0nHeUI+irJKLGcR36DIQ==", + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/@cspell/dict-public-licenses/-/dict-public-licenses-2.0.8.tgz", + "integrity": "sha512-Sup+tFS7cDV0fgpoKtUqEZ6+fA/H+XUgBiqQ/Fbs6vUE3WCjJHOIVsP+udHuyMH7iBfJ4UFYOYeORcY4EaKdMg==", "dev": true }, "@cspell/dict-python": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@cspell/dict-python/-/dict-python-4.2.1.tgz", - "integrity": "sha512-9X2jRgyM0cxBoFQRo4Zc8oacyWnXi+0/bMI5FGibZNZV4y/o9UoFEr6agjU260/cXHTjIdkX233nN7eb7dtyRg==", + "version": "4.2.4", + "resolved": "https://registry.npmjs.org/@cspell/dict-python/-/dict-python-4.2.4.tgz", + "integrity": "sha512-sCtLBqMreb+8zRW2bXvFsfSnRUVU6IFm4mT6Dc4xbz0YajprbaPPh/kOUTw5IJRP8Uh+FFb7Xp2iH03CNWRq/A==", "dev": true, "requires": { "@cspell/dict-data-science": "^2.0.1" @@ -2152,27 +1676,27 @@ "dev": true }, "@cspell/dict-rust": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/@cspell/dict-rust/-/dict-rust-4.0.4.tgz", - "integrity": "sha512-v9/LcZknt/Xq7m1jdTWiQEtmkVVKdE1etAfGL2sgcWpZYewEa459HeWndNA0gfzQrpWX9sYay18mt7pqClJEdA==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@cspell/dict-rust/-/dict-rust-4.0.5.tgz", + "integrity": "sha512-DIvlPRDemjKQy8rCqftAgGNZxY5Bg+Ps7qAIJjxkSjmMETyDgl0KTVuaJPt7EK4jJt6uCZ4ILy96npsHDPwoXA==", "dev": true }, "@cspell/dict-scala": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/@cspell/dict-scala/-/dict-scala-5.0.2.tgz", - "integrity": "sha512-v97ClgidZt99JUm7OjhQugDHmhx4U8fcgunHvD/BsXWjXNj4cTr0m0YjofyZoL44WpICsNuFV9F/sv9OM5HUEw==", + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@cspell/dict-scala/-/dict-scala-5.0.3.tgz", + "integrity": "sha512-4yGb4AInT99rqprxVNT9TYb1YSpq58Owzq7zi3ZS5T0u899Y4VsxsBiOgHnQ/4W+ygi+sp+oqef8w8nABR2lkg==", "dev": true }, "@cspell/dict-software-terms": { - "version": "3.4.9", - "resolved": "https://registry.npmjs.org/@cspell/dict-software-terms/-/dict-software-terms-3.4.9.tgz", - "integrity": "sha512-J2uNH3ScBPQijXyzLfxsC1CYgq36MWvbynJzQJ15ZazTsecC0pQHynm3/6VH4X/BphV2eXB0GRJT3yMicYLGCw==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/@cspell/dict-software-terms/-/dict-software-terms-4.0.8.tgz", + "integrity": "sha512-LmsIHkjWgVEg3Wrcomaj5Fp/m/l2Eiqj8Qhgcj8rAnui8DPqiKLSZKL3f1iRiWEMqbnzjpFpcmb1bOp8mcBWkA==", "dev": true }, "@cspell/dict-sql": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/@cspell/dict-sql/-/dict-sql-2.1.3.tgz", - "integrity": "sha512-SEyTNKJrjqD6PAzZ9WpdSu6P7wgdNtGV2RV8Kpuw1x6bV+YsSptuClYG+JSdRExBTE6LwIe1bTklejUp3ZP8TQ==", + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@cspell/dict-sql/-/dict-sql-2.1.5.tgz", + "integrity": "sha512-FmxanytHXss7GAWAXmgaxl3icTCW7YxlimyOSPNfm+njqeUDjw3kEv4mFNDDObBJv8Ec5AWCbUDkWIpkE3IpKg==", "dev": true }, "@cspell/dict-svelte": { @@ -2194,9 +1718,9 @@ "dev": true }, "@cspell/dict-typescript": { - "version": "3.1.5", - "resolved": 
"https://registry.npmjs.org/@cspell/dict-typescript/-/dict-typescript-3.1.5.tgz", - "integrity": "sha512-EkIwwNV/xqEoBPJml2S16RXj65h1kvly8dfDLgXerrKw6puybZdvAHerAph6/uPTYdtLcsPyJYkPt5ISOJYrtw==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/@cspell/dict-typescript/-/dict-typescript-3.1.6.tgz", + "integrity": "sha512-1beC6O4P/j23VuxX+i0+F7XqPVc3hhiAzGJHEKqnWf5cWAXQtg0xz3xQJ5MvYx2a7iLaSa+lu7+05vG9UHyu9Q==", "dev": true }, "@cspell/dict-vue": { @@ -2206,47 +1730,32 @@ "dev": true }, "@cspell/dynamic-import": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/dynamic-import/-/dynamic-import-8.10.0.tgz", - "integrity": "sha512-QdZWDZfDAFHHjcBW6otijlblCB3T2r8b5q2X0+XOjE2yd+uF78Ma8pEHrgzQ7sRAkVV9SehhHqdkxOfOkYorKQ==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/dynamic-import/-/dynamic-import-8.14.2.tgz", + "integrity": "sha512-5MbqtIligU7yPwHWU/5yFCgMvur4i1bRAF1Cy8y2dDtHsa204S/w/SaXs+51EFLp2eNbCiBisCBrwJFT7R1RxA==", "dev": true, "requires": { "import-meta-resolve": "^4.1.0" } }, + "@cspell/filetypes": { + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/filetypes/-/filetypes-8.14.2.tgz", + "integrity": "sha512-ZevArA0mWeVTTqHicxCPZIAeCibpY3NwWK/x6d1Lgu7RPk/daoGAM546Q2SLChFu+r10tIH7pRG212A6Q9ihPA==", + "dev": true + }, "@cspell/strong-weak-map": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/strong-weak-map/-/strong-weak-map-8.10.0.tgz", - "integrity": "sha512-V7lq4k1tebppBdzzqdgk6FHch/PG4kIWQ2k6b9JT6yqc7ewN75KwU0tSgIMoxoJFedRE2ZnUG404SAd7jWYxug==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/strong-weak-map/-/strong-weak-map-8.14.2.tgz", + "integrity": "sha512-7sRzJc392CQYNNrtdPEfOHJdRqsqf6nASCtbS5A9hL2UrdWQ4uN7r/D+Y1HpuizwY9eOkZvarcFfsYt5wE0Pug==", "dev": true }, "@cspell/url": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/@cspell/url/-/url-8.10.0.tgz", - "integrity": "sha512-U4+uMJKe3d+BwBjrzhNVxc1CUBVynlw5qeJkSdZJzqOdDFFA9yiKfLpHYmY5Sc/Iin8XAYuAd09Mxsc3E714Iw==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/@cspell/url/-/url-8.14.2.tgz", + "integrity": "sha512-YmWW+B/2XQcCynLpiAQF77Bitm5Cynw3/BICZkbdveKjJkUzEmXB+U2qWuwXOyU8xUYuwkP63YM8McnI567rUA==", "dev": true }, - "@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "requires": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - } - }, - "@isaacs/fs-minipass": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", - "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", - "requires": { - "minipass": "^7.0.4" - } - }, "@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -2273,21 +1782,11 @@ "fastq": "^1.6.0" } }, - "@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "optional": true - }, "ansi-regex": { "version": "6.0.1", 
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==" - }, - "ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==" + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true }, "array-timsort": { "version": "1.0.3", @@ -2295,19 +1794,6 @@ "integrity": "sha512-/+3GRL7dDAGEfM6TseQk/U+mi18TU2Ms9I3UlLdUMhz2hbvGNTKdj9xniwXfUqgYhHxRx0+8UnKkvlNwVU+cWQ==", "dev": true }, - "balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" - }, - "brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "requires": { - "balanced-match": "^1.0.0" - } - }, "braces": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", @@ -2338,11 +1824,6 @@ "chalk": "^5.2.0" } }, - "chownr": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", - "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==" - }, "clear-module": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/clear-module/-/clear-module-4.1.2.tgz", @@ -2353,19 +1834,6 @@ "resolve-from": "^5.0.0" } }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, "commander": { "version": "12.1.0", "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", @@ -2373,9 +1841,9 @@ "dev": true }, "comment-json": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/comment-json/-/comment-json-4.2.3.tgz", - "integrity": "sha512-SsxdiOf064DWoZLH799Ata6u7iV658A11PlWtZATDlXPpKGJnbJZ5Z24ybixAi+LUUqJ/GKowAejtC5GFUG7Tw==", + "version": "4.2.5", + "resolved": "https://registry.npmjs.org/comment-json/-/comment-json-4.2.5.tgz", + "integrity": "sha512-bKw/r35jR3HGt5PEPm1ljsQQGyCrR8sFGNiN5L+ykDHdpO8Smxkrkla9Yi6NkQyUrb8V54PGhfMs6NrIwtxtdw==", "dev": true, "requires": { "array-timsort": "^1.0.3", @@ -2391,160 +1859,141 @@ "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", "dev": true }, - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "requires": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, "cspell": { - "version": "8.10.0", - 
"resolved": "https://registry.npmjs.org/cspell/-/cspell-8.10.0.tgz", - "integrity": "sha512-7HzPH84a5IzDQZB8qgFsOl/5w0NECG193MfR1aLkczv1v/13aGsQGiG33kXFufCuTyVYa5CrcwXaPXDRpWZ13Q==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell/-/cspell-8.14.2.tgz", + "integrity": "sha512-ii/W7fwO4chNQVYl1C/8k7RW8EXzLb69rvg08p8mSJx8B2UasVJ9tuJpTH2Spo1jX6N3H0dKPWUbd1fAmdAhPg==", "dev": true, "requires": { - "@cspell/cspell-json-reporter": "8.10.0", - "@cspell/cspell-pipe": "8.10.0", - "@cspell/cspell-types": "8.10.0", - "@cspell/dynamic-import": "8.10.0", + "@cspell/cspell-json-reporter": "8.14.2", + "@cspell/cspell-pipe": "8.14.2", + "@cspell/cspell-types": "8.14.2", + "@cspell/dynamic-import": "8.14.2", + "@cspell/url": "8.14.2", "chalk": "^5.3.0", "chalk-template": "^1.1.0", "commander": "^12.1.0", - "cspell-gitignore": "8.10.0", - "cspell-glob": "8.10.0", - "cspell-io": "8.10.0", - "cspell-lib": "8.10.0", + "cspell-dictionary": "8.14.2", + "cspell-gitignore": "8.14.2", + "cspell-glob": "8.14.2", + "cspell-io": "8.14.2", + "cspell-lib": "8.14.2", "fast-glob": "^3.3.2", "fast-json-stable-stringify": "^2.1.0", - "file-entry-cache": "^8.0.0", + "file-entry-cache": "^9.0.0", "get-stdin": "^9.0.0", - "semver": "^7.6.2", - "strip-ansi": "^7.1.0", - "vscode-uri": "^3.0.8" + "semver": "^7.6.3", + "strip-ansi": "^7.1.0" } }, "cspell-config-lib": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/cspell-config-lib/-/cspell-config-lib-8.10.0.tgz", - "integrity": "sha512-3rssZH65y4dYIQN0ZgTkTvqThIRVzn18hw7Gx1ZjTFdVMsyc0fRSqtCSOWgi8P5U+GPeyQ3ylvv2RxrTKWaNxw==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell-config-lib/-/cspell-config-lib-8.14.2.tgz", + "integrity": "sha512-yHP1BdcH5dbjb8qiZr6+bxEnJ+rxTULQ00wBz3eBPWCghJywEAYYvMWoYuxVtPpndlkKYC1wJAHsyNkweQyepA==", "dev": true, "requires": { - "@cspell/cspell-types": "8.10.0", - "comment-json": "^4.2.3", - "yaml": "^2.4.5" + "@cspell/cspell-types": "8.14.2", + "comment-json": "^4.2.5", + "yaml": "^2.5.0" } }, "cspell-dictionary": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/cspell-dictionary/-/cspell-dictionary-8.10.0.tgz", - "integrity": "sha512-mjWrT5pbcnS7LmQpLPZJxa2ohP1wEy6VegQc922AZIvnxvYJ7ZXX/UrUdmQ/ggjKp3bDPf+si1rAcN7oHUAcDA==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell-dictionary/-/cspell-dictionary-8.14.2.tgz", + "integrity": "sha512-gWuAvf6queGGUvGbfAxxUq55cZ0OevWPbjnCrSB0PpJ4tqdFd8dLcvVrIKzoE2sBXKPw2NDkmoEngs6iGavC0w==", "dev": true, "requires": { - "@cspell/cspell-pipe": "8.10.0", - "@cspell/cspell-types": "8.10.0", - "cspell-trie-lib": "8.10.0", - "fast-equals": "^5.0.1", - "gensequence": "^7.0.0" + "@cspell/cspell-pipe": "8.14.2", + "@cspell/cspell-types": "8.14.2", + "cspell-trie-lib": "8.14.2", + "fast-equals": "^5.0.1" } }, "cspell-gitignore": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/cspell-gitignore/-/cspell-gitignore-8.10.0.tgz", - "integrity": "sha512-FNbrYEFoQi8kHQVLJrGWH6c6Mh/ccmziOBW7KMAWt+sgigMtslu8OokbVUJYdt6R3ESNaGflOW9eVhbVfc6llw==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell-gitignore/-/cspell-gitignore-8.14.2.tgz", + "integrity": "sha512-lrO/49NaKBpkR7vFxv4OOY+oHmsG5+gNQejrBBWD9Nv9vvjJtz/G36X/rcN6M6tFcQQMWwa01kf04nxz8Ejuhg==", "dev": true, "requires": { - "@cspell/url": "8.10.0", - "cspell-glob": "8.10.0", - "cspell-io": "8.10.0", + "@cspell/url": "8.14.2", + "cspell-glob": "8.14.2", + "cspell-io": "8.14.2", "find-up-simple": "^1.0.0" } }, "cspell-glob": { - "version": 
"8.10.0", - "resolved": "https://registry.npmjs.org/cspell-glob/-/cspell-glob-8.10.0.tgz", - "integrity": "sha512-d/q5PZYY+dgMEbmpnkDcs8FjUiR5e5IsCMiiDzhTRslswRPNXwZq9tUKhrGod/hbNH9M28fxnLEHZJFBy91wRQ==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell-glob/-/cspell-glob-8.14.2.tgz", + "integrity": "sha512-9Q1Kgoo1ev3fKTpp9y5n8M4RLxd8B0f5o4y5FQe4dBU0j/bt+/YDrLZNWDm77JViV606XQ6fimG1FTTq6pT9/g==", "dev": true, "requires": { - "@cspell/url": "8.10.0", + "@cspell/url": "8.14.2", "micromatch": "^4.0.7" } }, "cspell-grammar": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/cspell-grammar/-/cspell-grammar-8.10.0.tgz", - "integrity": "sha512-bAz2Zcxvf0uex1EHXeWhg3K9ciHFIzcAidwMiDjiaf8/bX4VqOMDzYvv8NRaFdZ3WbaT6yO+jcsUg5kEmCjlvA==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell-grammar/-/cspell-grammar-8.14.2.tgz", + "integrity": "sha512-eYwceVP80FGYVJenE42ALnvEKOXaXjq4yVbb1Ni1umO/9qamLWNCQ1RP6rRACy5e/cXviAbhrQ5Mtw6n+pyPEQ==", "dev": true, "requires": { - "@cspell/cspell-pipe": "8.10.0", - "@cspell/cspell-types": "8.10.0" + "@cspell/cspell-pipe": "8.14.2", + "@cspell/cspell-types": "8.14.2" } }, "cspell-io": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/cspell-io/-/cspell-io-8.10.0.tgz", - "integrity": "sha512-IQJE4nybgvkIotsRh3Xblv6PIkhOtusUrF8dAO2oc8zNRuBQwPnVvtP1w2/flWXTucTt5LOM7rHkzoEYMaX6cA==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell-io/-/cspell-io-8.14.2.tgz", + "integrity": "sha512-uaKpHiY3DAgfdzgKMQml6U8F8o9udMuYxGqYa5FVfN7D5Ap7B2edQzSLTUYwxrFEn4skSfp6XY73+nzJvxzH4Q==", "dev": true, "requires": { - "@cspell/cspell-service-bus": "8.10.0", - "@cspell/url": "8.10.0" + "@cspell/cspell-service-bus": "8.14.2", + "@cspell/url": "8.14.2" } }, "cspell-lib": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/cspell-lib/-/cspell-lib-8.10.0.tgz", - "integrity": "sha512-QL1sKLBjIfBjpmgsrhv1NXzW35asS+XqeK/F6IMujri7K2aUhd7zTrh75tyIuSQ7ZoI4zzPvqwbQvZeRnAQd1Q==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell-lib/-/cspell-lib-8.14.2.tgz", + "integrity": "sha512-d2oiIXHXnADmnhIuFLOdNE63L7OUfzgpLbYaqAWbkImCUDkevfGrOgnX8TJ03fUgZID4nvQ+3kgu/n2j4eLZjQ==", "dev": true, "requires": { - "@cspell/cspell-bundled-dicts": "8.10.0", - "@cspell/cspell-pipe": "8.10.0", - "@cspell/cspell-resolver": "8.10.0", - "@cspell/cspell-types": "8.10.0", - "@cspell/dynamic-import": "8.10.0", - "@cspell/strong-weak-map": "8.10.0", - "@cspell/url": "8.10.0", + "@cspell/cspell-bundled-dicts": "8.14.2", + "@cspell/cspell-pipe": "8.14.2", + "@cspell/cspell-resolver": "8.14.2", + "@cspell/cspell-types": "8.14.2", + "@cspell/dynamic-import": "8.14.2", + "@cspell/filetypes": "8.14.2", + "@cspell/strong-weak-map": "8.14.2", + "@cspell/url": "8.14.2", "clear-module": "^4.1.2", - "comment-json": "^4.2.3", - "cspell-config-lib": "8.10.0", - "cspell-dictionary": "8.10.0", - "cspell-glob": "8.10.0", - "cspell-grammar": "8.10.0", - "cspell-io": "8.10.0", - "cspell-trie-lib": "8.10.0", + "comment-json": "^4.2.5", + "cspell-config-lib": "8.14.2", + "cspell-dictionary": "8.14.2", + "cspell-glob": "8.14.2", + "cspell-grammar": "8.14.2", + "cspell-io": "8.14.2", + "cspell-trie-lib": "8.14.2", "env-paths": "^3.0.0", "fast-equals": "^5.0.1", "gensequence": "^7.0.0", "import-fresh": "^3.3.0", "resolve-from": "^5.0.0", - "vscode-languageserver-textdocument": "^1.0.11", + "vscode-languageserver-textdocument": "^1.0.12", "vscode-uri": "^3.0.8", "xdg-basedir": "^5.1.0" } }, "cspell-trie-lib": { - 
"version": "8.10.0", - "resolved": "https://registry.npmjs.org/cspell-trie-lib/-/cspell-trie-lib-8.10.0.tgz", - "integrity": "sha512-B8TlC37vnM5kEUs144EiHxVinVEh5/u2oBhJv9NZT1yNab+Qp6/k3sPzUIlrjtzzEpKeuCJnZVqgx4cKZmDGqw==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/cspell-trie-lib/-/cspell-trie-lib-8.14.2.tgz", + "integrity": "sha512-rZMbaEBGoyy4/zxKECaMyVyGLbuUxYmZ5jlEgiA3xPtEdWwJ4iWRTo5G6dWbQsXoxPYdAXXZ0/q0GQ2y6Jt0kw==", "dev": true, "requires": { - "@cspell/cspell-pipe": "8.10.0", - "@cspell/cspell-types": "8.10.0", + "@cspell/cspell-pipe": "8.14.2", + "@cspell/cspell-types": "8.14.2", "gensequence": "^7.0.0" } }, - "eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" - }, - "emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" - }, "env-paths": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-3.0.0.tgz", @@ -2592,12 +2041,12 @@ } }, "file-entry-cache": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", - "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-9.0.0.tgz", + "integrity": "sha512-6MgEugi8p2tiUhqO7GnPsmbCCzj0YRCwwaTbpGRyKZesjRSzkqkAE9fPp7V2yMs5hwfgbQLgdvSSkGNg1s5Uvw==", "dev": true, "requires": { - "flat-cache": "^4.0.0" + "flat-cache": "^5.0.0" } }, "fill-range": { @@ -2616,30 +2065,27 @@ "dev": true }, "flat-cache": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.0.tgz", - "integrity": "sha512-EryKbCE/wxpxKniQlyas6PY1I9vwtF3uCBweX+N8KYTCn3Y12RTGtQAJ/bd5pl7kxUAc8v/R3Ake/N17OZiFqA==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-5.0.0.tgz", + "integrity": "sha512-JrqFmyUl2PnPi1OvLyTVHnQvwQ0S+e6lGSwu8OkAZlSaNIZciTY2H/cOOROxsBA1m/LZNHDsqAgDZt6akWcjsQ==", "dev": true, "requires": { - "flatted": "^3.2.9", - "keyv": "^4.5.4", - "rimraf": "^5.0.5" + "flatted": "^3.3.1", + "keyv": "^4.5.4" } }, "flatted": { - "version": "3.2.9", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.9.tgz", - "integrity": "sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz", + "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==", "dev": true }, - "foreground-child": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", - "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", - "requires": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - } + "fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "optional": true }, "gensequence": { "version": "7.0.0", @@ -2653,18 +2099,6 
@@ "integrity": "sha512-dVKBjfWisLAicarI2Sf+JuBE/DghV4UzNAVe9yhEJuzeREd3JhOTE9cUaJTeSa77fsbQUK3pcOpJfM59+VKZaA==", "dev": true }, - "glob": { - "version": "10.3.10", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.10.tgz", - "integrity": "sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g==", - "requires": { - "foreground-child": "^3.1.0", - "jackspeak": "^2.3.5", - "minimatch": "^9.0.1", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", - "path-scurry": "^1.10.1" - } - }, "glob-parent": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", @@ -2734,11 +2168,6 @@ "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", "dev": true }, - "is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" - }, "is-glob": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", @@ -2754,20 +2183,6 @@ "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "dev": true }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" - }, - "jackspeak": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz", - "integrity": "sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==", - "requires": { - "@isaacs/cliui": "^8.0.2", - "@pkgjs/parseargs": "^0.11.0" - } - }, "json-buffer": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", @@ -2799,33 +2214,6 @@ "picomatch": "^2.3.1" } }, - "minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "requires": { - "brace-expansion": "^2.0.1" - } - }, - "minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==" - }, - "minizlib": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.1.tgz", - "integrity": "sha512-umcy022ILvb5/3Djuu8LWeqUa8D68JaBzlttKeMWen48SjabqS3iY5w/vzeMzMUNhLDifyhbOwKDSznB1vvrwg==", - "requires": { - "minipass": "^7.0.4", - "rimraf": "^5.0.5" - } - }, - "mkdirp": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", - "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==" - }, "parent-module": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-2.0.0.tgz", @@ -2835,27 +2223,6 @@ "callsites": "^3.1.0" } }, - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" - }, - "path-scurry": { - "version": "1.10.1", - "resolved": 
"https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.1.tgz", - "integrity": "sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ==", - "requires": { - "lru-cache": "^9.1.1 || ^10.0.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "dependencies": { - "lru-cache": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.1.0.tgz", - "integrity": "sha512-/1clY/ui8CzjKFyjdvwPWJUYKiFVXG2I2cY0ssG7h4+hwk+XOIX7ZSG9Q7TW8TW3Kp3BUSqgFWBLgL4PJ+Blag==" - } - } - }, "picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", @@ -2863,10 +2230,13 @@ "dev": true }, "pyright": { - "version": "1.1.223", - "resolved": "https://registry.npmjs.org/pyright/-/pyright-1.1.223.tgz", - "integrity": "sha512-EGLKMQRmSkKNe/eDy6MEkqq5lF91C5MCMYxvmCgKGUjZHIa+HByJIiRY2cOTm7g2CAv0eE7Ift701VxLM5Q0iQ==", - "dev": true + "version": "1.1.377", + "resolved": "https://registry.npmjs.org/pyright/-/pyright-1.1.377.tgz", + "integrity": "sha512-y6ENYuyZXTczPnPWZnqx78pE+ZgyIotEas2M/LFRTq3EfbgVk84EcvuSKLIy2DJeDKjKDxVP/LVmDNHabljD3g==", + "dev": true, + "requires": { + "fsevents": "~2.3.3" + } }, "queue-microtask": { "version": "1.2.3", @@ -2892,14 +2262,6 @@ "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", "dev": true }, - "rimraf": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.5.tgz", - "integrity": "sha512-CqDakW+hMe/Bz202FPEymy68P+G50RfMQK+Qo5YUqc9SPipvbGjCGKd0RSKEelbsfQuw3g5NZDSrlZZAJurH1A==", - "requires": { - "glob": "^10.3.7" - } - }, "run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -2910,112 +2272,20 @@ } }, "semver": { - "version": "7.6.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", - "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "dev": true }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" - }, - "signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==" - }, - "string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "requires": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - } - }, - "string-width-cjs": { - "version": "npm:string-width@4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": 
"sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } - } - } - }, "strip-ansi": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, "requires": { "ansi-regex": "^6.0.1" } }, - "strip-ansi-cjs": { - "version": "npm:strip-ansi@6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - } - } - }, - "tar": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.0.tgz", - "integrity": "sha512-XQs0S8fuAkQWuqhDeCdMlJXDX80D7EOVLDPVFkna9yQfzS+PHKgfxcei0jf6/+QAWcjqrnC8uM3fSAnrQl+XYg==", - "requires": { - "@isaacs/fs-minipass": "^4.0.0", - "chownr": "^3.0.0", - "minipass": "^7.1.2", - "minizlib": "^3.0.1", - "mkdirp": "^3.0.1", - "yallist": "^5.0.0" - }, - "dependencies": { - "yallist": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", - "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==" - } - } - }, "to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -3026,9 +2296,9 @@ } }, "vscode-languageserver-textdocument": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.11.tgz", - "integrity": "sha512-X+8T3GoiwTVlJbicx/sIAF+yuJAqz8VvwJyoMVhwEMoEKE/fkDmrqUgDMyBECcM2A2frVZIUj5HI/ErRXCfOeA==", + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz", + "integrity": "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==", "dev": true }, "vscode-uri": { @@ -3037,72 +2307,6 @@ "integrity": "sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==", "dev": true }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": 
"sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "requires": { - "isexe": "^2.0.0" - } - }, - "wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "requires": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - } - }, - "wrap-ansi-cjs": { - "version": "npm:wrap-ansi@7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "requires": { - "color-convert": "^2.0.1" - } - }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } - } - } - }, "xdg-basedir": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-5.1.0.tgz", @@ -3110,9 +2314,9 @@ "dev": true }, "yaml": { - "version": "2.4.5", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.5.tgz", - "integrity": "sha512-aBx2bnqDzVOyNKfsysjA2ms5ZlnjSAW2eG3/L5G/CSujfjLJTJsEw1bGw8kCf04KodQWk1pxlGnZ56CRxiawmg==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.5.0.tgz", + "integrity": "sha512-2wWLbGbYDiSqqIKoPjar3MPgB94ErzCtrNE1FdqGuaO0pi2JGjmE8aW8TDZwzU7vuxcGRdL/4gPQwQ7hD5AMSw==", "dev": true } } diff --git a/package.json b/package.json index cf4f7ec7b..89ad83096 100644 --- a/package.json +++ b/package.json @@ -1,65 +1,8 @@ { - "name": "runway", - "description": "Simplify infrastructure/app testing/deployment", - "main": "NA", - "scripts": { - "postinstall": "node ./postinstall.js", - "preuninstall": "node ./preuninstall.js", - "py-type-check": "pyright --venv-path ./" - }, - "files": [ - "src/osx/*", - "src/linux/*", - "src/windows/*", - "postinstall.js", - "preuninstall.js" - ], - "repository": { - "type": "git", - "url": "https://github.com/onicagroup/runway" - }, - "keywords": [ - "aws", - "ci", - "cd", - "cli", - "test", - "tool", - "serverless", - "terraform", - "cloudformation", - "cdk" 
- ], - "author": { - "name": "Onica Group LLC", - "email": "opensource@onica.com", - "url": "https://onica.com" - }, - "license": "Apache-2.0", - "licenses": [ - { - "type": "Apache-2.0", - "url": "http://www.apache.org/licenses/LICENSE-2.0" - } - ], - "bugs": { - "url": "https://github.com/onicagroup/runway/issues" - }, - "homepage": "https://github.com/onicagroup/runway", - "os": [ - "darwin", - "linux", - "win32" - ], - "version": "2.0.0-dev", - "dependencies": { - "tar": "^7.2.0" - }, - "publishConfig": { - "access": "public" - }, "devDependencies": { - "cspell": "^8.10.0", - "pyright": "^1.1.223" - } + "cspell": "^8.14.2", + "pyright": "^1.1.377" + }, + "name": "runway", + "version": "0.0.0" } diff --git a/poetry.lock b/poetry.lock index 457bbe80a..15b701007 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,63 +1,45 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "alabaster" -version = "0.7.13" -description = "A configurable sidebar-enabled Sphinx theme" +version = "0.7.16" +description = "A light, configurable Sphinx theme" optional = false -python-versions = ">=3.6" -files = [ - {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, - {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, -] - -[[package]] -name = "altgraph" -version = "0.17.4" -description = "Python graph (network) package" -optional = false -python-versions = "*" +python-versions = ">=3.9" files = [ - {file = "altgraph-0.17.4-py2.py3-none-any.whl", hash = "sha256:642743b4750de17e655e6711601b077bc6598dbfa3ba5fa2b2a35ce12b508dff"}, - {file = "altgraph-0.17.4.tar.gz", hash = "sha256:1b5afbb98f6c4dcadb2e2ae6ab9fa994bbb8c1d75f4fa96d340f9437ae454406"}, + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, ] [[package]] -name = "astroid" -version = "2.15.5" -description = "An abstract syntax tree for Python with inference support." 
+name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" optional = false -python-versions = ">=3.7.2" +python-versions = ">=3.8" files = [ - {file = "astroid-2.15.5-py3-none-any.whl", hash = "sha256:078e5212f9885fa85fbb0cf0101978a336190aadea6e13305409d099f71b2324"}, - {file = "astroid-2.15.5.tar.gz", hash = "sha256:1039262575027b441137ab4a62a793a9b43defb42c32d5670f38686207cd780f"}, -] - -[package.dependencies] -lazy-object-proxy = ">=1.4.0" -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} -wrapt = [ - {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, - {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] [[package]] name = "attrs" -version = "22.2.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "awacs" @@ -92,44 +74,68 @@ dev = ["black (==23.3.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverles [[package]] name = "babel" -version = "2.12.1" +version = "2.16.0" description = "Internationalization utilities" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +files = [ + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = 
"sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" files = [ - {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, - {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, ] +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + [[package]] name = "black" -version = "24.4.2" +version = "24.8.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, - {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, - {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, - {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, - 
{file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, - {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, - {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, - {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, - {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, + {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, + {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, + {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, + {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, + {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, + {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, + {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, + {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, + {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, + {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, + {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, + {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, + {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, + {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, + {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, + {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, + {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, + {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, + {file = 
"black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, + {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, + {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, + {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, ] [package.dependencies] @@ -149,17 +155,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.34.139" +version = "1.35.2" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.34.139-py3-none-any.whl", hash = "sha256:98b2a12bcb30e679fa9f60fc74145a39db5ec2ca7b7c763f42896e3bd9b3a38d"}, - {file = "boto3-1.34.139.tar.gz", hash = "sha256:32b99f0d76ec81fdca287ace2c9744a2eb8b92cb62bf4d26d52a4f516b63a6bf"}, + {file = "boto3-1.35.2-py3-none-any.whl", hash = "sha256:c2f0837a259002489e59d1c30008791e3b3bb59e30e48c64e1d2d270147a4549"}, + {file = "boto3-1.35.2.tar.gz", hash = "sha256:cbf197ce28f04bc1ffa1db0aa26a1903d9bfa57a490f70537932e84367cdd15b"}, ] [package.dependencies] -botocore = ">=1.34.139,<1.35.0" +botocore = ">=1.35.2,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -168,434 +174,435 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "boto3-stubs" -version = "1.34.139" -description = "Type annotations for boto3 1.34.139 generated with mypy-boto3-builder 7.25.0" +version = "1.35.2" +description = "Type annotations for boto3 1.35.2 generated with mypy-boto3-builder 7.26.0" optional = false python-versions = ">=3.8" files = [ - {file = "boto3_stubs-1.34.139-py3-none-any.whl", hash = "sha256:ad2b935bfed068c9637bcb4e4c603d373ca8c21df6910089a4efa3faafaefcd7"}, - {file = "boto3_stubs-1.34.139.tar.gz", hash = "sha256:311b5ea157ff0178f3a9583eae78822170467afb874ba78621634db4e74e7b36"}, + {file = "boto3_stubs-1.35.2-py3-none-any.whl", hash = "sha256:b86347f84329ee616a5c583c6087f3708e3166d325f1600d09117db07875262a"}, + {file = "boto3_stubs-1.35.2.tar.gz", hash = "sha256:3b06987af5e125e35c61d3ee530cafeda8e63e45075349aaf783419af52c5587"}, ] [package.dependencies] botocore-stubs = "*" -mypy-boto3-acm = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"acm\""} -mypy-boto3-cloudformation = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"cloudformation\""} -mypy-boto3-cloudfront = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"cloudfront\""} -mypy-boto3-cognito-idp = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"cognito-idp\""} -mypy-boto3-dynamodb = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"dynamodb\""} -mypy-boto3-ec2 = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"ec2\""} -mypy-boto3-ecr = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"ecr\""} -mypy-boto3-ecs = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"ecs\""} -mypy-boto3-iam = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"iam\""} -mypy-boto3-kms = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"kms\""} -mypy-boto3-lambda = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"lambda\""} -mypy-boto3-route53 = {version = ">=1.34.0,<1.35.0", optional = 
true, markers = "extra == \"route53\""} -mypy-boto3-s3 = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"s3\""} -mypy-boto3-ssm = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"ssm\""} -mypy-boto3-sts = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"sts\""} +mypy-boto3-acm = {version = ">=1.35.0,<1.36.0", optional = true, markers = "extra == \"acm\""} +mypy-boto3-cloudformation = {version = ">=1.35.0,<1.36.0", optional = true, markers = "extra == \"cloudformation\""} +mypy-boto3-cloudfront = {version = ">=1.35.0,<1.36.0", optional = true, markers = "extra == \"cloudfront\""} +mypy-boto3-cognito-idp = {version = ">=1.35.0,<1.36.0", optional = true, markers = "extra == \"cognito-idp\""} +mypy-boto3-dynamodb = {version = ">=1.35.0,<1.36.0", optional = true, markers = "extra == \"dynamodb\""} +mypy-boto3-ec2 = {version = ">=1.35.0,<1.36.0", optional = true, markers = "extra == \"ec2\""} +mypy-boto3-ecr = {version = ">=1.35.0,<1.36.0", optional = true, markers = "extra == \"ecr\""} +mypy-boto3-ecs = {version = ">=1.35.0,<1.36.0", optional = true, markers = "extra == \"ecs\""} +mypy-boto3-iam = {version = ">=1.35.0,<1.36.0", optional = true, markers = "extra == \"iam\""} +mypy-boto3-kms = {version = ">=1.35.0,<1.36.0", optional = true, markers = "extra == \"kms\""} +mypy-boto3-lambda = {version = ">=1.35.0,<1.36.0", optional = true, markers = "extra == \"lambda\""} +mypy-boto3-route53 = {version = ">=1.35.0,<1.36.0", optional = true, markers = "extra == \"route53\""} +mypy-boto3-s3 = {version = ">=1.35.0,<1.36.0", optional = true, markers = "extra == \"s3\""} +mypy-boto3-ssm = {version = ">=1.35.0,<1.36.0", optional = true, markers = "extra == \"ssm\""} +mypy-boto3-sts = {version = ">=1.35.0,<1.36.0", optional = true, markers = "extra == \"sts\""} types-s3transfer = "*" typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [package.extras] -accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.34.0,<1.35.0)"] -account = ["mypy-boto3-account (>=1.34.0,<1.35.0)"] -acm = ["mypy-boto3-acm (>=1.34.0,<1.35.0)"] -acm-pca = ["mypy-boto3-acm-pca (>=1.34.0,<1.35.0)"] -all = ["mypy-boto3-accessanalyzer (>=1.34.0,<1.35.0)", "mypy-boto3-account (>=1.34.0,<1.35.0)", "mypy-boto3-acm (>=1.34.0,<1.35.0)", "mypy-boto3-acm-pca (>=1.34.0,<1.35.0)", "mypy-boto3-amp (>=1.34.0,<1.35.0)", "mypy-boto3-amplify (>=1.34.0,<1.35.0)", "mypy-boto3-amplifybackend (>=1.34.0,<1.35.0)", "mypy-boto3-amplifyuibuilder (>=1.34.0,<1.35.0)", "mypy-boto3-apigateway (>=1.34.0,<1.35.0)", "mypy-boto3-apigatewaymanagementapi (>=1.34.0,<1.35.0)", "mypy-boto3-apigatewayv2 (>=1.34.0,<1.35.0)", "mypy-boto3-appconfig (>=1.34.0,<1.35.0)", "mypy-boto3-appconfigdata (>=1.34.0,<1.35.0)", "mypy-boto3-appfabric (>=1.34.0,<1.35.0)", "mypy-boto3-appflow (>=1.34.0,<1.35.0)", "mypy-boto3-appintegrations (>=1.34.0,<1.35.0)", "mypy-boto3-application-autoscaling (>=1.34.0,<1.35.0)", "mypy-boto3-application-insights (>=1.34.0,<1.35.0)", "mypy-boto3-application-signals (>=1.34.0,<1.35.0)", "mypy-boto3-applicationcostprofiler (>=1.34.0,<1.35.0)", "mypy-boto3-appmesh (>=1.34.0,<1.35.0)", "mypy-boto3-apprunner (>=1.34.0,<1.35.0)", "mypy-boto3-appstream (>=1.34.0,<1.35.0)", "mypy-boto3-appsync (>=1.34.0,<1.35.0)", "mypy-boto3-apptest (>=1.34.0,<1.35.0)", "mypy-boto3-arc-zonal-shift (>=1.34.0,<1.35.0)", "mypy-boto3-artifact (>=1.34.0,<1.35.0)", "mypy-boto3-athena (>=1.34.0,<1.35.0)", "mypy-boto3-auditmanager (>=1.34.0,<1.35.0)", "mypy-boto3-autoscaling (>=1.34.0,<1.35.0)", 
"mypy-boto3-autoscaling-plans (>=1.34.0,<1.35.0)", "mypy-boto3-b2bi (>=1.34.0,<1.35.0)", "mypy-boto3-backup (>=1.34.0,<1.35.0)", "mypy-boto3-backup-gateway (>=1.34.0,<1.35.0)", "mypy-boto3-batch (>=1.34.0,<1.35.0)", "mypy-boto3-bcm-data-exports (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-agent (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-agent-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-billingconductor (>=1.34.0,<1.35.0)", "mypy-boto3-braket (>=1.34.0,<1.35.0)", "mypy-boto3-budgets (>=1.34.0,<1.35.0)", "mypy-boto3-ce (>=1.34.0,<1.35.0)", "mypy-boto3-chatbot (>=1.34.0,<1.35.0)", "mypy-boto3-chime (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-identity (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-meetings (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-messaging (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-voice (>=1.34.0,<1.35.0)", "mypy-boto3-cleanrooms (>=1.34.0,<1.35.0)", "mypy-boto3-cleanroomsml (>=1.34.0,<1.35.0)", "mypy-boto3-cloud9 (>=1.34.0,<1.35.0)", "mypy-boto3-cloudcontrol (>=1.34.0,<1.35.0)", "mypy-boto3-clouddirectory (>=1.34.0,<1.35.0)", "mypy-boto3-cloudformation (>=1.34.0,<1.35.0)", "mypy-boto3-cloudfront (>=1.34.0,<1.35.0)", "mypy-boto3-cloudfront-keyvaluestore (>=1.34.0,<1.35.0)", "mypy-boto3-cloudhsm (>=1.34.0,<1.35.0)", "mypy-boto3-cloudhsmv2 (>=1.34.0,<1.35.0)", "mypy-boto3-cloudsearch (>=1.34.0,<1.35.0)", "mypy-boto3-cloudsearchdomain (>=1.34.0,<1.35.0)", "mypy-boto3-cloudtrail (>=1.34.0,<1.35.0)", "mypy-boto3-cloudtrail-data (>=1.34.0,<1.35.0)", "mypy-boto3-cloudwatch (>=1.34.0,<1.35.0)", "mypy-boto3-codeartifact (>=1.34.0,<1.35.0)", "mypy-boto3-codebuild (>=1.34.0,<1.35.0)", "mypy-boto3-codecatalyst (>=1.34.0,<1.35.0)", "mypy-boto3-codecommit (>=1.34.0,<1.35.0)", "mypy-boto3-codeconnections (>=1.34.0,<1.35.0)", "mypy-boto3-codedeploy (>=1.34.0,<1.35.0)", "mypy-boto3-codeguru-reviewer (>=1.34.0,<1.35.0)", "mypy-boto3-codeguru-security (>=1.34.0,<1.35.0)", "mypy-boto3-codeguruprofiler (>=1.34.0,<1.35.0)", "mypy-boto3-codepipeline (>=1.34.0,<1.35.0)", "mypy-boto3-codestar (>=1.34.0,<1.35.0)", "mypy-boto3-codestar-connections (>=1.34.0,<1.35.0)", "mypy-boto3-codestar-notifications (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-identity (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-idp (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-sync (>=1.34.0,<1.35.0)", "mypy-boto3-comprehend (>=1.34.0,<1.35.0)", "mypy-boto3-comprehendmedical (>=1.34.0,<1.35.0)", "mypy-boto3-compute-optimizer (>=1.34.0,<1.35.0)", "mypy-boto3-config (>=1.34.0,<1.35.0)", "mypy-boto3-connect (>=1.34.0,<1.35.0)", "mypy-boto3-connect-contact-lens (>=1.34.0,<1.35.0)", "mypy-boto3-connectcampaigns (>=1.34.0,<1.35.0)", "mypy-boto3-connectcases (>=1.34.0,<1.35.0)", "mypy-boto3-connectparticipant (>=1.34.0,<1.35.0)", "mypy-boto3-controlcatalog (>=1.34.0,<1.35.0)", "mypy-boto3-controltower (>=1.34.0,<1.35.0)", "mypy-boto3-cost-optimization-hub (>=1.34.0,<1.35.0)", "mypy-boto3-cur (>=1.34.0,<1.35.0)", "mypy-boto3-customer-profiles (>=1.34.0,<1.35.0)", "mypy-boto3-databrew (>=1.34.0,<1.35.0)", "mypy-boto3-dataexchange (>=1.34.0,<1.35.0)", "mypy-boto3-datapipeline (>=1.34.0,<1.35.0)", "mypy-boto3-datasync (>=1.34.0,<1.35.0)", "mypy-boto3-datazone (>=1.34.0,<1.35.0)", "mypy-boto3-dax (>=1.34.0,<1.35.0)", "mypy-boto3-deadline (>=1.34.0,<1.35.0)", "mypy-boto3-detective (>=1.34.0,<1.35.0)", "mypy-boto3-devicefarm (>=1.34.0,<1.35.0)", "mypy-boto3-devops-guru (>=1.34.0,<1.35.0)", "mypy-boto3-directconnect 
(>=1.34.0,<1.35.0)", "mypy-boto3-discovery (>=1.34.0,<1.35.0)", "mypy-boto3-dlm (>=1.34.0,<1.35.0)", "mypy-boto3-dms (>=1.34.0,<1.35.0)", "mypy-boto3-docdb (>=1.34.0,<1.35.0)", "mypy-boto3-docdb-elastic (>=1.34.0,<1.35.0)", "mypy-boto3-drs (>=1.34.0,<1.35.0)", "mypy-boto3-ds (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodb (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodbstreams (>=1.34.0,<1.35.0)", "mypy-boto3-ebs (>=1.34.0,<1.35.0)", "mypy-boto3-ec2 (>=1.34.0,<1.35.0)", "mypy-boto3-ec2-instance-connect (>=1.34.0,<1.35.0)", "mypy-boto3-ecr (>=1.34.0,<1.35.0)", "mypy-boto3-ecr-public (>=1.34.0,<1.35.0)", "mypy-boto3-ecs (>=1.34.0,<1.35.0)", "mypy-boto3-efs (>=1.34.0,<1.35.0)", "mypy-boto3-eks (>=1.34.0,<1.35.0)", "mypy-boto3-eks-auth (>=1.34.0,<1.35.0)", "mypy-boto3-elastic-inference (>=1.34.0,<1.35.0)", "mypy-boto3-elasticache (>=1.34.0,<1.35.0)", "mypy-boto3-elasticbeanstalk (>=1.34.0,<1.35.0)", "mypy-boto3-elastictranscoder (>=1.34.0,<1.35.0)", "mypy-boto3-elb (>=1.34.0,<1.35.0)", "mypy-boto3-elbv2 (>=1.34.0,<1.35.0)", "mypy-boto3-emr (>=1.34.0,<1.35.0)", "mypy-boto3-emr-containers (>=1.34.0,<1.35.0)", "mypy-boto3-emr-serverless (>=1.34.0,<1.35.0)", "mypy-boto3-entityresolution (>=1.34.0,<1.35.0)", "mypy-boto3-es (>=1.34.0,<1.35.0)", "mypy-boto3-events (>=1.34.0,<1.35.0)", "mypy-boto3-evidently (>=1.34.0,<1.35.0)", "mypy-boto3-finspace (>=1.34.0,<1.35.0)", "mypy-boto3-finspace-data (>=1.34.0,<1.35.0)", "mypy-boto3-firehose (>=1.34.0,<1.35.0)", "mypy-boto3-fis (>=1.34.0,<1.35.0)", "mypy-boto3-fms (>=1.34.0,<1.35.0)", "mypy-boto3-forecast (>=1.34.0,<1.35.0)", "mypy-boto3-forecastquery (>=1.34.0,<1.35.0)", "mypy-boto3-frauddetector (>=1.34.0,<1.35.0)", "mypy-boto3-freetier (>=1.34.0,<1.35.0)", "mypy-boto3-fsx (>=1.34.0,<1.35.0)", "mypy-boto3-gamelift (>=1.34.0,<1.35.0)", "mypy-boto3-glacier (>=1.34.0,<1.35.0)", "mypy-boto3-globalaccelerator (>=1.34.0,<1.35.0)", "mypy-boto3-glue (>=1.34.0,<1.35.0)", "mypy-boto3-grafana (>=1.34.0,<1.35.0)", "mypy-boto3-greengrass (>=1.34.0,<1.35.0)", "mypy-boto3-greengrassv2 (>=1.34.0,<1.35.0)", "mypy-boto3-groundstation (>=1.34.0,<1.35.0)", "mypy-boto3-guardduty (>=1.34.0,<1.35.0)", "mypy-boto3-health (>=1.34.0,<1.35.0)", "mypy-boto3-healthlake (>=1.34.0,<1.35.0)", "mypy-boto3-iam (>=1.34.0,<1.35.0)", "mypy-boto3-identitystore (>=1.34.0,<1.35.0)", "mypy-boto3-imagebuilder (>=1.34.0,<1.35.0)", "mypy-boto3-importexport (>=1.34.0,<1.35.0)", "mypy-boto3-inspector (>=1.34.0,<1.35.0)", "mypy-boto3-inspector-scan (>=1.34.0,<1.35.0)", "mypy-boto3-inspector2 (>=1.34.0,<1.35.0)", "mypy-boto3-internetmonitor (>=1.34.0,<1.35.0)", "mypy-boto3-iot (>=1.34.0,<1.35.0)", "mypy-boto3-iot-data (>=1.34.0,<1.35.0)", "mypy-boto3-iot-jobs-data (>=1.34.0,<1.35.0)", "mypy-boto3-iot1click-devices (>=1.34.0,<1.35.0)", "mypy-boto3-iot1click-projects (>=1.34.0,<1.35.0)", "mypy-boto3-iotanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-iotdeviceadvisor (>=1.34.0,<1.35.0)", "mypy-boto3-iotevents (>=1.34.0,<1.35.0)", "mypy-boto3-iotevents-data (>=1.34.0,<1.35.0)", "mypy-boto3-iotfleethub (>=1.34.0,<1.35.0)", "mypy-boto3-iotfleetwise (>=1.34.0,<1.35.0)", "mypy-boto3-iotsecuretunneling (>=1.34.0,<1.35.0)", "mypy-boto3-iotsitewise (>=1.34.0,<1.35.0)", "mypy-boto3-iotthingsgraph (>=1.34.0,<1.35.0)", "mypy-boto3-iottwinmaker (>=1.34.0,<1.35.0)", "mypy-boto3-iotwireless (>=1.34.0,<1.35.0)", "mypy-boto3-ivs (>=1.34.0,<1.35.0)", "mypy-boto3-ivs-realtime (>=1.34.0,<1.35.0)", "mypy-boto3-ivschat (>=1.34.0,<1.35.0)", "mypy-boto3-kafka (>=1.34.0,<1.35.0)", "mypy-boto3-kafkaconnect (>=1.34.0,<1.35.0)", "mypy-boto3-kendra 
(>=1.34.0,<1.35.0)", "mypy-boto3-kendra-ranking (>=1.34.0,<1.35.0)", "mypy-boto3-keyspaces (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-archived-media (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-media (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-signaling (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisvideo (>=1.34.0,<1.35.0)", "mypy-boto3-kms (>=1.34.0,<1.35.0)", "mypy-boto3-lakeformation (>=1.34.0,<1.35.0)", "mypy-boto3-lambda (>=1.34.0,<1.35.0)", "mypy-boto3-launch-wizard (>=1.34.0,<1.35.0)", "mypy-boto3-lex-models (>=1.34.0,<1.35.0)", "mypy-boto3-lex-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-lexv2-models (>=1.34.0,<1.35.0)", "mypy-boto3-lexv2-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.34.0,<1.35.0)", "mypy-boto3-lightsail (>=1.34.0,<1.35.0)", "mypy-boto3-location (>=1.34.0,<1.35.0)", "mypy-boto3-logs (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutequipment (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutmetrics (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutvision (>=1.34.0,<1.35.0)", "mypy-boto3-m2 (>=1.34.0,<1.35.0)", "mypy-boto3-machinelearning (>=1.34.0,<1.35.0)", "mypy-boto3-macie2 (>=1.34.0,<1.35.0)", "mypy-boto3-mailmanager (>=1.34.0,<1.35.0)", "mypy-boto3-managedblockchain (>=1.34.0,<1.35.0)", "mypy-boto3-managedblockchain-query (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-agreement (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-catalog (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-deployment (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-entitlement (>=1.34.0,<1.35.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-mediaconnect (>=1.34.0,<1.35.0)", "mypy-boto3-mediaconvert (>=1.34.0,<1.35.0)", "mypy-boto3-medialive (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackage (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackage-vod (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackagev2 (>=1.34.0,<1.35.0)", "mypy-boto3-mediastore (>=1.34.0,<1.35.0)", "mypy-boto3-mediastore-data (>=1.34.0,<1.35.0)", "mypy-boto3-mediatailor (>=1.34.0,<1.35.0)", "mypy-boto3-medical-imaging (>=1.34.0,<1.35.0)", "mypy-boto3-memorydb (>=1.34.0,<1.35.0)", "mypy-boto3-meteringmarketplace (>=1.34.0,<1.35.0)", "mypy-boto3-mgh (>=1.34.0,<1.35.0)", "mypy-boto3-mgn (>=1.34.0,<1.35.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhub-config (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhuborchestrator (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhubstrategy (>=1.34.0,<1.35.0)", "mypy-boto3-mobile (>=1.34.0,<1.35.0)", "mypy-boto3-mq (>=1.34.0,<1.35.0)", "mypy-boto3-mturk (>=1.34.0,<1.35.0)", "mypy-boto3-mwaa (>=1.34.0,<1.35.0)", "mypy-boto3-neptune (>=1.34.0,<1.35.0)", "mypy-boto3-neptune-graph (>=1.34.0,<1.35.0)", "mypy-boto3-neptunedata (>=1.34.0,<1.35.0)", "mypy-boto3-network-firewall (>=1.34.0,<1.35.0)", "mypy-boto3-networkmanager (>=1.34.0,<1.35.0)", "mypy-boto3-networkmonitor (>=1.34.0,<1.35.0)", "mypy-boto3-nimble (>=1.34.0,<1.35.0)", "mypy-boto3-oam (>=1.34.0,<1.35.0)", "mypy-boto3-omics (>=1.34.0,<1.35.0)", "mypy-boto3-opensearch (>=1.34.0,<1.35.0)", "mypy-boto3-opensearchserverless (>=1.34.0,<1.35.0)", "mypy-boto3-opsworks (>=1.34.0,<1.35.0)", "mypy-boto3-opsworkscm (>=1.34.0,<1.35.0)", "mypy-boto3-organizations (>=1.34.0,<1.35.0)", 
"mypy-boto3-osis (>=1.34.0,<1.35.0)", "mypy-boto3-outposts (>=1.34.0,<1.35.0)", "mypy-boto3-panorama (>=1.34.0,<1.35.0)", "mypy-boto3-payment-cryptography (>=1.34.0,<1.35.0)", "mypy-boto3-payment-cryptography-data (>=1.34.0,<1.35.0)", "mypy-boto3-pca-connector-ad (>=1.34.0,<1.35.0)", "mypy-boto3-pca-connector-scep (>=1.34.0,<1.35.0)", "mypy-boto3-personalize (>=1.34.0,<1.35.0)", "mypy-boto3-personalize-events (>=1.34.0,<1.35.0)", "mypy-boto3-personalize-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-pi (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-email (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-sms-voice (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.34.0,<1.35.0)", "mypy-boto3-pipes (>=1.34.0,<1.35.0)", "mypy-boto3-polly (>=1.34.0,<1.35.0)", "mypy-boto3-pricing (>=1.34.0,<1.35.0)", "mypy-boto3-privatenetworks (>=1.34.0,<1.35.0)", "mypy-boto3-proton (>=1.34.0,<1.35.0)", "mypy-boto3-qbusiness (>=1.34.0,<1.35.0)", "mypy-boto3-qconnect (>=1.34.0,<1.35.0)", "mypy-boto3-qldb (>=1.34.0,<1.35.0)", "mypy-boto3-qldb-session (>=1.34.0,<1.35.0)", "mypy-boto3-quicksight (>=1.34.0,<1.35.0)", "mypy-boto3-ram (>=1.34.0,<1.35.0)", "mypy-boto3-rbin (>=1.34.0,<1.35.0)", "mypy-boto3-rds (>=1.34.0,<1.35.0)", "mypy-boto3-rds-data (>=1.34.0,<1.35.0)", "mypy-boto3-redshift (>=1.34.0,<1.35.0)", "mypy-boto3-redshift-data (>=1.34.0,<1.35.0)", "mypy-boto3-redshift-serverless (>=1.34.0,<1.35.0)", "mypy-boto3-rekognition (>=1.34.0,<1.35.0)", "mypy-boto3-repostspace (>=1.34.0,<1.35.0)", "mypy-boto3-resiliencehub (>=1.34.0,<1.35.0)", "mypy-boto3-resource-explorer-2 (>=1.34.0,<1.35.0)", "mypy-boto3-resource-groups (>=1.34.0,<1.35.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.34.0,<1.35.0)", "mypy-boto3-robomaker (>=1.34.0,<1.35.0)", "mypy-boto3-rolesanywhere (>=1.34.0,<1.35.0)", "mypy-boto3-route53 (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-cluster (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-control-config (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-readiness (>=1.34.0,<1.35.0)", "mypy-boto3-route53domains (>=1.34.0,<1.35.0)", "mypy-boto3-route53profiles (>=1.34.0,<1.35.0)", "mypy-boto3-route53resolver (>=1.34.0,<1.35.0)", "mypy-boto3-rum (>=1.34.0,<1.35.0)", "mypy-boto3-s3 (>=1.34.0,<1.35.0)", "mypy-boto3-s3control (>=1.34.0,<1.35.0)", "mypy-boto3-s3outposts (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-edge (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-geospatial (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-metrics (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-savingsplans (>=1.34.0,<1.35.0)", "mypy-boto3-scheduler (>=1.34.0,<1.35.0)", "mypy-boto3-schemas (>=1.34.0,<1.35.0)", "mypy-boto3-sdb (>=1.34.0,<1.35.0)", "mypy-boto3-secretsmanager (>=1.34.0,<1.35.0)", "mypy-boto3-securityhub (>=1.34.0,<1.35.0)", "mypy-boto3-securitylake (>=1.34.0,<1.35.0)", "mypy-boto3-serverlessrepo (>=1.34.0,<1.35.0)", "mypy-boto3-service-quotas (>=1.34.0,<1.35.0)", "mypy-boto3-servicecatalog (>=1.34.0,<1.35.0)", "mypy-boto3-servicecatalog-appregistry (>=1.34.0,<1.35.0)", "mypy-boto3-servicediscovery (>=1.34.0,<1.35.0)", "mypy-boto3-ses (>=1.34.0,<1.35.0)", "mypy-boto3-sesv2 (>=1.34.0,<1.35.0)", "mypy-boto3-shield (>=1.34.0,<1.35.0)", "mypy-boto3-signer (>=1.34.0,<1.35.0)", "mypy-boto3-simspaceweaver (>=1.34.0,<1.35.0)", "mypy-boto3-sms (>=1.34.0,<1.35.0)", "mypy-boto3-sms-voice 
(>=1.34.0,<1.35.0)", "mypy-boto3-snow-device-management (>=1.34.0,<1.35.0)", "mypy-boto3-snowball (>=1.34.0,<1.35.0)", "mypy-boto3-sns (>=1.34.0,<1.35.0)", "mypy-boto3-sqs (>=1.34.0,<1.35.0)", "mypy-boto3-ssm (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-contacts (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-incidents (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-sap (>=1.34.0,<1.35.0)", "mypy-boto3-sso (>=1.34.0,<1.35.0)", "mypy-boto3-sso-admin (>=1.34.0,<1.35.0)", "mypy-boto3-sso-oidc (>=1.34.0,<1.35.0)", "mypy-boto3-stepfunctions (>=1.34.0,<1.35.0)", "mypy-boto3-storagegateway (>=1.34.0,<1.35.0)", "mypy-boto3-sts (>=1.34.0,<1.35.0)", "mypy-boto3-supplychain (>=1.34.0,<1.35.0)", "mypy-boto3-support (>=1.34.0,<1.35.0)", "mypy-boto3-support-app (>=1.34.0,<1.35.0)", "mypy-boto3-swf (>=1.34.0,<1.35.0)", "mypy-boto3-synthetics (>=1.34.0,<1.35.0)", "mypy-boto3-taxsettings (>=1.34.0,<1.35.0)", "mypy-boto3-textract (>=1.34.0,<1.35.0)", "mypy-boto3-timestream-influxdb (>=1.34.0,<1.35.0)", "mypy-boto3-timestream-query (>=1.34.0,<1.35.0)", "mypy-boto3-timestream-write (>=1.34.0,<1.35.0)", "mypy-boto3-tnb (>=1.34.0,<1.35.0)", "mypy-boto3-transcribe (>=1.34.0,<1.35.0)", "mypy-boto3-transfer (>=1.34.0,<1.35.0)", "mypy-boto3-translate (>=1.34.0,<1.35.0)", "mypy-boto3-trustedadvisor (>=1.34.0,<1.35.0)", "mypy-boto3-verifiedpermissions (>=1.34.0,<1.35.0)", "mypy-boto3-voice-id (>=1.34.0,<1.35.0)", "mypy-boto3-vpc-lattice (>=1.34.0,<1.35.0)", "mypy-boto3-waf (>=1.34.0,<1.35.0)", "mypy-boto3-waf-regional (>=1.34.0,<1.35.0)", "mypy-boto3-wafv2 (>=1.34.0,<1.35.0)", "mypy-boto3-wellarchitected (>=1.34.0,<1.35.0)", "mypy-boto3-wisdom (>=1.34.0,<1.35.0)", "mypy-boto3-workdocs (>=1.34.0,<1.35.0)", "mypy-boto3-worklink (>=1.34.0,<1.35.0)", "mypy-boto3-workmail (>=1.34.0,<1.35.0)", "mypy-boto3-workmailmessageflow (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces-thin-client (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces-web (>=1.34.0,<1.35.0)", "mypy-boto3-xray (>=1.34.0,<1.35.0)"] -amp = ["mypy-boto3-amp (>=1.34.0,<1.35.0)"] -amplify = ["mypy-boto3-amplify (>=1.34.0,<1.35.0)"] -amplifybackend = ["mypy-boto3-amplifybackend (>=1.34.0,<1.35.0)"] -amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.34.0,<1.35.0)"] -apigateway = ["mypy-boto3-apigateway (>=1.34.0,<1.35.0)"] -apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.34.0,<1.35.0)"] -apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.34.0,<1.35.0)"] -appconfig = ["mypy-boto3-appconfig (>=1.34.0,<1.35.0)"] -appconfigdata = ["mypy-boto3-appconfigdata (>=1.34.0,<1.35.0)"] -appfabric = ["mypy-boto3-appfabric (>=1.34.0,<1.35.0)"] -appflow = ["mypy-boto3-appflow (>=1.34.0,<1.35.0)"] -appintegrations = ["mypy-boto3-appintegrations (>=1.34.0,<1.35.0)"] -application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.34.0,<1.35.0)"] -application-insights = ["mypy-boto3-application-insights (>=1.34.0,<1.35.0)"] -application-signals = ["mypy-boto3-application-signals (>=1.34.0,<1.35.0)"] -applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.34.0,<1.35.0)"] -appmesh = ["mypy-boto3-appmesh (>=1.34.0,<1.35.0)"] -apprunner = ["mypy-boto3-apprunner (>=1.34.0,<1.35.0)"] -appstream = ["mypy-boto3-appstream (>=1.34.0,<1.35.0)"] -appsync = ["mypy-boto3-appsync (>=1.34.0,<1.35.0)"] -apptest = ["mypy-boto3-apptest (>=1.34.0,<1.35.0)"] -arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.34.0,<1.35.0)"] -artifact = ["mypy-boto3-artifact (>=1.34.0,<1.35.0)"] -athena = ["mypy-boto3-athena (>=1.34.0,<1.35.0)"] -auditmanager = 
["mypy-boto3-auditmanager (>=1.34.0,<1.35.0)"] -autoscaling = ["mypy-boto3-autoscaling (>=1.34.0,<1.35.0)"] -autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.34.0,<1.35.0)"] -b2bi = ["mypy-boto3-b2bi (>=1.34.0,<1.35.0)"] -backup = ["mypy-boto3-backup (>=1.34.0,<1.35.0)"] -backup-gateway = ["mypy-boto3-backup-gateway (>=1.34.0,<1.35.0)"] -batch = ["mypy-boto3-batch (>=1.34.0,<1.35.0)"] -bcm-data-exports = ["mypy-boto3-bcm-data-exports (>=1.34.0,<1.35.0)"] -bedrock = ["mypy-boto3-bedrock (>=1.34.0,<1.35.0)"] -bedrock-agent = ["mypy-boto3-bedrock-agent (>=1.34.0,<1.35.0)"] -bedrock-agent-runtime = ["mypy-boto3-bedrock-agent-runtime (>=1.34.0,<1.35.0)"] -bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.34.0,<1.35.0)"] -billingconductor = ["mypy-boto3-billingconductor (>=1.34.0,<1.35.0)"] -boto3 = ["boto3 (==1.34.139)", "botocore (==1.34.139)"] -braket = ["mypy-boto3-braket (>=1.34.0,<1.35.0)"] -budgets = ["mypy-boto3-budgets (>=1.34.0,<1.35.0)"] -ce = ["mypy-boto3-ce (>=1.34.0,<1.35.0)"] -chatbot = ["mypy-boto3-chatbot (>=1.34.0,<1.35.0)"] -chime = ["mypy-boto3-chime (>=1.34.0,<1.35.0)"] -chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.34.0,<1.35.0)"] -chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.34.0,<1.35.0)"] -chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.34.0,<1.35.0)"] -chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.34.0,<1.35.0)"] -chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.34.0,<1.35.0)"] -cleanrooms = ["mypy-boto3-cleanrooms (>=1.34.0,<1.35.0)"] -cleanroomsml = ["mypy-boto3-cleanroomsml (>=1.34.0,<1.35.0)"] -cloud9 = ["mypy-boto3-cloud9 (>=1.34.0,<1.35.0)"] -cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.34.0,<1.35.0)"] -clouddirectory = ["mypy-boto3-clouddirectory (>=1.34.0,<1.35.0)"] -cloudformation = ["mypy-boto3-cloudformation (>=1.34.0,<1.35.0)"] -cloudfront = ["mypy-boto3-cloudfront (>=1.34.0,<1.35.0)"] -cloudfront-keyvaluestore = ["mypy-boto3-cloudfront-keyvaluestore (>=1.34.0,<1.35.0)"] -cloudhsm = ["mypy-boto3-cloudhsm (>=1.34.0,<1.35.0)"] -cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.34.0,<1.35.0)"] -cloudsearch = ["mypy-boto3-cloudsearch (>=1.34.0,<1.35.0)"] -cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.34.0,<1.35.0)"] -cloudtrail = ["mypy-boto3-cloudtrail (>=1.34.0,<1.35.0)"] -cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.34.0,<1.35.0)"] -cloudwatch = ["mypy-boto3-cloudwatch (>=1.34.0,<1.35.0)"] -codeartifact = ["mypy-boto3-codeartifact (>=1.34.0,<1.35.0)"] -codebuild = ["mypy-boto3-codebuild (>=1.34.0,<1.35.0)"] -codecatalyst = ["mypy-boto3-codecatalyst (>=1.34.0,<1.35.0)"] -codecommit = ["mypy-boto3-codecommit (>=1.34.0,<1.35.0)"] -codeconnections = ["mypy-boto3-codeconnections (>=1.34.0,<1.35.0)"] -codedeploy = ["mypy-boto3-codedeploy (>=1.34.0,<1.35.0)"] -codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.34.0,<1.35.0)"] -codeguru-security = ["mypy-boto3-codeguru-security (>=1.34.0,<1.35.0)"] -codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.34.0,<1.35.0)"] -codepipeline = ["mypy-boto3-codepipeline (>=1.34.0,<1.35.0)"] -codestar = ["mypy-boto3-codestar (>=1.34.0,<1.35.0)"] -codestar-connections = ["mypy-boto3-codestar-connections (>=1.34.0,<1.35.0)"] -codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.34.0,<1.35.0)"] -cognito-identity = ["mypy-boto3-cognito-identity (>=1.34.0,<1.35.0)"] -cognito-idp = ["mypy-boto3-cognito-idp (>=1.34.0,<1.35.0)"] -cognito-sync = ["mypy-boto3-cognito-sync (>=1.34.0,<1.35.0)"] -comprehend = ["mypy-boto3-comprehend 
(>=1.34.0,<1.35.0)"] -comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.34.0,<1.35.0)"] -compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.34.0,<1.35.0)"] -config = ["mypy-boto3-config (>=1.34.0,<1.35.0)"] -connect = ["mypy-boto3-connect (>=1.34.0,<1.35.0)"] -connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.34.0,<1.35.0)"] -connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.34.0,<1.35.0)"] -connectcases = ["mypy-boto3-connectcases (>=1.34.0,<1.35.0)"] -connectparticipant = ["mypy-boto3-connectparticipant (>=1.34.0,<1.35.0)"] -controlcatalog = ["mypy-boto3-controlcatalog (>=1.34.0,<1.35.0)"] -controltower = ["mypy-boto3-controltower (>=1.34.0,<1.35.0)"] -cost-optimization-hub = ["mypy-boto3-cost-optimization-hub (>=1.34.0,<1.35.0)"] -cur = ["mypy-boto3-cur (>=1.34.0,<1.35.0)"] -customer-profiles = ["mypy-boto3-customer-profiles (>=1.34.0,<1.35.0)"] -databrew = ["mypy-boto3-databrew (>=1.34.0,<1.35.0)"] -dataexchange = ["mypy-boto3-dataexchange (>=1.34.0,<1.35.0)"] -datapipeline = ["mypy-boto3-datapipeline (>=1.34.0,<1.35.0)"] -datasync = ["mypy-boto3-datasync (>=1.34.0,<1.35.0)"] -datazone = ["mypy-boto3-datazone (>=1.34.0,<1.35.0)"] -dax = ["mypy-boto3-dax (>=1.34.0,<1.35.0)"] -deadline = ["mypy-boto3-deadline (>=1.34.0,<1.35.0)"] -detective = ["mypy-boto3-detective (>=1.34.0,<1.35.0)"] -devicefarm = ["mypy-boto3-devicefarm (>=1.34.0,<1.35.0)"] -devops-guru = ["mypy-boto3-devops-guru (>=1.34.0,<1.35.0)"] -directconnect = ["mypy-boto3-directconnect (>=1.34.0,<1.35.0)"] -discovery = ["mypy-boto3-discovery (>=1.34.0,<1.35.0)"] -dlm = ["mypy-boto3-dlm (>=1.34.0,<1.35.0)"] -dms = ["mypy-boto3-dms (>=1.34.0,<1.35.0)"] -docdb = ["mypy-boto3-docdb (>=1.34.0,<1.35.0)"] -docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.34.0,<1.35.0)"] -drs = ["mypy-boto3-drs (>=1.34.0,<1.35.0)"] -ds = ["mypy-boto3-ds (>=1.34.0,<1.35.0)"] -dynamodb = ["mypy-boto3-dynamodb (>=1.34.0,<1.35.0)"] -dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.34.0,<1.35.0)"] -ebs = ["mypy-boto3-ebs (>=1.34.0,<1.35.0)"] -ec2 = ["mypy-boto3-ec2 (>=1.34.0,<1.35.0)"] -ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.34.0,<1.35.0)"] -ecr = ["mypy-boto3-ecr (>=1.34.0,<1.35.0)"] -ecr-public = ["mypy-boto3-ecr-public (>=1.34.0,<1.35.0)"] -ecs = ["mypy-boto3-ecs (>=1.34.0,<1.35.0)"] -efs = ["mypy-boto3-efs (>=1.34.0,<1.35.0)"] -eks = ["mypy-boto3-eks (>=1.34.0,<1.35.0)"] -eks-auth = ["mypy-boto3-eks-auth (>=1.34.0,<1.35.0)"] -elastic-inference = ["mypy-boto3-elastic-inference (>=1.34.0,<1.35.0)"] -elasticache = ["mypy-boto3-elasticache (>=1.34.0,<1.35.0)"] -elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.34.0,<1.35.0)"] -elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.34.0,<1.35.0)"] -elb = ["mypy-boto3-elb (>=1.34.0,<1.35.0)"] -elbv2 = ["mypy-boto3-elbv2 (>=1.34.0,<1.35.0)"] -emr = ["mypy-boto3-emr (>=1.34.0,<1.35.0)"] -emr-containers = ["mypy-boto3-emr-containers (>=1.34.0,<1.35.0)"] -emr-serverless = ["mypy-boto3-emr-serverless (>=1.34.0,<1.35.0)"] -entityresolution = ["mypy-boto3-entityresolution (>=1.34.0,<1.35.0)"] -es = ["mypy-boto3-es (>=1.34.0,<1.35.0)"] -essential = ["mypy-boto3-cloudformation (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodb (>=1.34.0,<1.35.0)", "mypy-boto3-ec2 (>=1.34.0,<1.35.0)", "mypy-boto3-lambda (>=1.34.0,<1.35.0)", "mypy-boto3-rds (>=1.34.0,<1.35.0)", "mypy-boto3-s3 (>=1.34.0,<1.35.0)", "mypy-boto3-sqs (>=1.34.0,<1.35.0)"] -events = ["mypy-boto3-events (>=1.34.0,<1.35.0)"] -evidently = ["mypy-boto3-evidently (>=1.34.0,<1.35.0)"] -finspace = ["mypy-boto3-finspace 
(>=1.34.0,<1.35.0)"] -finspace-data = ["mypy-boto3-finspace-data (>=1.34.0,<1.35.0)"] -firehose = ["mypy-boto3-firehose (>=1.34.0,<1.35.0)"] -fis = ["mypy-boto3-fis (>=1.34.0,<1.35.0)"] -fms = ["mypy-boto3-fms (>=1.34.0,<1.35.0)"] -forecast = ["mypy-boto3-forecast (>=1.34.0,<1.35.0)"] -forecastquery = ["mypy-boto3-forecastquery (>=1.34.0,<1.35.0)"] -frauddetector = ["mypy-boto3-frauddetector (>=1.34.0,<1.35.0)"] -freetier = ["mypy-boto3-freetier (>=1.34.0,<1.35.0)"] -fsx = ["mypy-boto3-fsx (>=1.34.0,<1.35.0)"] -gamelift = ["mypy-boto3-gamelift (>=1.34.0,<1.35.0)"] -glacier = ["mypy-boto3-glacier (>=1.34.0,<1.35.0)"] -globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.34.0,<1.35.0)"] -glue = ["mypy-boto3-glue (>=1.34.0,<1.35.0)"] -grafana = ["mypy-boto3-grafana (>=1.34.0,<1.35.0)"] -greengrass = ["mypy-boto3-greengrass (>=1.34.0,<1.35.0)"] -greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.34.0,<1.35.0)"] -groundstation = ["mypy-boto3-groundstation (>=1.34.0,<1.35.0)"] -guardduty = ["mypy-boto3-guardduty (>=1.34.0,<1.35.0)"] -health = ["mypy-boto3-health (>=1.34.0,<1.35.0)"] -healthlake = ["mypy-boto3-healthlake (>=1.34.0,<1.35.0)"] -iam = ["mypy-boto3-iam (>=1.34.0,<1.35.0)"] -identitystore = ["mypy-boto3-identitystore (>=1.34.0,<1.35.0)"] -imagebuilder = ["mypy-boto3-imagebuilder (>=1.34.0,<1.35.0)"] -importexport = ["mypy-boto3-importexport (>=1.34.0,<1.35.0)"] -inspector = ["mypy-boto3-inspector (>=1.34.0,<1.35.0)"] -inspector-scan = ["mypy-boto3-inspector-scan (>=1.34.0,<1.35.0)"] -inspector2 = ["mypy-boto3-inspector2 (>=1.34.0,<1.35.0)"] -internetmonitor = ["mypy-boto3-internetmonitor (>=1.34.0,<1.35.0)"] -iot = ["mypy-boto3-iot (>=1.34.0,<1.35.0)"] -iot-data = ["mypy-boto3-iot-data (>=1.34.0,<1.35.0)"] -iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.34.0,<1.35.0)"] -iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.34.0,<1.35.0)"] -iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.34.0,<1.35.0)"] -iotanalytics = ["mypy-boto3-iotanalytics (>=1.34.0,<1.35.0)"] -iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.34.0,<1.35.0)"] -iotevents = ["mypy-boto3-iotevents (>=1.34.0,<1.35.0)"] -iotevents-data = ["mypy-boto3-iotevents-data (>=1.34.0,<1.35.0)"] -iotfleethub = ["mypy-boto3-iotfleethub (>=1.34.0,<1.35.0)"] -iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.34.0,<1.35.0)"] -iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.34.0,<1.35.0)"] -iotsitewise = ["mypy-boto3-iotsitewise (>=1.34.0,<1.35.0)"] -iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.34.0,<1.35.0)"] -iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.34.0,<1.35.0)"] -iotwireless = ["mypy-boto3-iotwireless (>=1.34.0,<1.35.0)"] -ivs = ["mypy-boto3-ivs (>=1.34.0,<1.35.0)"] -ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.34.0,<1.35.0)"] -ivschat = ["mypy-boto3-ivschat (>=1.34.0,<1.35.0)"] -kafka = ["mypy-boto3-kafka (>=1.34.0,<1.35.0)"] -kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.34.0,<1.35.0)"] -kendra = ["mypy-boto3-kendra (>=1.34.0,<1.35.0)"] -kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.34.0,<1.35.0)"] -keyspaces = ["mypy-boto3-keyspaces (>=1.34.0,<1.35.0)"] -kinesis = ["mypy-boto3-kinesis (>=1.34.0,<1.35.0)"] -kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.34.0,<1.35.0)"] -kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.34.0,<1.35.0)"] -kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.34.0,<1.35.0)"] -kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.34.0,<1.35.0)"] -kinesisanalytics = 
["mypy-boto3-kinesisanalytics (>=1.34.0,<1.35.0)"] -kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.34.0,<1.35.0)"] -kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.34.0,<1.35.0)"] -kms = ["mypy-boto3-kms (>=1.34.0,<1.35.0)"] -lakeformation = ["mypy-boto3-lakeformation (>=1.34.0,<1.35.0)"] -lambda = ["mypy-boto3-lambda (>=1.34.0,<1.35.0)"] -launch-wizard = ["mypy-boto3-launch-wizard (>=1.34.0,<1.35.0)"] -lex-models = ["mypy-boto3-lex-models (>=1.34.0,<1.35.0)"] -lex-runtime = ["mypy-boto3-lex-runtime (>=1.34.0,<1.35.0)"] -lexv2-models = ["mypy-boto3-lexv2-models (>=1.34.0,<1.35.0)"] -lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.34.0,<1.35.0)"] -license-manager = ["mypy-boto3-license-manager (>=1.34.0,<1.35.0)"] -license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.34.0,<1.35.0)"] -license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.34.0,<1.35.0)"] -lightsail = ["mypy-boto3-lightsail (>=1.34.0,<1.35.0)"] -location = ["mypy-boto3-location (>=1.34.0,<1.35.0)"] -logs = ["mypy-boto3-logs (>=1.34.0,<1.35.0)"] -lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.34.0,<1.35.0)"] -lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.34.0,<1.35.0)"] -lookoutvision = ["mypy-boto3-lookoutvision (>=1.34.0,<1.35.0)"] -m2 = ["mypy-boto3-m2 (>=1.34.0,<1.35.0)"] -machinelearning = ["mypy-boto3-machinelearning (>=1.34.0,<1.35.0)"] -macie2 = ["mypy-boto3-macie2 (>=1.34.0,<1.35.0)"] -mailmanager = ["mypy-boto3-mailmanager (>=1.34.0,<1.35.0)"] -managedblockchain = ["mypy-boto3-managedblockchain (>=1.34.0,<1.35.0)"] -managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.34.0,<1.35.0)"] -marketplace-agreement = ["mypy-boto3-marketplace-agreement (>=1.34.0,<1.35.0)"] -marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.34.0,<1.35.0)"] -marketplace-deployment = ["mypy-boto3-marketplace-deployment (>=1.34.0,<1.35.0)"] -marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.34.0,<1.35.0)"] -marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.34.0,<1.35.0)"] -mediaconnect = ["mypy-boto3-mediaconnect (>=1.34.0,<1.35.0)"] -mediaconvert = ["mypy-boto3-mediaconvert (>=1.34.0,<1.35.0)"] -medialive = ["mypy-boto3-medialive (>=1.34.0,<1.35.0)"] -mediapackage = ["mypy-boto3-mediapackage (>=1.34.0,<1.35.0)"] -mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.34.0,<1.35.0)"] -mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.34.0,<1.35.0)"] -mediastore = ["mypy-boto3-mediastore (>=1.34.0,<1.35.0)"] -mediastore-data = ["mypy-boto3-mediastore-data (>=1.34.0,<1.35.0)"] -mediatailor = ["mypy-boto3-mediatailor (>=1.34.0,<1.35.0)"] -medical-imaging = ["mypy-boto3-medical-imaging (>=1.34.0,<1.35.0)"] -memorydb = ["mypy-boto3-memorydb (>=1.34.0,<1.35.0)"] -meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.34.0,<1.35.0)"] -mgh = ["mypy-boto3-mgh (>=1.34.0,<1.35.0)"] -mgn = ["mypy-boto3-mgn (>=1.34.0,<1.35.0)"] -migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.34.0,<1.35.0)"] -migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.34.0,<1.35.0)"] -migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.34.0,<1.35.0)"] -migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.34.0,<1.35.0)"] -mobile = ["mypy-boto3-mobile (>=1.34.0,<1.35.0)"] -mq = ["mypy-boto3-mq (>=1.34.0,<1.35.0)"] -mturk = ["mypy-boto3-mturk (>=1.34.0,<1.35.0)"] -mwaa = ["mypy-boto3-mwaa (>=1.34.0,<1.35.0)"] -neptune = ["mypy-boto3-neptune 
(>=1.34.0,<1.35.0)"] -neptune-graph = ["mypy-boto3-neptune-graph (>=1.34.0,<1.35.0)"] -neptunedata = ["mypy-boto3-neptunedata (>=1.34.0,<1.35.0)"] -network-firewall = ["mypy-boto3-network-firewall (>=1.34.0,<1.35.0)"] -networkmanager = ["mypy-boto3-networkmanager (>=1.34.0,<1.35.0)"] -networkmonitor = ["mypy-boto3-networkmonitor (>=1.34.0,<1.35.0)"] -nimble = ["mypy-boto3-nimble (>=1.34.0,<1.35.0)"] -oam = ["mypy-boto3-oam (>=1.34.0,<1.35.0)"] -omics = ["mypy-boto3-omics (>=1.34.0,<1.35.0)"] -opensearch = ["mypy-boto3-opensearch (>=1.34.0,<1.35.0)"] -opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.34.0,<1.35.0)"] -opsworks = ["mypy-boto3-opsworks (>=1.34.0,<1.35.0)"] -opsworkscm = ["mypy-boto3-opsworkscm (>=1.34.0,<1.35.0)"] -organizations = ["mypy-boto3-organizations (>=1.34.0,<1.35.0)"] -osis = ["mypy-boto3-osis (>=1.34.0,<1.35.0)"] -outposts = ["mypy-boto3-outposts (>=1.34.0,<1.35.0)"] -panorama = ["mypy-boto3-panorama (>=1.34.0,<1.35.0)"] -payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.34.0,<1.35.0)"] -payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.34.0,<1.35.0)"] -pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.34.0,<1.35.0)"] -pca-connector-scep = ["mypy-boto3-pca-connector-scep (>=1.34.0,<1.35.0)"] -personalize = ["mypy-boto3-personalize (>=1.34.0,<1.35.0)"] -personalize-events = ["mypy-boto3-personalize-events (>=1.34.0,<1.35.0)"] -personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.34.0,<1.35.0)"] -pi = ["mypy-boto3-pi (>=1.34.0,<1.35.0)"] -pinpoint = ["mypy-boto3-pinpoint (>=1.34.0,<1.35.0)"] -pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.34.0,<1.35.0)"] -pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.34.0,<1.35.0)"] -pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.34.0,<1.35.0)"] -pipes = ["mypy-boto3-pipes (>=1.34.0,<1.35.0)"] -polly = ["mypy-boto3-polly (>=1.34.0,<1.35.0)"] -pricing = ["mypy-boto3-pricing (>=1.34.0,<1.35.0)"] -privatenetworks = ["mypy-boto3-privatenetworks (>=1.34.0,<1.35.0)"] -proton = ["mypy-boto3-proton (>=1.34.0,<1.35.0)"] -qbusiness = ["mypy-boto3-qbusiness (>=1.34.0,<1.35.0)"] -qconnect = ["mypy-boto3-qconnect (>=1.34.0,<1.35.0)"] -qldb = ["mypy-boto3-qldb (>=1.34.0,<1.35.0)"] -qldb-session = ["mypy-boto3-qldb-session (>=1.34.0,<1.35.0)"] -quicksight = ["mypy-boto3-quicksight (>=1.34.0,<1.35.0)"] -ram = ["mypy-boto3-ram (>=1.34.0,<1.35.0)"] -rbin = ["mypy-boto3-rbin (>=1.34.0,<1.35.0)"] -rds = ["mypy-boto3-rds (>=1.34.0,<1.35.0)"] -rds-data = ["mypy-boto3-rds-data (>=1.34.0,<1.35.0)"] -redshift = ["mypy-boto3-redshift (>=1.34.0,<1.35.0)"] -redshift-data = ["mypy-boto3-redshift-data (>=1.34.0,<1.35.0)"] -redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.34.0,<1.35.0)"] -rekognition = ["mypy-boto3-rekognition (>=1.34.0,<1.35.0)"] -repostspace = ["mypy-boto3-repostspace (>=1.34.0,<1.35.0)"] -resiliencehub = ["mypy-boto3-resiliencehub (>=1.34.0,<1.35.0)"] -resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.34.0,<1.35.0)"] -resource-groups = ["mypy-boto3-resource-groups (>=1.34.0,<1.35.0)"] -resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.34.0,<1.35.0)"] -robomaker = ["mypy-boto3-robomaker (>=1.34.0,<1.35.0)"] -rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.34.0,<1.35.0)"] -route53 = ["mypy-boto3-route53 (>=1.34.0,<1.35.0)"] -route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.34.0,<1.35.0)"] -route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.34.0,<1.35.0)"] 
-route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.34.0,<1.35.0)"] -route53domains = ["mypy-boto3-route53domains (>=1.34.0,<1.35.0)"] -route53profiles = ["mypy-boto3-route53profiles (>=1.34.0,<1.35.0)"] -route53resolver = ["mypy-boto3-route53resolver (>=1.34.0,<1.35.0)"] -rum = ["mypy-boto3-rum (>=1.34.0,<1.35.0)"] -s3 = ["mypy-boto3-s3 (>=1.34.0,<1.35.0)"] -s3control = ["mypy-boto3-s3control (>=1.34.0,<1.35.0)"] -s3outposts = ["mypy-boto3-s3outposts (>=1.34.0,<1.35.0)"] -sagemaker = ["mypy-boto3-sagemaker (>=1.34.0,<1.35.0)"] -sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.34.0,<1.35.0)"] -sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.34.0,<1.35.0)"] -sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.34.0,<1.35.0)"] -sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.34.0,<1.35.0)"] -sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.34.0,<1.35.0)"] -sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.34.0,<1.35.0)"] -savingsplans = ["mypy-boto3-savingsplans (>=1.34.0,<1.35.0)"] -scheduler = ["mypy-boto3-scheduler (>=1.34.0,<1.35.0)"] -schemas = ["mypy-boto3-schemas (>=1.34.0,<1.35.0)"] -sdb = ["mypy-boto3-sdb (>=1.34.0,<1.35.0)"] -secretsmanager = ["mypy-boto3-secretsmanager (>=1.34.0,<1.35.0)"] -securityhub = ["mypy-boto3-securityhub (>=1.34.0,<1.35.0)"] -securitylake = ["mypy-boto3-securitylake (>=1.34.0,<1.35.0)"] -serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.34.0,<1.35.0)"] -service-quotas = ["mypy-boto3-service-quotas (>=1.34.0,<1.35.0)"] -servicecatalog = ["mypy-boto3-servicecatalog (>=1.34.0,<1.35.0)"] -servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.34.0,<1.35.0)"] -servicediscovery = ["mypy-boto3-servicediscovery (>=1.34.0,<1.35.0)"] -ses = ["mypy-boto3-ses (>=1.34.0,<1.35.0)"] -sesv2 = ["mypy-boto3-sesv2 (>=1.34.0,<1.35.0)"] -shield = ["mypy-boto3-shield (>=1.34.0,<1.35.0)"] -signer = ["mypy-boto3-signer (>=1.34.0,<1.35.0)"] -simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.34.0,<1.35.0)"] -sms = ["mypy-boto3-sms (>=1.34.0,<1.35.0)"] -sms-voice = ["mypy-boto3-sms-voice (>=1.34.0,<1.35.0)"] -snow-device-management = ["mypy-boto3-snow-device-management (>=1.34.0,<1.35.0)"] -snowball = ["mypy-boto3-snowball (>=1.34.0,<1.35.0)"] -sns = ["mypy-boto3-sns (>=1.34.0,<1.35.0)"] -sqs = ["mypy-boto3-sqs (>=1.34.0,<1.35.0)"] -ssm = ["mypy-boto3-ssm (>=1.34.0,<1.35.0)"] -ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.34.0,<1.35.0)"] -ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.34.0,<1.35.0)"] -ssm-sap = ["mypy-boto3-ssm-sap (>=1.34.0,<1.35.0)"] -sso = ["mypy-boto3-sso (>=1.34.0,<1.35.0)"] -sso-admin = ["mypy-boto3-sso-admin (>=1.34.0,<1.35.0)"] -sso-oidc = ["mypy-boto3-sso-oidc (>=1.34.0,<1.35.0)"] -stepfunctions = ["mypy-boto3-stepfunctions (>=1.34.0,<1.35.0)"] -storagegateway = ["mypy-boto3-storagegateway (>=1.34.0,<1.35.0)"] -sts = ["mypy-boto3-sts (>=1.34.0,<1.35.0)"] -supplychain = ["mypy-boto3-supplychain (>=1.34.0,<1.35.0)"] -support = ["mypy-boto3-support (>=1.34.0,<1.35.0)"] -support-app = ["mypy-boto3-support-app (>=1.34.0,<1.35.0)"] -swf = ["mypy-boto3-swf (>=1.34.0,<1.35.0)"] -synthetics = ["mypy-boto3-synthetics (>=1.34.0,<1.35.0)"] -taxsettings = ["mypy-boto3-taxsettings (>=1.34.0,<1.35.0)"] -textract = ["mypy-boto3-textract (>=1.34.0,<1.35.0)"] -timestream-influxdb = ["mypy-boto3-timestream-influxdb (>=1.34.0,<1.35.0)"] -timestream-query = ["mypy-boto3-timestream-query (>=1.34.0,<1.35.0)"] -timestream-write = ["mypy-boto3-timestream-write 
(>=1.34.0,<1.35.0)"] -tnb = ["mypy-boto3-tnb (>=1.34.0,<1.35.0)"] -transcribe = ["mypy-boto3-transcribe (>=1.34.0,<1.35.0)"] -transfer = ["mypy-boto3-transfer (>=1.34.0,<1.35.0)"] -translate = ["mypy-boto3-translate (>=1.34.0,<1.35.0)"] -trustedadvisor = ["mypy-boto3-trustedadvisor (>=1.34.0,<1.35.0)"] -verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.34.0,<1.35.0)"] -voice-id = ["mypy-boto3-voice-id (>=1.34.0,<1.35.0)"] -vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.34.0,<1.35.0)"] -waf = ["mypy-boto3-waf (>=1.34.0,<1.35.0)"] -waf-regional = ["mypy-boto3-waf-regional (>=1.34.0,<1.35.0)"] -wafv2 = ["mypy-boto3-wafv2 (>=1.34.0,<1.35.0)"] -wellarchitected = ["mypy-boto3-wellarchitected (>=1.34.0,<1.35.0)"] -wisdom = ["mypy-boto3-wisdom (>=1.34.0,<1.35.0)"] -workdocs = ["mypy-boto3-workdocs (>=1.34.0,<1.35.0)"] -worklink = ["mypy-boto3-worklink (>=1.34.0,<1.35.0)"] -workmail = ["mypy-boto3-workmail (>=1.34.0,<1.35.0)"] -workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.34.0,<1.35.0)"] -workspaces = ["mypy-boto3-workspaces (>=1.34.0,<1.35.0)"] -workspaces-thin-client = ["mypy-boto3-workspaces-thin-client (>=1.34.0,<1.35.0)"] -workspaces-web = ["mypy-boto3-workspaces-web (>=1.34.0,<1.35.0)"] -xray = ["mypy-boto3-xray (>=1.34.0,<1.35.0)"] +accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.35.0,<1.36.0)"] +account = ["mypy-boto3-account (>=1.35.0,<1.36.0)"] +acm = ["mypy-boto3-acm (>=1.35.0,<1.36.0)"] +acm-pca = ["mypy-boto3-acm-pca (>=1.35.0,<1.36.0)"] +all = ["mypy-boto3-accessanalyzer (>=1.35.0,<1.36.0)", "mypy-boto3-account (>=1.35.0,<1.36.0)", "mypy-boto3-acm (>=1.35.0,<1.36.0)", "mypy-boto3-acm-pca (>=1.35.0,<1.36.0)", "mypy-boto3-amp (>=1.35.0,<1.36.0)", "mypy-boto3-amplify (>=1.35.0,<1.36.0)", "mypy-boto3-amplifybackend (>=1.35.0,<1.36.0)", "mypy-boto3-amplifyuibuilder (>=1.35.0,<1.36.0)", "mypy-boto3-apigateway (>=1.35.0,<1.36.0)", "mypy-boto3-apigatewaymanagementapi (>=1.35.0,<1.36.0)", "mypy-boto3-apigatewayv2 (>=1.35.0,<1.36.0)", "mypy-boto3-appconfig (>=1.35.0,<1.36.0)", "mypy-boto3-appconfigdata (>=1.35.0,<1.36.0)", "mypy-boto3-appfabric (>=1.35.0,<1.36.0)", "mypy-boto3-appflow (>=1.35.0,<1.36.0)", "mypy-boto3-appintegrations (>=1.35.0,<1.36.0)", "mypy-boto3-application-autoscaling (>=1.35.0,<1.36.0)", "mypy-boto3-application-insights (>=1.35.0,<1.36.0)", "mypy-boto3-application-signals (>=1.35.0,<1.36.0)", "mypy-boto3-applicationcostprofiler (>=1.35.0,<1.36.0)", "mypy-boto3-appmesh (>=1.35.0,<1.36.0)", "mypy-boto3-apprunner (>=1.35.0,<1.36.0)", "mypy-boto3-appstream (>=1.35.0,<1.36.0)", "mypy-boto3-appsync (>=1.35.0,<1.36.0)", "mypy-boto3-apptest (>=1.35.0,<1.36.0)", "mypy-boto3-arc-zonal-shift (>=1.35.0,<1.36.0)", "mypy-boto3-artifact (>=1.35.0,<1.36.0)", "mypy-boto3-athena (>=1.35.0,<1.36.0)", "mypy-boto3-auditmanager (>=1.35.0,<1.36.0)", "mypy-boto3-autoscaling (>=1.35.0,<1.36.0)", "mypy-boto3-autoscaling-plans (>=1.35.0,<1.36.0)", "mypy-boto3-b2bi (>=1.35.0,<1.36.0)", "mypy-boto3-backup (>=1.35.0,<1.36.0)", "mypy-boto3-backup-gateway (>=1.35.0,<1.36.0)", "mypy-boto3-batch (>=1.35.0,<1.36.0)", "mypy-boto3-bcm-data-exports (>=1.35.0,<1.36.0)", "mypy-boto3-bedrock (>=1.35.0,<1.36.0)", "mypy-boto3-bedrock-agent (>=1.35.0,<1.36.0)", "mypy-boto3-bedrock-agent-runtime (>=1.35.0,<1.36.0)", "mypy-boto3-bedrock-runtime (>=1.35.0,<1.36.0)", "mypy-boto3-billingconductor (>=1.35.0,<1.36.0)", "mypy-boto3-braket (>=1.35.0,<1.36.0)", "mypy-boto3-budgets (>=1.35.0,<1.36.0)", "mypy-boto3-ce (>=1.35.0,<1.36.0)", "mypy-boto3-chatbot (>=1.35.0,<1.36.0)", "mypy-boto3-chime 
(>=1.35.0,<1.36.0)", "mypy-boto3-chime-sdk-identity (>=1.35.0,<1.36.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.35.0,<1.36.0)", "mypy-boto3-chime-sdk-meetings (>=1.35.0,<1.36.0)", "mypy-boto3-chime-sdk-messaging (>=1.35.0,<1.36.0)", "mypy-boto3-chime-sdk-voice (>=1.35.0,<1.36.0)", "mypy-boto3-cleanrooms (>=1.35.0,<1.36.0)", "mypy-boto3-cleanroomsml (>=1.35.0,<1.36.0)", "mypy-boto3-cloud9 (>=1.35.0,<1.36.0)", "mypy-boto3-cloudcontrol (>=1.35.0,<1.36.0)", "mypy-boto3-clouddirectory (>=1.35.0,<1.36.0)", "mypy-boto3-cloudformation (>=1.35.0,<1.36.0)", "mypy-boto3-cloudfront (>=1.35.0,<1.36.0)", "mypy-boto3-cloudfront-keyvaluestore (>=1.35.0,<1.36.0)", "mypy-boto3-cloudhsm (>=1.35.0,<1.36.0)", "mypy-boto3-cloudhsmv2 (>=1.35.0,<1.36.0)", "mypy-boto3-cloudsearch (>=1.35.0,<1.36.0)", "mypy-boto3-cloudsearchdomain (>=1.35.0,<1.36.0)", "mypy-boto3-cloudtrail (>=1.35.0,<1.36.0)", "mypy-boto3-cloudtrail-data (>=1.35.0,<1.36.0)", "mypy-boto3-cloudwatch (>=1.35.0,<1.36.0)", "mypy-boto3-codeartifact (>=1.35.0,<1.36.0)", "mypy-boto3-codebuild (>=1.35.0,<1.36.0)", "mypy-boto3-codecatalyst (>=1.35.0,<1.36.0)", "mypy-boto3-codecommit (>=1.35.0,<1.36.0)", "mypy-boto3-codeconnections (>=1.35.0,<1.36.0)", "mypy-boto3-codedeploy (>=1.35.0,<1.36.0)", "mypy-boto3-codeguru-reviewer (>=1.35.0,<1.36.0)", "mypy-boto3-codeguru-security (>=1.35.0,<1.36.0)", "mypy-boto3-codeguruprofiler (>=1.35.0,<1.36.0)", "mypy-boto3-codepipeline (>=1.35.0,<1.36.0)", "mypy-boto3-codestar (>=1.35.0,<1.36.0)", "mypy-boto3-codestar-connections (>=1.35.0,<1.36.0)", "mypy-boto3-codestar-notifications (>=1.35.0,<1.36.0)", "mypy-boto3-cognito-identity (>=1.35.0,<1.36.0)", "mypy-boto3-cognito-idp (>=1.35.0,<1.36.0)", "mypy-boto3-cognito-sync (>=1.35.0,<1.36.0)", "mypy-boto3-comprehend (>=1.35.0,<1.36.0)", "mypy-boto3-comprehendmedical (>=1.35.0,<1.36.0)", "mypy-boto3-compute-optimizer (>=1.35.0,<1.36.0)", "mypy-boto3-config (>=1.35.0,<1.36.0)", "mypy-boto3-connect (>=1.35.0,<1.36.0)", "mypy-boto3-connect-contact-lens (>=1.35.0,<1.36.0)", "mypy-boto3-connectcampaigns (>=1.35.0,<1.36.0)", "mypy-boto3-connectcases (>=1.35.0,<1.36.0)", "mypy-boto3-connectparticipant (>=1.35.0,<1.36.0)", "mypy-boto3-controlcatalog (>=1.35.0,<1.36.0)", "mypy-boto3-controltower (>=1.35.0,<1.36.0)", "mypy-boto3-cost-optimization-hub (>=1.35.0,<1.36.0)", "mypy-boto3-cur (>=1.35.0,<1.36.0)", "mypy-boto3-customer-profiles (>=1.35.0,<1.36.0)", "mypy-boto3-databrew (>=1.35.0,<1.36.0)", "mypy-boto3-dataexchange (>=1.35.0,<1.36.0)", "mypy-boto3-datapipeline (>=1.35.0,<1.36.0)", "mypy-boto3-datasync (>=1.35.0,<1.36.0)", "mypy-boto3-datazone (>=1.35.0,<1.36.0)", "mypy-boto3-dax (>=1.35.0,<1.36.0)", "mypy-boto3-deadline (>=1.35.0,<1.36.0)", "mypy-boto3-detective (>=1.35.0,<1.36.0)", "mypy-boto3-devicefarm (>=1.35.0,<1.36.0)", "mypy-boto3-devops-guru (>=1.35.0,<1.36.0)", "mypy-boto3-directconnect (>=1.35.0,<1.36.0)", "mypy-boto3-discovery (>=1.35.0,<1.36.0)", "mypy-boto3-dlm (>=1.35.0,<1.36.0)", "mypy-boto3-dms (>=1.35.0,<1.36.0)", "mypy-boto3-docdb (>=1.35.0,<1.36.0)", "mypy-boto3-docdb-elastic (>=1.35.0,<1.36.0)", "mypy-boto3-drs (>=1.35.0,<1.36.0)", "mypy-boto3-ds (>=1.35.0,<1.36.0)", "mypy-boto3-dynamodb (>=1.35.0,<1.36.0)", "mypy-boto3-dynamodbstreams (>=1.35.0,<1.36.0)", "mypy-boto3-ebs (>=1.35.0,<1.36.0)", "mypy-boto3-ec2 (>=1.35.0,<1.36.0)", "mypy-boto3-ec2-instance-connect (>=1.35.0,<1.36.0)", "mypy-boto3-ecr (>=1.35.0,<1.36.0)", "mypy-boto3-ecr-public (>=1.35.0,<1.36.0)", "mypy-boto3-ecs (>=1.35.0,<1.36.0)", "mypy-boto3-efs (>=1.35.0,<1.36.0)", "mypy-boto3-eks 
(>=1.35.0,<1.36.0)", "mypy-boto3-eks-auth (>=1.35.0,<1.36.0)", "mypy-boto3-elastic-inference (>=1.35.0,<1.36.0)", "mypy-boto3-elasticache (>=1.35.0,<1.36.0)", "mypy-boto3-elasticbeanstalk (>=1.35.0,<1.36.0)", "mypy-boto3-elastictranscoder (>=1.35.0,<1.36.0)", "mypy-boto3-elb (>=1.35.0,<1.36.0)", "mypy-boto3-elbv2 (>=1.35.0,<1.36.0)", "mypy-boto3-emr (>=1.35.0,<1.36.0)", "mypy-boto3-emr-containers (>=1.35.0,<1.36.0)", "mypy-boto3-emr-serverless (>=1.35.0,<1.36.0)", "mypy-boto3-entityresolution (>=1.35.0,<1.36.0)", "mypy-boto3-es (>=1.35.0,<1.36.0)", "mypy-boto3-events (>=1.35.0,<1.36.0)", "mypy-boto3-evidently (>=1.35.0,<1.36.0)", "mypy-boto3-finspace (>=1.35.0,<1.36.0)", "mypy-boto3-finspace-data (>=1.35.0,<1.36.0)", "mypy-boto3-firehose (>=1.35.0,<1.36.0)", "mypy-boto3-fis (>=1.35.0,<1.36.0)", "mypy-boto3-fms (>=1.35.0,<1.36.0)", "mypy-boto3-forecast (>=1.35.0,<1.36.0)", "mypy-boto3-forecastquery (>=1.35.0,<1.36.0)", "mypy-boto3-frauddetector (>=1.35.0,<1.36.0)", "mypy-boto3-freetier (>=1.35.0,<1.36.0)", "mypy-boto3-fsx (>=1.35.0,<1.36.0)", "mypy-boto3-gamelift (>=1.35.0,<1.36.0)", "mypy-boto3-glacier (>=1.35.0,<1.36.0)", "mypy-boto3-globalaccelerator (>=1.35.0,<1.36.0)", "mypy-boto3-glue (>=1.35.0,<1.36.0)", "mypy-boto3-grafana (>=1.35.0,<1.36.0)", "mypy-boto3-greengrass (>=1.35.0,<1.36.0)", "mypy-boto3-greengrassv2 (>=1.35.0,<1.36.0)", "mypy-boto3-groundstation (>=1.35.0,<1.36.0)", "mypy-boto3-guardduty (>=1.35.0,<1.36.0)", "mypy-boto3-health (>=1.35.0,<1.36.0)", "mypy-boto3-healthlake (>=1.35.0,<1.36.0)", "mypy-boto3-iam (>=1.35.0,<1.36.0)", "mypy-boto3-identitystore (>=1.35.0,<1.36.0)", "mypy-boto3-imagebuilder (>=1.35.0,<1.36.0)", "mypy-boto3-importexport (>=1.35.0,<1.36.0)", "mypy-boto3-inspector (>=1.35.0,<1.36.0)", "mypy-boto3-inspector-scan (>=1.35.0,<1.36.0)", "mypy-boto3-inspector2 (>=1.35.0,<1.36.0)", "mypy-boto3-internetmonitor (>=1.35.0,<1.36.0)", "mypy-boto3-iot (>=1.35.0,<1.36.0)", "mypy-boto3-iot-data (>=1.35.0,<1.36.0)", "mypy-boto3-iot-jobs-data (>=1.35.0,<1.36.0)", "mypy-boto3-iot1click-devices (>=1.35.0,<1.36.0)", "mypy-boto3-iot1click-projects (>=1.35.0,<1.36.0)", "mypy-boto3-iotanalytics (>=1.35.0,<1.36.0)", "mypy-boto3-iotdeviceadvisor (>=1.35.0,<1.36.0)", "mypy-boto3-iotevents (>=1.35.0,<1.36.0)", "mypy-boto3-iotevents-data (>=1.35.0,<1.36.0)", "mypy-boto3-iotfleethub (>=1.35.0,<1.36.0)", "mypy-boto3-iotfleetwise (>=1.35.0,<1.36.0)", "mypy-boto3-iotsecuretunneling (>=1.35.0,<1.36.0)", "mypy-boto3-iotsitewise (>=1.35.0,<1.36.0)", "mypy-boto3-iotthingsgraph (>=1.35.0,<1.36.0)", "mypy-boto3-iottwinmaker (>=1.35.0,<1.36.0)", "mypy-boto3-iotwireless (>=1.35.0,<1.36.0)", "mypy-boto3-ivs (>=1.35.0,<1.36.0)", "mypy-boto3-ivs-realtime (>=1.35.0,<1.36.0)", "mypy-boto3-ivschat (>=1.35.0,<1.36.0)", "mypy-boto3-kafka (>=1.35.0,<1.36.0)", "mypy-boto3-kafkaconnect (>=1.35.0,<1.36.0)", "mypy-boto3-kendra (>=1.35.0,<1.36.0)", "mypy-boto3-kendra-ranking (>=1.35.0,<1.36.0)", "mypy-boto3-keyspaces (>=1.35.0,<1.36.0)", "mypy-boto3-kinesis (>=1.35.0,<1.36.0)", "mypy-boto3-kinesis-video-archived-media (>=1.35.0,<1.36.0)", "mypy-boto3-kinesis-video-media (>=1.35.0,<1.36.0)", "mypy-boto3-kinesis-video-signaling (>=1.35.0,<1.36.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.35.0,<1.36.0)", "mypy-boto3-kinesisanalytics (>=1.35.0,<1.36.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.35.0,<1.36.0)", "mypy-boto3-kinesisvideo (>=1.35.0,<1.36.0)", "mypy-boto3-kms (>=1.35.0,<1.36.0)", "mypy-boto3-lakeformation (>=1.35.0,<1.36.0)", "mypy-boto3-lambda (>=1.35.0,<1.36.0)", "mypy-boto3-launch-wizard 
(>=1.35.0,<1.36.0)", "mypy-boto3-lex-models (>=1.35.0,<1.36.0)", "mypy-boto3-lex-runtime (>=1.35.0,<1.36.0)", "mypy-boto3-lexv2-models (>=1.35.0,<1.36.0)", "mypy-boto3-lexv2-runtime (>=1.35.0,<1.36.0)", "mypy-boto3-license-manager (>=1.35.0,<1.36.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.35.0,<1.36.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.35.0,<1.36.0)", "mypy-boto3-lightsail (>=1.35.0,<1.36.0)", "mypy-boto3-location (>=1.35.0,<1.36.0)", "mypy-boto3-logs (>=1.35.0,<1.36.0)", "mypy-boto3-lookoutequipment (>=1.35.0,<1.36.0)", "mypy-boto3-lookoutmetrics (>=1.35.0,<1.36.0)", "mypy-boto3-lookoutvision (>=1.35.0,<1.36.0)", "mypy-boto3-m2 (>=1.35.0,<1.36.0)", "mypy-boto3-machinelearning (>=1.35.0,<1.36.0)", "mypy-boto3-macie2 (>=1.35.0,<1.36.0)", "mypy-boto3-mailmanager (>=1.35.0,<1.36.0)", "mypy-boto3-managedblockchain (>=1.35.0,<1.36.0)", "mypy-boto3-managedblockchain-query (>=1.35.0,<1.36.0)", "mypy-boto3-marketplace-agreement (>=1.35.0,<1.36.0)", "mypy-boto3-marketplace-catalog (>=1.35.0,<1.36.0)", "mypy-boto3-marketplace-deployment (>=1.35.0,<1.36.0)", "mypy-boto3-marketplace-entitlement (>=1.35.0,<1.36.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.35.0,<1.36.0)", "mypy-boto3-mediaconnect (>=1.35.0,<1.36.0)", "mypy-boto3-mediaconvert (>=1.35.0,<1.36.0)", "mypy-boto3-medialive (>=1.35.0,<1.36.0)", "mypy-boto3-mediapackage (>=1.35.0,<1.36.0)", "mypy-boto3-mediapackage-vod (>=1.35.0,<1.36.0)", "mypy-boto3-mediapackagev2 (>=1.35.0,<1.36.0)", "mypy-boto3-mediastore (>=1.35.0,<1.36.0)", "mypy-boto3-mediastore-data (>=1.35.0,<1.36.0)", "mypy-boto3-mediatailor (>=1.35.0,<1.36.0)", "mypy-boto3-medical-imaging (>=1.35.0,<1.36.0)", "mypy-boto3-memorydb (>=1.35.0,<1.36.0)", "mypy-boto3-meteringmarketplace (>=1.35.0,<1.36.0)", "mypy-boto3-mgh (>=1.35.0,<1.36.0)", "mypy-boto3-mgn (>=1.35.0,<1.36.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.35.0,<1.36.0)", "mypy-boto3-migrationhub-config (>=1.35.0,<1.36.0)", "mypy-boto3-migrationhuborchestrator (>=1.35.0,<1.36.0)", "mypy-boto3-migrationhubstrategy (>=1.35.0,<1.36.0)", "mypy-boto3-mq (>=1.35.0,<1.36.0)", "mypy-boto3-mturk (>=1.35.0,<1.36.0)", "mypy-boto3-mwaa (>=1.35.0,<1.36.0)", "mypy-boto3-neptune (>=1.35.0,<1.36.0)", "mypy-boto3-neptune-graph (>=1.35.0,<1.36.0)", "mypy-boto3-neptunedata (>=1.35.0,<1.36.0)", "mypy-boto3-network-firewall (>=1.35.0,<1.36.0)", "mypy-boto3-networkmanager (>=1.35.0,<1.36.0)", "mypy-boto3-networkmonitor (>=1.35.0,<1.36.0)", "mypy-boto3-nimble (>=1.35.0,<1.36.0)", "mypy-boto3-oam (>=1.35.0,<1.36.0)", "mypy-boto3-omics (>=1.35.0,<1.36.0)", "mypy-boto3-opensearch (>=1.35.0,<1.36.0)", "mypy-boto3-opensearchserverless (>=1.35.0,<1.36.0)", "mypy-boto3-opsworks (>=1.35.0,<1.36.0)", "mypy-boto3-opsworkscm (>=1.35.0,<1.36.0)", "mypy-boto3-organizations (>=1.35.0,<1.36.0)", "mypy-boto3-osis (>=1.35.0,<1.36.0)", "mypy-boto3-outposts (>=1.35.0,<1.36.0)", "mypy-boto3-panorama (>=1.35.0,<1.36.0)", "mypy-boto3-payment-cryptography (>=1.35.0,<1.36.0)", "mypy-boto3-payment-cryptography-data (>=1.35.0,<1.36.0)", "mypy-boto3-pca-connector-ad (>=1.35.0,<1.36.0)", "mypy-boto3-pca-connector-scep (>=1.35.0,<1.36.0)", "mypy-boto3-personalize (>=1.35.0,<1.36.0)", "mypy-boto3-personalize-events (>=1.35.0,<1.36.0)", "mypy-boto3-personalize-runtime (>=1.35.0,<1.36.0)", "mypy-boto3-pi (>=1.35.0,<1.36.0)", "mypy-boto3-pinpoint (>=1.35.0,<1.36.0)", "mypy-boto3-pinpoint-email (>=1.35.0,<1.36.0)", "mypy-boto3-pinpoint-sms-voice (>=1.35.0,<1.36.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.35.0,<1.36.0)", 
"mypy-boto3-pipes (>=1.35.0,<1.36.0)", "mypy-boto3-polly (>=1.35.0,<1.36.0)", "mypy-boto3-pricing (>=1.35.0,<1.36.0)", "mypy-boto3-privatenetworks (>=1.35.0,<1.36.0)", "mypy-boto3-proton (>=1.35.0,<1.36.0)", "mypy-boto3-qapps (>=1.35.0,<1.36.0)", "mypy-boto3-qbusiness (>=1.35.0,<1.36.0)", "mypy-boto3-qconnect (>=1.35.0,<1.36.0)", "mypy-boto3-qldb (>=1.35.0,<1.36.0)", "mypy-boto3-qldb-session (>=1.35.0,<1.36.0)", "mypy-boto3-quicksight (>=1.35.0,<1.36.0)", "mypy-boto3-ram (>=1.35.0,<1.36.0)", "mypy-boto3-rbin (>=1.35.0,<1.36.0)", "mypy-boto3-rds (>=1.35.0,<1.36.0)", "mypy-boto3-rds-data (>=1.35.0,<1.36.0)", "mypy-boto3-redshift (>=1.35.0,<1.36.0)", "mypy-boto3-redshift-data (>=1.35.0,<1.36.0)", "mypy-boto3-redshift-serverless (>=1.35.0,<1.36.0)", "mypy-boto3-rekognition (>=1.35.0,<1.36.0)", "mypy-boto3-repostspace (>=1.35.0,<1.36.0)", "mypy-boto3-resiliencehub (>=1.35.0,<1.36.0)", "mypy-boto3-resource-explorer-2 (>=1.35.0,<1.36.0)", "mypy-boto3-resource-groups (>=1.35.0,<1.36.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.35.0,<1.36.0)", "mypy-boto3-robomaker (>=1.35.0,<1.36.0)", "mypy-boto3-rolesanywhere (>=1.35.0,<1.36.0)", "mypy-boto3-route53 (>=1.35.0,<1.36.0)", "mypy-boto3-route53-recovery-cluster (>=1.35.0,<1.36.0)", "mypy-boto3-route53-recovery-control-config (>=1.35.0,<1.36.0)", "mypy-boto3-route53-recovery-readiness (>=1.35.0,<1.36.0)", "mypy-boto3-route53domains (>=1.35.0,<1.36.0)", "mypy-boto3-route53profiles (>=1.35.0,<1.36.0)", "mypy-boto3-route53resolver (>=1.35.0,<1.36.0)", "mypy-boto3-rum (>=1.35.0,<1.36.0)", "mypy-boto3-s3 (>=1.35.0,<1.36.0)", "mypy-boto3-s3control (>=1.35.0,<1.36.0)", "mypy-boto3-s3outposts (>=1.35.0,<1.36.0)", "mypy-boto3-sagemaker (>=1.35.0,<1.36.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.35.0,<1.36.0)", "mypy-boto3-sagemaker-edge (>=1.35.0,<1.36.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.35.0,<1.36.0)", "mypy-boto3-sagemaker-geospatial (>=1.35.0,<1.36.0)", "mypy-boto3-sagemaker-metrics (>=1.35.0,<1.36.0)", "mypy-boto3-sagemaker-runtime (>=1.35.0,<1.36.0)", "mypy-boto3-savingsplans (>=1.35.0,<1.36.0)", "mypy-boto3-scheduler (>=1.35.0,<1.36.0)", "mypy-boto3-schemas (>=1.35.0,<1.36.0)", "mypy-boto3-sdb (>=1.35.0,<1.36.0)", "mypy-boto3-secretsmanager (>=1.35.0,<1.36.0)", "mypy-boto3-securityhub (>=1.35.0,<1.36.0)", "mypy-boto3-securitylake (>=1.35.0,<1.36.0)", "mypy-boto3-serverlessrepo (>=1.35.0,<1.36.0)", "mypy-boto3-service-quotas (>=1.35.0,<1.36.0)", "mypy-boto3-servicecatalog (>=1.35.0,<1.36.0)", "mypy-boto3-servicecatalog-appregistry (>=1.35.0,<1.36.0)", "mypy-boto3-servicediscovery (>=1.35.0,<1.36.0)", "mypy-boto3-ses (>=1.35.0,<1.36.0)", "mypy-boto3-sesv2 (>=1.35.0,<1.36.0)", "mypy-boto3-shield (>=1.35.0,<1.36.0)", "mypy-boto3-signer (>=1.35.0,<1.36.0)", "mypy-boto3-simspaceweaver (>=1.35.0,<1.36.0)", "mypy-boto3-sms (>=1.35.0,<1.36.0)", "mypy-boto3-sms-voice (>=1.35.0,<1.36.0)", "mypy-boto3-snow-device-management (>=1.35.0,<1.36.0)", "mypy-boto3-snowball (>=1.35.0,<1.36.0)", "mypy-boto3-sns (>=1.35.0,<1.36.0)", "mypy-boto3-sqs (>=1.35.0,<1.36.0)", "mypy-boto3-ssm (>=1.35.0,<1.36.0)", "mypy-boto3-ssm-contacts (>=1.35.0,<1.36.0)", "mypy-boto3-ssm-incidents (>=1.35.0,<1.36.0)", "mypy-boto3-ssm-quicksetup (>=1.35.0,<1.36.0)", "mypy-boto3-ssm-sap (>=1.35.0,<1.36.0)", "mypy-boto3-sso (>=1.35.0,<1.36.0)", "mypy-boto3-sso-admin (>=1.35.0,<1.36.0)", "mypy-boto3-sso-oidc (>=1.35.0,<1.36.0)", "mypy-boto3-stepfunctions (>=1.35.0,<1.36.0)", "mypy-boto3-storagegateway (>=1.35.0,<1.36.0)", "mypy-boto3-sts (>=1.35.0,<1.36.0)", "mypy-boto3-supplychain 
(>=1.35.0,<1.36.0)", "mypy-boto3-support (>=1.35.0,<1.36.0)", "mypy-boto3-support-app (>=1.35.0,<1.36.0)", "mypy-boto3-swf (>=1.35.0,<1.36.0)", "mypy-boto3-synthetics (>=1.35.0,<1.36.0)", "mypy-boto3-taxsettings (>=1.35.0,<1.36.0)", "mypy-boto3-textract (>=1.35.0,<1.36.0)", "mypy-boto3-timestream-influxdb (>=1.35.0,<1.36.0)", "mypy-boto3-timestream-query (>=1.35.0,<1.36.0)", "mypy-boto3-timestream-write (>=1.35.0,<1.36.0)", "mypy-boto3-tnb (>=1.35.0,<1.36.0)", "mypy-boto3-transcribe (>=1.35.0,<1.36.0)", "mypy-boto3-transfer (>=1.35.0,<1.36.0)", "mypy-boto3-translate (>=1.35.0,<1.36.0)", "mypy-boto3-trustedadvisor (>=1.35.0,<1.36.0)", "mypy-boto3-verifiedpermissions (>=1.35.0,<1.36.0)", "mypy-boto3-voice-id (>=1.35.0,<1.36.0)", "mypy-boto3-vpc-lattice (>=1.35.0,<1.36.0)", "mypy-boto3-waf (>=1.35.0,<1.36.0)", "mypy-boto3-waf-regional (>=1.35.0,<1.36.0)", "mypy-boto3-wafv2 (>=1.35.0,<1.36.0)", "mypy-boto3-wellarchitected (>=1.35.0,<1.36.0)", "mypy-boto3-wisdom (>=1.35.0,<1.36.0)", "mypy-boto3-workdocs (>=1.35.0,<1.36.0)", "mypy-boto3-worklink (>=1.35.0,<1.36.0)", "mypy-boto3-workmail (>=1.35.0,<1.36.0)", "mypy-boto3-workmailmessageflow (>=1.35.0,<1.36.0)", "mypy-boto3-workspaces (>=1.35.0,<1.36.0)", "mypy-boto3-workspaces-thin-client (>=1.35.0,<1.36.0)", "mypy-boto3-workspaces-web (>=1.35.0,<1.36.0)", "mypy-boto3-xray (>=1.35.0,<1.36.0)"] +amp = ["mypy-boto3-amp (>=1.35.0,<1.36.0)"] +amplify = ["mypy-boto3-amplify (>=1.35.0,<1.36.0)"] +amplifybackend = ["mypy-boto3-amplifybackend (>=1.35.0,<1.36.0)"] +amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.35.0,<1.36.0)"] +apigateway = ["mypy-boto3-apigateway (>=1.35.0,<1.36.0)"] +apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.35.0,<1.36.0)"] +apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.35.0,<1.36.0)"] +appconfig = ["mypy-boto3-appconfig (>=1.35.0,<1.36.0)"] +appconfigdata = ["mypy-boto3-appconfigdata (>=1.35.0,<1.36.0)"] +appfabric = ["mypy-boto3-appfabric (>=1.35.0,<1.36.0)"] +appflow = ["mypy-boto3-appflow (>=1.35.0,<1.36.0)"] +appintegrations = ["mypy-boto3-appintegrations (>=1.35.0,<1.36.0)"] +application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.35.0,<1.36.0)"] +application-insights = ["mypy-boto3-application-insights (>=1.35.0,<1.36.0)"] +application-signals = ["mypy-boto3-application-signals (>=1.35.0,<1.36.0)"] +applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.35.0,<1.36.0)"] +appmesh = ["mypy-boto3-appmesh (>=1.35.0,<1.36.0)"] +apprunner = ["mypy-boto3-apprunner (>=1.35.0,<1.36.0)"] +appstream = ["mypy-boto3-appstream (>=1.35.0,<1.36.0)"] +appsync = ["mypy-boto3-appsync (>=1.35.0,<1.36.0)"] +apptest = ["mypy-boto3-apptest (>=1.35.0,<1.36.0)"] +arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.35.0,<1.36.0)"] +artifact = ["mypy-boto3-artifact (>=1.35.0,<1.36.0)"] +athena = ["mypy-boto3-athena (>=1.35.0,<1.36.0)"] +auditmanager = ["mypy-boto3-auditmanager (>=1.35.0,<1.36.0)"] +autoscaling = ["mypy-boto3-autoscaling (>=1.35.0,<1.36.0)"] +autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.35.0,<1.36.0)"] +b2bi = ["mypy-boto3-b2bi (>=1.35.0,<1.36.0)"] +backup = ["mypy-boto3-backup (>=1.35.0,<1.36.0)"] +backup-gateway = ["mypy-boto3-backup-gateway (>=1.35.0,<1.36.0)"] +batch = ["mypy-boto3-batch (>=1.35.0,<1.36.0)"] +bcm-data-exports = ["mypy-boto3-bcm-data-exports (>=1.35.0,<1.36.0)"] +bedrock = ["mypy-boto3-bedrock (>=1.35.0,<1.36.0)"] +bedrock-agent = ["mypy-boto3-bedrock-agent (>=1.35.0,<1.36.0)"] +bedrock-agent-runtime = ["mypy-boto3-bedrock-agent-runtime 
(>=1.35.0,<1.36.0)"] +bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.35.0,<1.36.0)"] +billingconductor = ["mypy-boto3-billingconductor (>=1.35.0,<1.36.0)"] +boto3 = ["boto3 (==1.35.2)", "botocore (==1.35.2)"] +braket = ["mypy-boto3-braket (>=1.35.0,<1.36.0)"] +budgets = ["mypy-boto3-budgets (>=1.35.0,<1.36.0)"] +ce = ["mypy-boto3-ce (>=1.35.0,<1.36.0)"] +chatbot = ["mypy-boto3-chatbot (>=1.35.0,<1.36.0)"] +chime = ["mypy-boto3-chime (>=1.35.0,<1.36.0)"] +chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.35.0,<1.36.0)"] +chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.35.0,<1.36.0)"] +chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.35.0,<1.36.0)"] +chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.35.0,<1.36.0)"] +chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.35.0,<1.36.0)"] +cleanrooms = ["mypy-boto3-cleanrooms (>=1.35.0,<1.36.0)"] +cleanroomsml = ["mypy-boto3-cleanroomsml (>=1.35.0,<1.36.0)"] +cloud9 = ["mypy-boto3-cloud9 (>=1.35.0,<1.36.0)"] +cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.35.0,<1.36.0)"] +clouddirectory = ["mypy-boto3-clouddirectory (>=1.35.0,<1.36.0)"] +cloudformation = ["mypy-boto3-cloudformation (>=1.35.0,<1.36.0)"] +cloudfront = ["mypy-boto3-cloudfront (>=1.35.0,<1.36.0)"] +cloudfront-keyvaluestore = ["mypy-boto3-cloudfront-keyvaluestore (>=1.35.0,<1.36.0)"] +cloudhsm = ["mypy-boto3-cloudhsm (>=1.35.0,<1.36.0)"] +cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.35.0,<1.36.0)"] +cloudsearch = ["mypy-boto3-cloudsearch (>=1.35.0,<1.36.0)"] +cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.35.0,<1.36.0)"] +cloudtrail = ["mypy-boto3-cloudtrail (>=1.35.0,<1.36.0)"] +cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.35.0,<1.36.0)"] +cloudwatch = ["mypy-boto3-cloudwatch (>=1.35.0,<1.36.0)"] +codeartifact = ["mypy-boto3-codeartifact (>=1.35.0,<1.36.0)"] +codebuild = ["mypy-boto3-codebuild (>=1.35.0,<1.36.0)"] +codecatalyst = ["mypy-boto3-codecatalyst (>=1.35.0,<1.36.0)"] +codecommit = ["mypy-boto3-codecommit (>=1.35.0,<1.36.0)"] +codeconnections = ["mypy-boto3-codeconnections (>=1.35.0,<1.36.0)"] +codedeploy = ["mypy-boto3-codedeploy (>=1.35.0,<1.36.0)"] +codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.35.0,<1.36.0)"] +codeguru-security = ["mypy-boto3-codeguru-security (>=1.35.0,<1.36.0)"] +codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.35.0,<1.36.0)"] +codepipeline = ["mypy-boto3-codepipeline (>=1.35.0,<1.36.0)"] +codestar = ["mypy-boto3-codestar (>=1.35.0,<1.36.0)"] +codestar-connections = ["mypy-boto3-codestar-connections (>=1.35.0,<1.36.0)"] +codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.35.0,<1.36.0)"] +cognito-identity = ["mypy-boto3-cognito-identity (>=1.35.0,<1.36.0)"] +cognito-idp = ["mypy-boto3-cognito-idp (>=1.35.0,<1.36.0)"] +cognito-sync = ["mypy-boto3-cognito-sync (>=1.35.0,<1.36.0)"] +comprehend = ["mypy-boto3-comprehend (>=1.35.0,<1.36.0)"] +comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.35.0,<1.36.0)"] +compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.35.0,<1.36.0)"] +config = ["mypy-boto3-config (>=1.35.0,<1.36.0)"] +connect = ["mypy-boto3-connect (>=1.35.0,<1.36.0)"] +connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.35.0,<1.36.0)"] +connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.35.0,<1.36.0)"] +connectcases = ["mypy-boto3-connectcases (>=1.35.0,<1.36.0)"] +connectparticipant = ["mypy-boto3-connectparticipant (>=1.35.0,<1.36.0)"] +controlcatalog = ["mypy-boto3-controlcatalog (>=1.35.0,<1.36.0)"] +controltower = 
["mypy-boto3-controltower (>=1.35.0,<1.36.0)"] +cost-optimization-hub = ["mypy-boto3-cost-optimization-hub (>=1.35.0,<1.36.0)"] +cur = ["mypy-boto3-cur (>=1.35.0,<1.36.0)"] +customer-profiles = ["mypy-boto3-customer-profiles (>=1.35.0,<1.36.0)"] +databrew = ["mypy-boto3-databrew (>=1.35.0,<1.36.0)"] +dataexchange = ["mypy-boto3-dataexchange (>=1.35.0,<1.36.0)"] +datapipeline = ["mypy-boto3-datapipeline (>=1.35.0,<1.36.0)"] +datasync = ["mypy-boto3-datasync (>=1.35.0,<1.36.0)"] +datazone = ["mypy-boto3-datazone (>=1.35.0,<1.36.0)"] +dax = ["mypy-boto3-dax (>=1.35.0,<1.36.0)"] +deadline = ["mypy-boto3-deadline (>=1.35.0,<1.36.0)"] +detective = ["mypy-boto3-detective (>=1.35.0,<1.36.0)"] +devicefarm = ["mypy-boto3-devicefarm (>=1.35.0,<1.36.0)"] +devops-guru = ["mypy-boto3-devops-guru (>=1.35.0,<1.36.0)"] +directconnect = ["mypy-boto3-directconnect (>=1.35.0,<1.36.0)"] +discovery = ["mypy-boto3-discovery (>=1.35.0,<1.36.0)"] +dlm = ["mypy-boto3-dlm (>=1.35.0,<1.36.0)"] +dms = ["mypy-boto3-dms (>=1.35.0,<1.36.0)"] +docdb = ["mypy-boto3-docdb (>=1.35.0,<1.36.0)"] +docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.35.0,<1.36.0)"] +drs = ["mypy-boto3-drs (>=1.35.0,<1.36.0)"] +ds = ["mypy-boto3-ds (>=1.35.0,<1.36.0)"] +dynamodb = ["mypy-boto3-dynamodb (>=1.35.0,<1.36.0)"] +dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.35.0,<1.36.0)"] +ebs = ["mypy-boto3-ebs (>=1.35.0,<1.36.0)"] +ec2 = ["mypy-boto3-ec2 (>=1.35.0,<1.36.0)"] +ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.35.0,<1.36.0)"] +ecr = ["mypy-boto3-ecr (>=1.35.0,<1.36.0)"] +ecr-public = ["mypy-boto3-ecr-public (>=1.35.0,<1.36.0)"] +ecs = ["mypy-boto3-ecs (>=1.35.0,<1.36.0)"] +efs = ["mypy-boto3-efs (>=1.35.0,<1.36.0)"] +eks = ["mypy-boto3-eks (>=1.35.0,<1.36.0)"] +eks-auth = ["mypy-boto3-eks-auth (>=1.35.0,<1.36.0)"] +elastic-inference = ["mypy-boto3-elastic-inference (>=1.35.0,<1.36.0)"] +elasticache = ["mypy-boto3-elasticache (>=1.35.0,<1.36.0)"] +elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.35.0,<1.36.0)"] +elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.35.0,<1.36.0)"] +elb = ["mypy-boto3-elb (>=1.35.0,<1.36.0)"] +elbv2 = ["mypy-boto3-elbv2 (>=1.35.0,<1.36.0)"] +emr = ["mypy-boto3-emr (>=1.35.0,<1.36.0)"] +emr-containers = ["mypy-boto3-emr-containers (>=1.35.0,<1.36.0)"] +emr-serverless = ["mypy-boto3-emr-serverless (>=1.35.0,<1.36.0)"] +entityresolution = ["mypy-boto3-entityresolution (>=1.35.0,<1.36.0)"] +es = ["mypy-boto3-es (>=1.35.0,<1.36.0)"] +essential = ["mypy-boto3-cloudformation (>=1.35.0,<1.36.0)", "mypy-boto3-dynamodb (>=1.35.0,<1.36.0)", "mypy-boto3-ec2 (>=1.35.0,<1.36.0)", "mypy-boto3-lambda (>=1.35.0,<1.36.0)", "mypy-boto3-rds (>=1.35.0,<1.36.0)", "mypy-boto3-s3 (>=1.35.0,<1.36.0)", "mypy-boto3-sqs (>=1.35.0,<1.36.0)"] +events = ["mypy-boto3-events (>=1.35.0,<1.36.0)"] +evidently = ["mypy-boto3-evidently (>=1.35.0,<1.36.0)"] +finspace = ["mypy-boto3-finspace (>=1.35.0,<1.36.0)"] +finspace-data = ["mypy-boto3-finspace-data (>=1.35.0,<1.36.0)"] +firehose = ["mypy-boto3-firehose (>=1.35.0,<1.36.0)"] +fis = ["mypy-boto3-fis (>=1.35.0,<1.36.0)"] +fms = ["mypy-boto3-fms (>=1.35.0,<1.36.0)"] +forecast = ["mypy-boto3-forecast (>=1.35.0,<1.36.0)"] +forecastquery = ["mypy-boto3-forecastquery (>=1.35.0,<1.36.0)"] +frauddetector = ["mypy-boto3-frauddetector (>=1.35.0,<1.36.0)"] +freetier = ["mypy-boto3-freetier (>=1.35.0,<1.36.0)"] +fsx = ["mypy-boto3-fsx (>=1.35.0,<1.36.0)"] +gamelift = ["mypy-boto3-gamelift (>=1.35.0,<1.36.0)"] +glacier = ["mypy-boto3-glacier (>=1.35.0,<1.36.0)"] 
+globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.35.0,<1.36.0)"] +glue = ["mypy-boto3-glue (>=1.35.0,<1.36.0)"] +grafana = ["mypy-boto3-grafana (>=1.35.0,<1.36.0)"] +greengrass = ["mypy-boto3-greengrass (>=1.35.0,<1.36.0)"] +greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.35.0,<1.36.0)"] +groundstation = ["mypy-boto3-groundstation (>=1.35.0,<1.36.0)"] +guardduty = ["mypy-boto3-guardduty (>=1.35.0,<1.36.0)"] +health = ["mypy-boto3-health (>=1.35.0,<1.36.0)"] +healthlake = ["mypy-boto3-healthlake (>=1.35.0,<1.36.0)"] +iam = ["mypy-boto3-iam (>=1.35.0,<1.36.0)"] +identitystore = ["mypy-boto3-identitystore (>=1.35.0,<1.36.0)"] +imagebuilder = ["mypy-boto3-imagebuilder (>=1.35.0,<1.36.0)"] +importexport = ["mypy-boto3-importexport (>=1.35.0,<1.36.0)"] +inspector = ["mypy-boto3-inspector (>=1.35.0,<1.36.0)"] +inspector-scan = ["mypy-boto3-inspector-scan (>=1.35.0,<1.36.0)"] +inspector2 = ["mypy-boto3-inspector2 (>=1.35.0,<1.36.0)"] +internetmonitor = ["mypy-boto3-internetmonitor (>=1.35.0,<1.36.0)"] +iot = ["mypy-boto3-iot (>=1.35.0,<1.36.0)"] +iot-data = ["mypy-boto3-iot-data (>=1.35.0,<1.36.0)"] +iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.35.0,<1.36.0)"] +iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.35.0,<1.36.0)"] +iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.35.0,<1.36.0)"] +iotanalytics = ["mypy-boto3-iotanalytics (>=1.35.0,<1.36.0)"] +iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.35.0,<1.36.0)"] +iotevents = ["mypy-boto3-iotevents (>=1.35.0,<1.36.0)"] +iotevents-data = ["mypy-boto3-iotevents-data (>=1.35.0,<1.36.0)"] +iotfleethub = ["mypy-boto3-iotfleethub (>=1.35.0,<1.36.0)"] +iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.35.0,<1.36.0)"] +iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.35.0,<1.36.0)"] +iotsitewise = ["mypy-boto3-iotsitewise (>=1.35.0,<1.36.0)"] +iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.35.0,<1.36.0)"] +iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.35.0,<1.36.0)"] +iotwireless = ["mypy-boto3-iotwireless (>=1.35.0,<1.36.0)"] +ivs = ["mypy-boto3-ivs (>=1.35.0,<1.36.0)"] +ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.35.0,<1.36.0)"] +ivschat = ["mypy-boto3-ivschat (>=1.35.0,<1.36.0)"] +kafka = ["mypy-boto3-kafka (>=1.35.0,<1.36.0)"] +kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.35.0,<1.36.0)"] +kendra = ["mypy-boto3-kendra (>=1.35.0,<1.36.0)"] +kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.35.0,<1.36.0)"] +keyspaces = ["mypy-boto3-keyspaces (>=1.35.0,<1.36.0)"] +kinesis = ["mypy-boto3-kinesis (>=1.35.0,<1.36.0)"] +kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.35.0,<1.36.0)"] +kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.35.0,<1.36.0)"] +kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.35.0,<1.36.0)"] +kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.35.0,<1.36.0)"] +kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.35.0,<1.36.0)"] +kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.35.0,<1.36.0)"] +kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.35.0,<1.36.0)"] +kms = ["mypy-boto3-kms (>=1.35.0,<1.36.0)"] +lakeformation = ["mypy-boto3-lakeformation (>=1.35.0,<1.36.0)"] +lambda = ["mypy-boto3-lambda (>=1.35.0,<1.36.0)"] +launch-wizard = ["mypy-boto3-launch-wizard (>=1.35.0,<1.36.0)"] +lex-models = ["mypy-boto3-lex-models (>=1.35.0,<1.36.0)"] +lex-runtime = ["mypy-boto3-lex-runtime (>=1.35.0,<1.36.0)"] +lexv2-models = ["mypy-boto3-lexv2-models (>=1.35.0,<1.36.0)"] +lexv2-runtime = ["mypy-boto3-lexv2-runtime 
(>=1.35.0,<1.36.0)"] +license-manager = ["mypy-boto3-license-manager (>=1.35.0,<1.36.0)"] +license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.35.0,<1.36.0)"] +license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.35.0,<1.36.0)"] +lightsail = ["mypy-boto3-lightsail (>=1.35.0,<1.36.0)"] +location = ["mypy-boto3-location (>=1.35.0,<1.36.0)"] +logs = ["mypy-boto3-logs (>=1.35.0,<1.36.0)"] +lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.35.0,<1.36.0)"] +lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.35.0,<1.36.0)"] +lookoutvision = ["mypy-boto3-lookoutvision (>=1.35.0,<1.36.0)"] +m2 = ["mypy-boto3-m2 (>=1.35.0,<1.36.0)"] +machinelearning = ["mypy-boto3-machinelearning (>=1.35.0,<1.36.0)"] +macie2 = ["mypy-boto3-macie2 (>=1.35.0,<1.36.0)"] +mailmanager = ["mypy-boto3-mailmanager (>=1.35.0,<1.36.0)"] +managedblockchain = ["mypy-boto3-managedblockchain (>=1.35.0,<1.36.0)"] +managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.35.0,<1.36.0)"] +marketplace-agreement = ["mypy-boto3-marketplace-agreement (>=1.35.0,<1.36.0)"] +marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.35.0,<1.36.0)"] +marketplace-deployment = ["mypy-boto3-marketplace-deployment (>=1.35.0,<1.36.0)"] +marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.35.0,<1.36.0)"] +marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.35.0,<1.36.0)"] +mediaconnect = ["mypy-boto3-mediaconnect (>=1.35.0,<1.36.0)"] +mediaconvert = ["mypy-boto3-mediaconvert (>=1.35.0,<1.36.0)"] +medialive = ["mypy-boto3-medialive (>=1.35.0,<1.36.0)"] +mediapackage = ["mypy-boto3-mediapackage (>=1.35.0,<1.36.0)"] +mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.35.0,<1.36.0)"] +mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.35.0,<1.36.0)"] +mediastore = ["mypy-boto3-mediastore (>=1.35.0,<1.36.0)"] +mediastore-data = ["mypy-boto3-mediastore-data (>=1.35.0,<1.36.0)"] +mediatailor = ["mypy-boto3-mediatailor (>=1.35.0,<1.36.0)"] +medical-imaging = ["mypy-boto3-medical-imaging (>=1.35.0,<1.36.0)"] +memorydb = ["mypy-boto3-memorydb (>=1.35.0,<1.36.0)"] +meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.35.0,<1.36.0)"] +mgh = ["mypy-boto3-mgh (>=1.35.0,<1.36.0)"] +mgn = ["mypy-boto3-mgn (>=1.35.0,<1.36.0)"] +migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.35.0,<1.36.0)"] +migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.35.0,<1.36.0)"] +migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.35.0,<1.36.0)"] +migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.35.0,<1.36.0)"] +mq = ["mypy-boto3-mq (>=1.35.0,<1.36.0)"] +mturk = ["mypy-boto3-mturk (>=1.35.0,<1.36.0)"] +mwaa = ["mypy-boto3-mwaa (>=1.35.0,<1.36.0)"] +neptune = ["mypy-boto3-neptune (>=1.35.0,<1.36.0)"] +neptune-graph = ["mypy-boto3-neptune-graph (>=1.35.0,<1.36.0)"] +neptunedata = ["mypy-boto3-neptunedata (>=1.35.0,<1.36.0)"] +network-firewall = ["mypy-boto3-network-firewall (>=1.35.0,<1.36.0)"] +networkmanager = ["mypy-boto3-networkmanager (>=1.35.0,<1.36.0)"] +networkmonitor = ["mypy-boto3-networkmonitor (>=1.35.0,<1.36.0)"] +nimble = ["mypy-boto3-nimble (>=1.35.0,<1.36.0)"] +oam = ["mypy-boto3-oam (>=1.35.0,<1.36.0)"] +omics = ["mypy-boto3-omics (>=1.35.0,<1.36.0)"] +opensearch = ["mypy-boto3-opensearch (>=1.35.0,<1.36.0)"] +opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.35.0,<1.36.0)"] +opsworks = ["mypy-boto3-opsworks (>=1.35.0,<1.36.0)"] +opsworkscm = 
["mypy-boto3-opsworkscm (>=1.35.0,<1.36.0)"] +organizations = ["mypy-boto3-organizations (>=1.35.0,<1.36.0)"] +osis = ["mypy-boto3-osis (>=1.35.0,<1.36.0)"] +outposts = ["mypy-boto3-outposts (>=1.35.0,<1.36.0)"] +panorama = ["mypy-boto3-panorama (>=1.35.0,<1.36.0)"] +payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.35.0,<1.36.0)"] +payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.35.0,<1.36.0)"] +pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.35.0,<1.36.0)"] +pca-connector-scep = ["mypy-boto3-pca-connector-scep (>=1.35.0,<1.36.0)"] +personalize = ["mypy-boto3-personalize (>=1.35.0,<1.36.0)"] +personalize-events = ["mypy-boto3-personalize-events (>=1.35.0,<1.36.0)"] +personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.35.0,<1.36.0)"] +pi = ["mypy-boto3-pi (>=1.35.0,<1.36.0)"] +pinpoint = ["mypy-boto3-pinpoint (>=1.35.0,<1.36.0)"] +pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.35.0,<1.36.0)"] +pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.35.0,<1.36.0)"] +pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.35.0,<1.36.0)"] +pipes = ["mypy-boto3-pipes (>=1.35.0,<1.36.0)"] +polly = ["mypy-boto3-polly (>=1.35.0,<1.36.0)"] +pricing = ["mypy-boto3-pricing (>=1.35.0,<1.36.0)"] +privatenetworks = ["mypy-boto3-privatenetworks (>=1.35.0,<1.36.0)"] +proton = ["mypy-boto3-proton (>=1.35.0,<1.36.0)"] +qapps = ["mypy-boto3-qapps (>=1.35.0,<1.36.0)"] +qbusiness = ["mypy-boto3-qbusiness (>=1.35.0,<1.36.0)"] +qconnect = ["mypy-boto3-qconnect (>=1.35.0,<1.36.0)"] +qldb = ["mypy-boto3-qldb (>=1.35.0,<1.36.0)"] +qldb-session = ["mypy-boto3-qldb-session (>=1.35.0,<1.36.0)"] +quicksight = ["mypy-boto3-quicksight (>=1.35.0,<1.36.0)"] +ram = ["mypy-boto3-ram (>=1.35.0,<1.36.0)"] +rbin = ["mypy-boto3-rbin (>=1.35.0,<1.36.0)"] +rds = ["mypy-boto3-rds (>=1.35.0,<1.36.0)"] +rds-data = ["mypy-boto3-rds-data (>=1.35.0,<1.36.0)"] +redshift = ["mypy-boto3-redshift (>=1.35.0,<1.36.0)"] +redshift-data = ["mypy-boto3-redshift-data (>=1.35.0,<1.36.0)"] +redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.35.0,<1.36.0)"] +rekognition = ["mypy-boto3-rekognition (>=1.35.0,<1.36.0)"] +repostspace = ["mypy-boto3-repostspace (>=1.35.0,<1.36.0)"] +resiliencehub = ["mypy-boto3-resiliencehub (>=1.35.0,<1.36.0)"] +resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.35.0,<1.36.0)"] +resource-groups = ["mypy-boto3-resource-groups (>=1.35.0,<1.36.0)"] +resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.35.0,<1.36.0)"] +robomaker = ["mypy-boto3-robomaker (>=1.35.0,<1.36.0)"] +rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.35.0,<1.36.0)"] +route53 = ["mypy-boto3-route53 (>=1.35.0,<1.36.0)"] +route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.35.0,<1.36.0)"] +route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.35.0,<1.36.0)"] +route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.35.0,<1.36.0)"] +route53domains = ["mypy-boto3-route53domains (>=1.35.0,<1.36.0)"] +route53profiles = ["mypy-boto3-route53profiles (>=1.35.0,<1.36.0)"] +route53resolver = ["mypy-boto3-route53resolver (>=1.35.0,<1.36.0)"] +rum = ["mypy-boto3-rum (>=1.35.0,<1.36.0)"] +s3 = ["mypy-boto3-s3 (>=1.35.0,<1.36.0)"] +s3control = ["mypy-boto3-s3control (>=1.35.0,<1.36.0)"] +s3outposts = ["mypy-boto3-s3outposts (>=1.35.0,<1.36.0)"] +sagemaker = ["mypy-boto3-sagemaker (>=1.35.0,<1.36.0)"] +sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.35.0,<1.36.0)"] +sagemaker-edge = 
["mypy-boto3-sagemaker-edge (>=1.35.0,<1.36.0)"] +sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.35.0,<1.36.0)"] +sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.35.0,<1.36.0)"] +sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.35.0,<1.36.0)"] +sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.35.0,<1.36.0)"] +savingsplans = ["mypy-boto3-savingsplans (>=1.35.0,<1.36.0)"] +scheduler = ["mypy-boto3-scheduler (>=1.35.0,<1.36.0)"] +schemas = ["mypy-boto3-schemas (>=1.35.0,<1.36.0)"] +sdb = ["mypy-boto3-sdb (>=1.35.0,<1.36.0)"] +secretsmanager = ["mypy-boto3-secretsmanager (>=1.35.0,<1.36.0)"] +securityhub = ["mypy-boto3-securityhub (>=1.35.0,<1.36.0)"] +securitylake = ["mypy-boto3-securitylake (>=1.35.0,<1.36.0)"] +serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.35.0,<1.36.0)"] +service-quotas = ["mypy-boto3-service-quotas (>=1.35.0,<1.36.0)"] +servicecatalog = ["mypy-boto3-servicecatalog (>=1.35.0,<1.36.0)"] +servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.35.0,<1.36.0)"] +servicediscovery = ["mypy-boto3-servicediscovery (>=1.35.0,<1.36.0)"] +ses = ["mypy-boto3-ses (>=1.35.0,<1.36.0)"] +sesv2 = ["mypy-boto3-sesv2 (>=1.35.0,<1.36.0)"] +shield = ["mypy-boto3-shield (>=1.35.0,<1.36.0)"] +signer = ["mypy-boto3-signer (>=1.35.0,<1.36.0)"] +simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.35.0,<1.36.0)"] +sms = ["mypy-boto3-sms (>=1.35.0,<1.36.0)"] +sms-voice = ["mypy-boto3-sms-voice (>=1.35.0,<1.36.0)"] +snow-device-management = ["mypy-boto3-snow-device-management (>=1.35.0,<1.36.0)"] +snowball = ["mypy-boto3-snowball (>=1.35.0,<1.36.0)"] +sns = ["mypy-boto3-sns (>=1.35.0,<1.36.0)"] +sqs = ["mypy-boto3-sqs (>=1.35.0,<1.36.0)"] +ssm = ["mypy-boto3-ssm (>=1.35.0,<1.36.0)"] +ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.35.0,<1.36.0)"] +ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.35.0,<1.36.0)"] +ssm-quicksetup = ["mypy-boto3-ssm-quicksetup (>=1.35.0,<1.36.0)"] +ssm-sap = ["mypy-boto3-ssm-sap (>=1.35.0,<1.36.0)"] +sso = ["mypy-boto3-sso (>=1.35.0,<1.36.0)"] +sso-admin = ["mypy-boto3-sso-admin (>=1.35.0,<1.36.0)"] +sso-oidc = ["mypy-boto3-sso-oidc (>=1.35.0,<1.36.0)"] +stepfunctions = ["mypy-boto3-stepfunctions (>=1.35.0,<1.36.0)"] +storagegateway = ["mypy-boto3-storagegateway (>=1.35.0,<1.36.0)"] +sts = ["mypy-boto3-sts (>=1.35.0,<1.36.0)"] +supplychain = ["mypy-boto3-supplychain (>=1.35.0,<1.36.0)"] +support = ["mypy-boto3-support (>=1.35.0,<1.36.0)"] +support-app = ["mypy-boto3-support-app (>=1.35.0,<1.36.0)"] +swf = ["mypy-boto3-swf (>=1.35.0,<1.36.0)"] +synthetics = ["mypy-boto3-synthetics (>=1.35.0,<1.36.0)"] +taxsettings = ["mypy-boto3-taxsettings (>=1.35.0,<1.36.0)"] +textract = ["mypy-boto3-textract (>=1.35.0,<1.36.0)"] +timestream-influxdb = ["mypy-boto3-timestream-influxdb (>=1.35.0,<1.36.0)"] +timestream-query = ["mypy-boto3-timestream-query (>=1.35.0,<1.36.0)"] +timestream-write = ["mypy-boto3-timestream-write (>=1.35.0,<1.36.0)"] +tnb = ["mypy-boto3-tnb (>=1.35.0,<1.36.0)"] +transcribe = ["mypy-boto3-transcribe (>=1.35.0,<1.36.0)"] +transfer = ["mypy-boto3-transfer (>=1.35.0,<1.36.0)"] +translate = ["mypy-boto3-translate (>=1.35.0,<1.36.0)"] +trustedadvisor = ["mypy-boto3-trustedadvisor (>=1.35.0,<1.36.0)"] +verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.35.0,<1.36.0)"] +voice-id = ["mypy-boto3-voice-id (>=1.35.0,<1.36.0)"] +vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.35.0,<1.36.0)"] +waf = ["mypy-boto3-waf (>=1.35.0,<1.36.0)"] +waf-regional = ["mypy-boto3-waf-regional 
(>=1.35.0,<1.36.0)"] +wafv2 = ["mypy-boto3-wafv2 (>=1.35.0,<1.36.0)"] +wellarchitected = ["mypy-boto3-wellarchitected (>=1.35.0,<1.36.0)"] +wisdom = ["mypy-boto3-wisdom (>=1.35.0,<1.36.0)"] +workdocs = ["mypy-boto3-workdocs (>=1.35.0,<1.36.0)"] +worklink = ["mypy-boto3-worklink (>=1.35.0,<1.36.0)"] +workmail = ["mypy-boto3-workmail (>=1.35.0,<1.36.0)"] +workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.35.0,<1.36.0)"] +workspaces = ["mypy-boto3-workspaces (>=1.35.0,<1.36.0)"] +workspaces-thin-client = ["mypy-boto3-workspaces-thin-client (>=1.35.0,<1.36.0)"] +workspaces-web = ["mypy-boto3-workspaces-web (>=1.35.0,<1.36.0)"] +xray = ["mypy-boto3-xray (>=1.35.0,<1.36.0)"] [[package]] name = "botocore" -version = "1.34.139" +version = "1.35.2" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.139-py3-none-any.whl", hash = "sha256:dd1e085d4caa2a4c1b7d83e3bc51416111c8238a35d498e9d3b04f3b63b086ba"}, - {file = "botocore-1.34.139.tar.gz", hash = "sha256:df023d8cf8999d574214dad4645cb90f9d2ccd1494f6ee2b57b1ab7522f6be77"}, + {file = "botocore-1.35.2-py3-none-any.whl", hash = "sha256:92b168d8be79055bb25754aa34d699866d8aa66abc69f8ce99b0c191bd9c6e70"}, + {file = "botocore-1.35.2.tar.gz", hash = "sha256:96c8eb6f0baed623a1b57ca9f24cb21d5508872cf0dfebb55527a85b6dbc76ba"}, ] [package.dependencies] @@ -607,115 +614,121 @@ urllib3 = [ ] [package.extras] -crt = ["awscrt (==0.20.11)"] +crt = ["awscrt (==0.21.2)"] [[package]] name = "botocore-stubs" -version = "1.29.93" +version = "1.34.159" description = "Type annotations and code completion for botocore" optional = false -python-versions = ">=3.7,<4.0" +python-versions = "<4.0,>=3.8" files = [ - {file = "botocore_stubs-1.29.93-py3-none-any.whl", hash = "sha256:595d1105bb46d2647e98218f4aa68c504b41c6fed2633f6071debe041e3eed4d"}, - {file = "botocore_stubs-1.29.93.tar.gz", hash = "sha256:b0aeb3676ab6a5fbea23ff4cc0ba2033399dcef69eed762494372c00e2685882"}, + {file = "botocore_stubs-1.34.159-py3-none-any.whl", hash = "sha256:b2bf4ff8dd4a39556f6338bdc7e75485282dd7e2c0b7dacdcfc219352c7865ea"}, + {file = "botocore_stubs-1.34.159.tar.gz", hash = "sha256:f9f51612960c0fa1b01638816ee3d1ca031f9efc3e1328e4f4d52253b573067a"}, ] [package.dependencies] types-awscrt = "*" +[package.extras] +botocore = ["botocore"] + [[package]] name = "bracex" -version = "2.3.post1" +version = "2.5" description = "Bash style brace expander." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "bracex-2.3.post1-py3-none-any.whl", hash = "sha256:351b7f20d56fb9ea91f9b9e9e7664db466eb234188c175fd943f8f755c807e73"}, - {file = "bracex-2.3.post1.tar.gz", hash = "sha256:e7b23fc8b2cd06d3dec0692baabecb249dda94e06a617901ff03a6c56fd71693"}, + {file = "bracex-2.5-py3-none-any.whl", hash = "sha256:d2fcf4b606a82ac325471affe1706dd9bbaa3536c91ef86a31f6b766f3dad1d0"}, + {file = "bracex-2.5.tar.gz", hash = "sha256:0725da5045e8d37ea9592ab3614d8b561e22c3c5fde3964699be672e072ab611"}, ] [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] name = "cffi" -version = "1.15.1" +version = "1.17.0" description = "Foreign Function Interface for Python calling C code." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = 
"cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, + {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, + {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, + {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, + {file = 
"cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, + {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, + {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, + {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, + {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, + 
{file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, + {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, + {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, + {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, + {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, + {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, ] [package.dependencies] @@ -914,63 +927,83 @@ cron = ["capturer (>=2.4)"] [[package]] name = "coverage" -version = "7.5.3" +version = "7.6.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, - {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, - {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, - {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, - {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, - {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, - {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, - {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, - {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, - {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, - {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, - {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, - {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, - {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = 
"coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = 
"coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [package.dependencies] @@ -1033,20 +1066,6 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] -[[package]] -name = "dill" -version = "0.3.6" -description = "serialize all of python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, - {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] - [[package]] name = "distlib" version = "0.3.6" @@ -1060,20 +1079,21 @@ files = [ [[package]] name = "doc8" -version = "0.11.2" +version = "1.1.1" description = "Style checker for Sphinx (or other) RST documentation" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "doc8-0.11.2-py3-none-any.whl", hash = "sha256:9187da8c9f115254bbe34f74e2bbbdd3eaa1b9e92efd19ccac7461e347b5055c"}, - {file = "doc8-0.11.2.tar.gz", hash = "sha256:c35a231f88f15c204659154ed3d499fa4d402d7e63d41cba7b54cf5e646123ab"}, + {file = "doc8-1.1.1-py3-none-any.whl", hash = "sha256:e493aa3f36820197c49f407583521bb76a0fde4fffbcd0e092be946ff95931ac"}, + {file = "doc8-1.1.1.tar.gz", hash = "sha256:d97a93e8f5a2efc4713a0804657dedad83745cca4cd1d88de9186f77f9776004"}, ] [package.dependencies] -docutils = "*" +docutils = ">=0.19,<0.21" Pygments = "*" restructuredtext-lint = ">=0.7" stevedore = "*" +tomli = {version = "*", markers = "python_version < \"3.11\""} [[package]] name = "docker" @@ -1099,46 +1119,28 @@ websockets = ["websocket-client (>=1.3.0)"] [[package]] name = 
"docutils" -version = "0.17.1" +version = "0.20.1" description = "Docutils -- Python Documentation Utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, - {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, -] - -[[package]] -name = "dunamai" -version = "1.21.1" -description = "Dynamic version generation" -optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" files = [ - {file = "dunamai-1.21.1-py3-none-any.whl", hash = "sha256:fe303541463648b8197c495decf62cd8f15234fb6d891a5f295015e452f656c8"}, - {file = "dunamai-1.21.1.tar.gz", hash = "sha256:d7fea28ad2faf20a6ca5ec121e5c68e55eec6b8ada23d9c387e4e7a574cc559f"}, + {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, + {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, ] -[package.dependencies] -packaging = ">=20.9" - [[package]] -name = "ecdsa" -version = "0.18.0" -description = "ECDSA cryptographic signature library (pure python)" +name = "eval-type-backport" +version = "0.2.0" +description = "Like `typing._eval_type`, but lets older Python versions use newer typing features." optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.8" files = [ - {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"}, - {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"}, + {file = "eval_type_backport-0.2.0-py3-none-any.whl", hash = "sha256:ac2f73d30d40c5a30a80b8739a789d6bb5e49fdffa66d7912667e2015d9c9933"}, + {file = "eval_type_backport-0.2.0.tar.gz", hash = "sha256:68796cfbc7371ebf923f03bdf7bef415f3ec098aeced24e054b253a0e78f7b37"}, ] -[package.dependencies] -six = ">=1.9.0" - [package.extras] -gmpy = ["gmpy"] -gmpy2 = ["gmpy2"] +tests = ["pytest"] [[package]] name = "exceptiongroup" @@ -1170,114 +1172,19 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "filelock" -version = "3.10.0" +version = "3.15.4" description = "A platform independent file lock." 
optional = false -python-versions = ">=3.7" -files = [ - {file = "filelock-3.10.0-py3-none-any.whl", hash = "sha256:e90b34656470756edf8b19656785c5fea73afa1953f3e1b0d645cef11cab3182"}, - {file = "filelock-3.10.0.tar.gz", hash = "sha256:3199fd0d3faea8b911be52b663dfccceb84c95949dd13179aa21436d1a79c4ce"}, -] - -[package.extras] -docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.1)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "flake8" -version = "7.1.0" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"}, - {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.12.0,<2.13.0" -pyflakes = ">=3.2.0,<3.3.0" - -[[package]] -name = "flake8-bugbear" -version = "24.4.26" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8_bugbear-24.4.26-py3-none-any.whl", hash = "sha256:cb430dd86bc821d79ccc0b030789a9c87a47a369667f12ba06e80f11305e8258"}, - {file = "flake8_bugbear-24.4.26.tar.gz", hash = "sha256:ff8d4ba5719019ebf98e754624c30c05cef0dadcf18a65d91c7567300e52a130"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -flake8 = ">=6.0.0" - -[package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] - -[[package]] -name = "flake8-comprehensions" -version = "3.14.0" -description = "A flake8 plugin to help you write better list/set/dict comprehensions." 
-optional = false python-versions = ">=3.8" files = [ - {file = "flake8_comprehensions-3.14.0-py3-none-any.whl", hash = "sha256:7b9d07d94aa88e62099a6d1931ddf16c344d4157deedf90fe0d8ee2846f30e97"}, - {file = "flake8_comprehensions-3.14.0.tar.gz", hash = "sha256:81768c61bfc064e1a06222df08a2580d97de10cb388694becaf987c331c6c0cf"}, -] - -[package.dependencies] -flake8 = ">=3.0,<3.2.0 || >3.2.0" - -[[package]] -name = "flake8-docstrings" -version = "1.7.0" -description = "Extension for flake8 which uses pydocstyle to check docstrings" -optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, - {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, ] -[package.dependencies] -flake8 = ">=3" -pydocstyle = ">=2.1" - -[[package]] -name = "flake8-print" -version = "5.0.0" -description = "print statement checker plugin for flake8" -optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8-print-5.0.0.tar.gz", hash = "sha256:76915a2a389cc1c0879636c219eb909c38501d3a43cc8dae542081c9ba48bdf9"}, - {file = "flake8_print-5.0.0-py3-none-any.whl", hash = "sha256:84a1a6ea10d7056b804221ac5e62b1cee1aefc897ce16f2e5c42d3046068f5d8"}, -] - -[package.dependencies] -flake8 = ">=3.0" -pycodestyle = "*" - -[[package]] -name = "flake8-use-fstring" -version = "1.4" -description = "Flake8 plugin for string formatting style." -optional = false -python-versions = ">=3.6" -files = [ - {file = "flake8-use-fstring-1.4.tar.gz", hash = "sha256:6550bf722585eb97dffa8343b0f1c372101f5c4ab5b07ebf0edd1c79880cdd39"}, -] - -[package.dependencies] -flake8 = ">=3" - [package.extras] -ci = ["coverage (==4.*)", "coveralls", "flake8-builtins", "flake8-commas", "flake8-fixme", "flake8-print", "flake8-quotes", "flake8-todo", "pytest (>=4)", "pytest-cov (>=2)"] -dev = ["coverage (==4.*)", "flake8-builtins", "flake8-commas", "flake8-fixme", "flake8-print", "flake8-quotes", "flake8-todo", "pytest (>=4)", "pytest-cov (>=2)"] -test = ["coverage (==4.*)", "flake8-builtins", "flake8-commas", "flake8-fixme", "flake8-print", "flake8-quotes", "flake8-todo", "pytest (>=4)", "pytest-cov (>=2)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] +typing = ["typing-extensions (>=4.8)"] [[package]] name = "formic2" @@ -1291,6 +1198,23 @@ files = [ {file = "formic2-1.0.3.tar.gz", hash = "sha256:21bedd18fbd9010638b7449c7ce008b424a5f8835c224b81fbd07f666bfc6808"}, ] +[[package]] +name = "furo" +version = "2024.8.6" +description = "A clean customisable Sphinx documentation theme." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "furo-2024.8.6-py3-none-any.whl", hash = "sha256:6cd97c58b47813d3619e63e9081169880fbe331f0ca883c871ff1f3f11814f5c"}, + {file = "furo-2024.8.6.tar.gz", hash = "sha256:b63e4cee8abfc3136d3bc03a3d45a76a850bada4d6374d24c1716b0e01394a01"}, +] + +[package.dependencies] +beautifulsoup4 = "*" +pygments = ">=2.7" +sphinx = ">=6.0,<9.0" +sphinx-basic-ng = ">=1.0.0.beta2" + [[package]] name = "gitdb" version = "4.0.10" @@ -1339,13 +1263,13 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve [[package]] name = "identify" -version = "2.5.21" +version = "2.6.0" description = "File identification library for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "identify-2.5.21-py2.py3-none-any.whl", hash = "sha256:69edcaffa8e91ae0f77d397af60f148b6b45a8044b2cc6d99cafa5b04793ff00"}, - {file = "identify-2.5.21.tar.gz", hash = "sha256:7671a05ef9cfaf8ff63b15d45a91a1147a03aaccb2976d4e9bd047cbbc508471"}, + {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, + {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, ] [package.extras] @@ -1424,20 +1348,6 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.extras] -colors = ["colorama (>=0.4.6)"] - [[package]] name = "jinja2" version = "3.1.4" @@ -1484,13 +1394,13 @@ pbr = "*" [[package]] name = "jsonpatch" -version = "1.32" +version = "1.33" description = "Apply JSON-Patches (RFC 6902)" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ - {file = "jsonpatch-1.32-py2.py3-none-any.whl", hash = "sha256:26ac385719ac9f54df8a2f0827bb8253aa3ea8ab7b3368457bcdb8c14595a397"}, - {file = "jsonpatch-1.32.tar.gz", hash = "sha256:b6ddfe6c3db30d81a96aaeceb6baf916094ffa23d7dd5fa2c13e13f8b6e600c2"}, + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, ] [package.dependencies] @@ -1498,63 +1408,79 @@ jsonpointer = ">=1.9" [[package]] name = "jsonpickle" -version = "3.0.1" -description = "Python library for serializing any arbitrary object graph into JSON" +version = "3.2.2" +description = "Python library for serializing arbitrary object graphs into JSON" optional = false python-versions = ">=3.7" files = [ - {file = "jsonpickle-3.0.1-py2.py3-none-any.whl", hash = "sha256:130d8b293ea0add3845de311aaba55e6d706d0bb17bc123bd2c8baf8a39ac77c"}, - {file = "jsonpickle-3.0.1.tar.gz", hash = "sha256:032538804795e73b94ead410800ac387fdb6de98f8882ac957fcd247e3a85200"}, + {file = "jsonpickle-3.2.2-py3-none-any.whl", hash = 
"sha256:87cd82d237fd72c5a34970e7222dddc0accc13fddf49af84111887ed9a9445aa"}, + {file = "jsonpickle-3.2.2.tar.gz", hash = "sha256:d425fd2b8afe9f5d7d57205153403fbf897782204437882a477e8eed60930f8c"}, ] [package.extras] -docs = ["jaraco.packaging (>=3.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-black-multipy", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8 (>=1.1.1)", "scikit-learn", "sqlalchemy"] -testing-libs = ["simplejson", "ujson"] +docs = ["furo", "rst.linker (>=1.9)", "sphinx"] +packaging = ["build", "twine"] +testing = ["bson", "ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-benchmark", "pytest-benchmark[histogram]", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-ruff (>=0.2.1)", "scikit-learn", "scipy", "scipy (>=1.9.3)", "simplejson", "sqlalchemy", "ujson"] [[package]] name = "jsonpointer" -version = "2.3" +version = "3.0.0" description = "Identify specific nodes in a JSON document (RFC 6901)" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.7" files = [ - {file = "jsonpointer-2.3-py2.py3-none-any.whl", hash = "sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9"}, - {file = "jsonpointer-2.3.tar.gz", hash = "sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"}, + {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, + {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, ] [[package]] name = "jsonschema" -version = "4.17.3" +version = "4.23.0" description = "An implementation of JSON Schema validation for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, - {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, + {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, + {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, ] [package.dependencies] -attrs = ">=17.4.0" -pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" +attrs = ">=22.2.0" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.12.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = 
"jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, +] + +[package.dependencies] +referencing = ">=0.31.0" [[package]] name = "jsx-lexer" -version = "1.0.0" +version = "2.0.1" description = "A JSX lexer for Pygments" optional = false python-versions = "*" files = [ - {file = "jsx-lexer-1.0.0.tar.gz", hash = "sha256:b5f5270cad47b065417fd7bdb780199d5166bd4a88a2a0fd7412e90e7a59b5cc"}, - {file = "jsx_lexer-1.0.0-py2.py3-none-any.whl", hash = "sha256:491783c7ae75f2cbde18f66c1362e93afd6e281078482215b70c1a8dfa61e57b"}, + {file = "jsx-lexer-2.0.1.tar.gz", hash = "sha256:0d9aa653e74d7973d74021dde8349896c0df094d8e40349b92b35e0930ed7f71"}, + {file = "jsx_lexer-2.0.1-py2.py3-none-any.whl", hash = "sha256:508a08757764356aa36fd703596fdd59f789104f44b6568c7a14e27e62e57ad4"}, ] [package.dependencies] -Pygments = ">=2.7" +Pygments = ">=2.12.0" [[package]] name = "junit-xml" @@ -1586,51 +1512,6 @@ atomic-cache = ["atomicwrites"] nearley = ["js2py"] regex = ["regex"] -[[package]] -name = "lazy-object-proxy" -version = "1.9.0" -description = "A fast and thorough lazy object proxy." -optional = false -python-versions = ">=3.7" -files = [ - {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, - {file = 
"lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, -] - [[package]] name = "lib-detect-testenv" version = "2.0.3" @@ -1646,151 +1527,120 @@ files = [ cli-exit-tools = "*" click = "*" -[[package]] -name = "macholib" -version = "1.16.3" -description = "Mach-O header analysis and editing" -optional = false -python-versions = "*" -files = [ - {file = "macholib-1.16.3-py2.py3-none-any.whl", hash = "sha256:0e315d7583d38b8c77e815b1ecbdbf504a8258d8b3e17b61165c6feb60d18f2c"}, - {file = "macholib-1.16.3.tar.gz", hash = "sha256:07ae9e15e8e4cd9a788013d81f5908b3609aa76f9b1421bae9c4d7606ec86a30"}, -] - -[package.dependencies] -altgraph = ">=0.17" - [[package]] name = "markupsafe" -version = "2.1.2" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, - {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, -] - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = 
"sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = 
"MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] -[[package]] -name = "mock" -version = "5.1.0" -description = "Rolling backport of unittest.mock for all Pythons" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mock-5.1.0-py3-none-any.whl", hash = "sha256:18c694e5ae8a208cdb3d2c20a993ca1a7b0efa258c247a1e565150f477f83744"}, - {file = "mock-5.1.0.tar.gz", hash = "sha256:5e96aad5ccda4718e0a229ed94b2024df75cc2d55575ba5762d31f5767b8767d"}, -] - -[package.extras] -build = ["blurb", "twine", "wheel"] -docs = ["sphinx"] -test = ["pytest", "pytest-cov"] - [[package]] name = "moto" -version = "3.0.5" -description = "A library that allows your python tests to easily mock out the boto library" +version = "5.0.13" +description = "" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "moto-3.0.5-py2.py3-none-any.whl", hash = "sha256:077dd5b96267bbb81fc7677b6d5cc3099ca99a371bd7e90932b6d3ff997106c6"}, - {file = "moto-3.0.5.tar.gz", hash = "sha256:85f2ece0ad0305aa13a39139ce648ab3afe1c04cb32ac1db8d5e5e9117d4d10e"}, + {file = "moto-5.0.13-py2.py3-none-any.whl", hash = "sha256:984377a9c4536543fc09f49a1d5210c61c4a4f55c79719f7d9f8dcdd9bf55ea5"}, + {file = "moto-5.0.13.tar.gz", hash = "sha256:ddf8864f0d61af88fd07a4e5eac428c6bebf4fcd10023f8e756e65e9e7b7e4a5"}, ] [package.dependencies] boto3 = ">=1.9.201" -botocore = ">=1.12.201" +botocore = ">=1.14.0" cryptography = ">=3.3.1" Jinja2 = ">=2.10.1" -MarkupSafe = "!=2.0.0a1" +py-partiql-parser = {version = "0.5.5", optional = true, markers = "extra == \"s3\""} python-dateutil = ">=2.1,<3.0.0" -pytz = "*" PyYAML = {version = ">=5.1", optional = true, markers = "extra == \"s3\" or extra == \"ssm\""} requests = ">=2.5" -responses = ">=0.9.0" -sshpubkeys = {version = ">=3.1.0", optional = true, markers = "extra == \"ec2\""} -werkzeug = "*" +responses = ">=0.15.0" +werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" xmltodict = "*" [package.extras] -all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.4.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "idna (>=2.5,<4)", "jsondiff (>=1.1.2)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] -apigatewayv2 = ["PyYAML (>=5.1)"] +all = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"] +apigateway = ["PyYAML (>=5.1)", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)"] +apigatewayv2 = ["PyYAML (>=5.1)", "openapi-spec-validator (>=0.5.0)"] appsync = 
["graphql-core"] -awslambda = ["docker (>=2.5.1)"] -batch = ["docker (>=2.5.1)"] -cloudformation = ["PyYAML (>=5.1)", "cfn-lint (>=0.4.0)", "docker (>=2.5.1)"] -cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] -ds = ["sshpubkeys (>=3.1.0)"] -dynamodb2 = ["docker (>=2.5.1)"] -dynamodbstreams = ["docker (>=2.5.1)"] -ec2 = ["sshpubkeys (>=3.1.0)"] -efs = ["sshpubkeys (>=3.1.0)"] +awslambda = ["docker (>=3.0.0)"] +batch = ["docker (>=3.0.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"] +cognitoidp = ["joserfc (>=0.9.0)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.5)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.5)"] +events = ["jsonpath-ng"] +glue = ["pyparsing (>=3.0.7)"] iotdata = ["jsondiff (>=1.1.2)"] -route53resolver = ["sshpubkeys (>=3.1.0)"] -s3 = ["PyYAML (>=5.1)"] -server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.4.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "flask", "flask-cors", "graphql-core", "idna (>=2.5,<4)", "jsondiff (>=1.1.2)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -ssm = ["PyYAML (>=5.1)", "dataclasses"] +proxy = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.5)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.5)"] +server = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"] +ssm = ["PyYAML (>=5.1)"] +stepfunctions = ["antlr4-python3-runtime", "jsonpath-ng"] xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] [[package]] @@ -1812,13 +1662,13 @@ tests = ["pytest (>=4.6)"] [[package]] name = "mypy-boto3" -version = "1.34.139" -description = "Type annotations for boto3 1.34.139 master module generated with mypy-boto3-builder 7.25.0" +version = "1.35.2" +description = "Type annotations for boto3 1.35.2 master module generated with mypy-boto3-builder 7.26.0" optional = false python-versions = ">=3.8" files = [ - {file = "mypy_boto3-1.34.139-py3-none-any.whl", hash = "sha256:90ec410b50fb2a826b4e2f0fbfb3ac14ebebcc433346579ca5aad5b9e5b6527d"}, - {file = "mypy_boto3-1.34.139.tar.gz", hash = "sha256:b3c01bc3f1805bda0f17933697604a96a959f647cf076934cd2b1fcdae30bb43"}, + {file = "mypy_boto3-1.35.2-py3-none-any.whl", hash = "sha256:9f4d433b1330ee8808904a46b96db0ab618dd8168bcc35aa29ae244579b05f28"}, + {file = "mypy_boto3-1.35.2.tar.gz", hash = "sha256:296e65b9a6c0d789e4cb303e5eebd31681d8c5986bb428fb5d98c76adbccaf27"}, ] [package.dependencies] @@ -1827,13 +1677,13 @@ 
typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
 
 [[package]]
 name = "mypy-boto3-acm"
-version = "1.34.0"
-description = "Type annotations for boto3.ACM 1.34.0 service generated with mypy-boto3-builder 7.21.0"
+version = "1.35.0"
+description = "Type annotations for boto3.ACM 1.35.0 service generated with mypy-boto3-builder 7.26.0"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-boto3-acm-1.34.0.tar.gz", hash = "sha256:4af0c18bc03de35e07c5bf2dc1c33fe98d8ea14cfefcb6d9649f3a96eff39a6a"},
-    {file = "mypy_boto3_acm-1.34.0-py3-none-any.whl", hash = "sha256:3b25cd742c07a536f406f236a1e5fa4d583f7143da09b172e71b4c3ced281c33"},
+    {file = "mypy_boto3_acm-1.35.0-py3-none-any.whl", hash = "sha256:04f3a4718cde0f8debe87c72121250c2b0986f5f8aaf7ef9f7f8de2c7b4da52a"},
+    {file = "mypy_boto3_acm-1.35.0.tar.gz", hash = "sha256:91d6a47c1d6e460e4af8567278d490c6f0588f4fb3b49ac34eae03921e9da800"},
 ]
 
 [package.dependencies]
@@ -1841,13 +1691,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
 
 [[package]]
 name = "mypy-boto3-cloudformation"
-version = "1.34.0"
-description = "Type annotations for boto3.CloudFormation 1.34.0 service generated with mypy-boto3-builder 7.21.0"
+version = "1.35.0"
+description = "Type annotations for boto3.CloudFormation 1.35.0 service generated with mypy-boto3-builder 7.26.0"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-boto3-cloudformation-1.34.0.tar.gz", hash = "sha256:9b25df9ef15d9dc8e4e892cc07aa9343f15f2ed5eb7d33eb5eb65adfa63f538f"},
-    {file = "mypy_boto3_cloudformation-1.34.0-py3-none-any.whl", hash = "sha256:4e63a2bca1882971881d65983acd774c2fc636bbc5dc8c3e1f4a41c539cf3c90"},
+    {file = "mypy_boto3_cloudformation-1.35.0-py3-none-any.whl", hash = "sha256:5da07e14a206a7f0015434d1730a6a68a33167ea6746343189dd1742cfcfdb7d"},
+    {file = "mypy_boto3_cloudformation-1.35.0.tar.gz", hash = "sha256:0d037d9d6bdb439a84e2391ba987a4e03fcedfad0e881db1cf0f7861d275907c"},
 ]
 
 [package.dependencies]
@@ -1855,13 +1705,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
 
 [[package]]
 name = "mypy-boto3-cloudfront"
-version = "1.34.0"
-description = "Type annotations for boto3.CloudFront 1.34.0 service generated with mypy-boto3-builder 7.21.0"
+version = "1.35.0"
+description = "Type annotations for boto3.CloudFront 1.35.0 service generated with mypy-boto3-builder 7.26.0"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-boto3-cloudfront-1.34.0.tar.gz", hash = "sha256:de7fd612241d704db9dc9f8216cb24a1894d3170127739213d35a77921c32a83"},
-    {file = "mypy_boto3_cloudfront-1.34.0-py3-none-any.whl", hash = "sha256:1f6c8f8c07c23715ef61bc9a4a8248454aa495ee34d6abf284ae6cbc24ad1756"},
+    {file = "mypy_boto3_cloudfront-1.35.0-py3-none-any.whl", hash = "sha256:bf16f510be038f8f86311aa22f94a0d2e701df734e8f6680fbcd094a49adc4ba"},
+    {file = "mypy_boto3_cloudfront-1.35.0.tar.gz", hash = "sha256:7b066606e340b37612dad1b95836cd051af5cb506698e4f1bd7ad2012361029e"},
 ]
 
 [package.dependencies]
@@ -1869,13 +1719,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
 
 [[package]]
 name = "mypy-boto3-cognito-idp"
-version = "1.34.3"
-description = "Type annotations for boto3.CognitoIdentityProvider 1.34.3 service generated with mypy-boto3-builder 7.22.0"
+version = "1.35.0"
+description = "Type annotations for boto3.CognitoIdentityProvider 1.35.0 service generated with mypy-boto3-builder 7.26.0"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-boto3-cognito-idp-1.34.3.tar.gz", hash = "sha256:33de6ed481ff6709279bf9ed2097f76761180747596c6d759c82c7817c378e5c"},
-    {file = "mypy_boto3_cognito_idp-1.34.3-py3-none-any.whl", hash = "sha256:0eab4509f5018421eab96ccc93e529eb554a13901903038275a6fab0d68f1a01"},
+    {file = "mypy_boto3_cognito_idp-1.35.0-py3-none-any.whl", hash = "sha256:27a895e147b80237ecfdcfad230f4ecb70a4c0e3931bae1b8ed41dac1a313354"},
+    {file = "mypy_boto3_cognito_idp-1.35.0.tar.gz", hash = "sha256:626d359599c300636a14d06460a8f7e330fae822e9592e3bd5c54608b9a9d7b0"},
 ]
 
 [package.dependencies]
@@ -1883,13 +1733,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
 
 [[package]]
 name = "mypy-boto3-dynamodb"
-version = "1.34.0"
-description = "Type annotations for boto3.DynamoDB 1.34.0 service generated with mypy-boto3-builder 7.21.0"
+version = "1.35.0"
+description = "Type annotations for boto3.DynamoDB 1.35.0 service generated with mypy-boto3-builder 7.26.0"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-boto3-dynamodb-1.34.0.tar.gz", hash = "sha256:c0d98d7e83b0bc22e5039f703889fb96202d818171c4206fd31e665a37654e84"},
-    {file = "mypy_boto3_dynamodb-1.34.0-py3-none-any.whl", hash = "sha256:76869c3fec882ddeeaca485074e302bf38c3b61103664d665dfed9425234ff75"},
+    {file = "mypy_boto3_dynamodb-1.35.0-py3-none-any.whl", hash = "sha256:1e503c89a5aa65f2b90fc7c861d3630a21544822f30b38e67e4f52463111abb9"},
+    {file = "mypy_boto3_dynamodb-1.35.0.tar.gz", hash = "sha256:75f224d8b78f6d3126eead645aea6c0a8bc2828614f302c168de1d3dad490d11"},
 ]
 
 [package.dependencies]
@@ -1897,13 +1747,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
 
 [[package]]
 name = "mypy-boto3-ec2"
-version = "1.34.0"
-description = "Type annotations for boto3.EC2 1.34.0 service generated with mypy-boto3-builder 7.21.0"
+version = "1.35.0"
+description = "Type annotations for boto3.EC2 1.35.0 service generated with mypy-boto3-builder 7.26.0"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-boto3-ec2-1.34.0.tar.gz", hash = "sha256:f19d4fe9b4ae4173af6ec841d1d2d38e53b0c5eec4f4e6fcbad06d0658e13070"},
-    {file = "mypy_boto3_ec2-1.34.0-py3-none-any.whl", hash = "sha256:678f58876bcbb21fcae455ed1ba75a542c026a36b0dd464dae7b379afdcecd52"},
+    {file = "mypy_boto3_ec2-1.35.0-py3-none-any.whl", hash = "sha256:eebc6cd11be9deb7e29925e64cba09fcbc8b9dd6201176a80c50bd08ff0e87c4"},
+    {file = "mypy_boto3_ec2-1.35.0.tar.gz", hash = "sha256:de19149ff00e26a953adea9d9f8e901e531dc70f51cdeabdb68995d0c71b4724"},
 ]
 
 [package.dependencies]
@@ -1911,13 +1761,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
 
 [[package]]
 name = "mypy-boto3-ecr"
-version = "1.34.0"
-description = "Type annotations for boto3.ECR 1.34.0 service generated with mypy-boto3-builder 7.21.0"
+version = "1.35.0"
+description = "Type annotations for boto3.ECR 1.35.0 service generated with mypy-boto3-builder 7.26.0"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-boto3-ecr-1.34.0.tar.gz", hash = "sha256:b83fb0311e968a42d4ca821b006c18d4a3e3e364b8cebee758ea4fa97c5ac345"},
-    {file = "mypy_boto3_ecr-1.34.0-py3-none-any.whl", hash = "sha256:3346c02c22b16068c9acc67d990e32060bf7cdaea5f3d43c50e44308fbbc3439"},
+    {file = "mypy_boto3_ecr-1.35.0-py3-none-any.whl", hash = "sha256:5783050b91584ecc19248aa2fe176a8ebfeea5bf908cfe83f149c7d49aa0c07f"},
+    {file = "mypy_boto3_ecr-1.35.0.tar.gz", hash = "sha256:1886486ace346aaaa63e50e1d7228bcf85c689c2fb92a1ec630741d68c31dab5"},
 ]
 
 [package.dependencies]
@@ -1925,13 +1775,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
 
 [[package]]
 name = "mypy-boto3-ecs"
-version = "1.34.0"
-description = "Type annotations for boto3.ECS 1.34.0 service generated with mypy-boto3-builder 7.21.0"
+version = "1.35.2"
+description = "Type annotations for boto3.ECS 1.35.2 service generated with mypy-boto3-builder 7.26.0"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-boto3-ecs-1.34.0.tar.gz", hash = "sha256:a35e8bba2e423ffc8db3614f5a4ebebcc8745a1c3d2cd726ca539ce461a853ea"},
-    {file = "mypy_boto3_ecs-1.34.0-py3-none-any.whl", hash = "sha256:26ecdf7b77bf456c0c11bcd866dc04f932222eb262c68adecc9a0c7085d0c07a"},
+    {file = "mypy_boto3_ecs-1.35.2-py3-none-any.whl", hash = "sha256:8081cb085a28c8adec161a51c58d400f2ecc89814b3d9f3990f76f503bde0c87"},
+    {file = "mypy_boto3_ecs-1.35.2.tar.gz", hash = "sha256:0c42c6c72143d14ec812811765e6f2b85b9a73de295715a053638f2b76a22a00"},
 ]
 
 [package.dependencies]
@@ -1939,13 +1789,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
 
 [[package]]
 name = "mypy-boto3-iam"
-version = "1.34.0"
-description = "Type annotations for boto3.IAM 1.34.0 service generated with mypy-boto3-builder 7.21.0"
+version = "1.35.0"
+description = "Type annotations for boto3.IAM 1.35.0 service generated with mypy-boto3-builder 7.26.0"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-boto3-iam-1.34.0.tar.gz", hash = "sha256:2485e753cfe138ece94bab1c4983d0db4dc76e25314d3ffdb9bde5c72ad1a92c"},
-    {file = "mypy_boto3_iam-1.34.0-py3-none-any.whl", hash = "sha256:7edeac8ad54fc5f669d7d4b30fd9c744ed58c3ed36a55ad44179cf450d48e605"},
+    {file = "mypy_boto3_iam-1.35.0-py3-none-any.whl", hash = "sha256:aaa7608799500e2a2ee241d8c3c123f6d1c2ef2d29025c5dff3ac2720a555ccc"},
+    {file = "mypy_boto3_iam-1.35.0.tar.gz", hash = "sha256:b379a01c3ca17a367cb7a460905f9ce1ab7830a9abb8c8a56f28a5ff1087657f"},
 ]
 
 [package.dependencies]
@@ -1953,13 +1803,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
 
 [[package]]
 name = "mypy-boto3-kms"
-version = "1.34.2"
-description = "Type annotations for boto3.KMS 1.34.2 service generated with mypy-boto3-builder 7.21.0"
+version = "1.35.0"
+description = "Type annotations for boto3.KMS 1.35.0 service generated with mypy-boto3-builder 7.26.0"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-boto3-kms-1.34.2.tar.gz", hash = "sha256:1578bff69f6720abaef37639925f5c5a0785b9be208987fdf782a9c6ff05d825"},
-    {file = "mypy_boto3_kms-1.34.2-py3-none-any.whl", hash = "sha256:146d0124295ec3ae9751865091cca0222babcda2e557c041a6878d5581bfce73"},
+    {file = "mypy_boto3_kms-1.35.0-py3-none-any.whl", hash = "sha256:d77e11b6d9bc52e939153a18cfb0e87104365a319b4901ffd230006f46714a51"},
+    {file = "mypy_boto3_kms-1.35.0.tar.gz", hash = "sha256:a06a5e549e2eb8d50022c67073693440a2edccb4870be50b3357f73dc9be64f9"},
 ]
 
 [package.dependencies]
@@ -1967,13 +1817,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
 
 [[package]]
 name = "mypy-boto3-lambda"
-version = "1.34.0"
-description = "Type annotations for boto3.Lambda 1.34.0 service generated with mypy-boto3-builder 7.21.0"
+version = "1.35.1"
+description = "Type annotations for boto3.Lambda 1.35.1 service generated with mypy-boto3-builder 7.26.0"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-boto3-lambda-1.34.0.tar.gz", hash = "sha256:e74c0ce548da747a8c6e643c39dad8aa54d67e057f57740ec780a7e565590627"},
-    {file = "mypy_boto3_lambda-1.34.0-py3-none-any.whl", hash = "sha256:109a7e126e84d6da6cacf8ab5c7c6f2be022417fe7bfb7f9b019767d7034f73b"},
+    {file = "mypy_boto3_lambda-1.35.1-py3-none-any.whl", hash = "sha256:ad5217045006bebb8523ca4822c8c45bf91182b52369862abdb9387328270ef1"},
+    {file = "mypy_boto3_lambda-1.35.1.tar.gz", hash = "sha256:f26e3ecd0b05a1fa2241bded7878d06fabc4d162cf259f5d868b567bbcd1c517"},
 ]
 
 [package.dependencies]
@@ -1981,13 +1831,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
 
 [[package]]
 name = "mypy-boto3-route53"
-version = "1.34.0"
-description = "Type annotations for boto3.Route53 1.34.0 service generated with mypy-boto3-builder 7.21.0"
+version = "1.35.0"
+description = "Type annotations for boto3.Route53 1.35.0 service generated with mypy-boto3-builder 7.26.0"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-boto3-route53-1.34.0.tar.gz", hash = "sha256:a570e6aab4f9b0dee97e4b954ad0440b9f5bf7e94b84cb9720b893796955145b"},
-    {file = "mypy_boto3_route53-1.34.0-py3-none-any.whl", hash = "sha256:8919b60923db524354fb0410513b78e62544119939cf14b0cf6c70e52ba5edf0"},
+    {file = "mypy_boto3_route53-1.35.0-py3-none-any.whl", hash = "sha256:8ade11f7bd84d7d10d41e2547e7a7b6bc8cfbe33535bf924d04257931312327d"},
+    {file = "mypy_boto3_route53-1.35.0.tar.gz", hash = "sha256:9d1c4cce38b325cfc011ac7a2156784efa5d0070dd903adb079eb901ba588ca8"},
 ]
 
 [package.dependencies]
@@ -1995,13 +1845,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
 
 [[package]]
 name = "mypy-boto3-s3"
-version = "1.34.0"
-description = "Type annotations for boto3.S3 1.34.0 service generated with mypy-boto3-builder 7.21.0"
+version = "1.35.2"
+description = "Type annotations for boto3.S3 1.35.2 service generated with mypy-boto3-builder 7.26.0"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-boto3-s3-1.34.0.tar.gz", hash = "sha256:7644a00e096ebb1c3292551059f64ff8329625dacd40827ced9481b14d64c733"},
-    {file = "mypy_boto3_s3-1.34.0-py3-none-any.whl", hash = "sha256:633876d2a96dbb924f9667084316c1759bff40c19a9a38313d5a4e825c5fc641"},
+    {file = "mypy_boto3_s3-1.35.2-py3-none-any.whl", hash = "sha256:f7300b559dee5435872625448becf159abe36b19cd7006dd78e0d51610312183"},
+    {file = "mypy_boto3_s3-1.35.2.tar.gz", hash = "sha256:74d8f3492eeff768ff6f69ac6d40bf68b40aa6e54ebe10a8d098fc3d24a54abf"},
 ]
 
 [package.dependencies]
@@ -2009,13 +1859,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
 
 [[package]]
 name = "mypy-boto3-ssm"
-version = "1.34.0"
-description = "Type annotations for boto3.SSM 1.34.0 service generated with mypy-boto3-builder 7.21.0"
+version = "1.35.0"
+description = "Type annotations for boto3.SSM 1.35.0 service generated with mypy-boto3-builder 7.26.0"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-boto3-ssm-1.34.0.tar.gz", hash = "sha256:ae82936d77496f7958dc25d5db7d48f63b164cac03686c60475c709107deafec"},
-    {file = "mypy_boto3_ssm-1.34.0-py3-none-any.whl", hash = "sha256:e2c34db563851939cab1f12fb392be904f83146af88f515c5cd50bf6c612dda4"},
+    {file = "mypy_boto3_ssm-1.35.0-py3-none-any.whl", hash = "sha256:ee4bfdf91e7e59d556c172d1de8898cb8fd05893be089ac59a1d69a406d45b55"},
+    {file = "mypy_boto3_ssm-1.35.0.tar.gz", hash = "sha256:d3bc98ee5cc4da149a4ef210094f985a84c4d4f7a7c499ec5c6b041df27a1097"},
 ]
 
 [package.dependencies]
@@ -2023,13 +1873,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
 
 [[package]]
 name = "mypy-boto3-sts"
-version = "1.34.0"
-description = "Type annotations for boto3.STS 1.34.0 service generated with mypy-boto3-builder 7.21.0"
+version = "1.35.0"
+description = "Type annotations for boto3.STS 1.35.0 service generated with mypy-boto3-builder 7.26.0"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-boto3-sts-1.34.0.tar.gz", hash = "sha256:b347e0a336d60162dd94074d9d10f614f2b09a455c9b42415850d54d676e2067"},
-    {file = "mypy_boto3_sts-1.34.0-py3-none-any.whl", hash = "sha256:3ba1875c2792b4f35fd918dca957ce09ad197beb7cfbda61f42144ffa9cda05d"},
+    {file = "mypy_boto3_sts-1.35.0-py3-none-any.whl", hash = "sha256:50c6cd996dcff91d58295b6afd4e27201d1e4bfc75d0190eadee052f105bc602"},
+    {file = "mypy_boto3_sts-1.35.0.tar.gz", hash = "sha256:619580c0bcf4d7f79808c8328a7894a0eeac56f94541833c5a329cbc708f7678"},
 ]
 
 [package.dependencies]
@@ -2066,18 +1916,15 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"]
 
 [[package]]
 name = "nodeenv"
-version = "1.7.0"
+version = "1.9.1"
 description = "Node.js virtual environment builder"
 optional = false
-python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
 files = [
-    {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"},
-    {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"},
+    {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"},
+    {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
 ]
 
-[package.dependencies]
-setuptools = "*"
-
 [[package]]
 name = "packaging"
 version = "24.1"
@@ -2091,51 +1938,26 @@ files = [
 
 [[package]]
 name = "pathspec"
-version = "0.11.1"
+version = "0.12.1"
 description = "Utility library for gitignore style pattern matching of file paths."
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"},
-    {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"},
+    {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
+    {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
 ]
 
 [[package]]
 name = "pbr"
-version = "5.11.1"
+version = "6.0.0"
 description = "Python Build Reasonableness"
 optional = false
 python-versions = ">=2.6"
 files = [
-    {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"},
-    {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"},
+    {file = "pbr-6.0.0-py2.py3-none-any.whl", hash = "sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda"},
+    {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"},
 ]
 
-[[package]]
-name = "pefile"
-version = "2023.2.7"
-description = "Python PE parsing module"
-optional = false
-python-versions = ">=3.6.0"
-files = [
-    {file = "pefile-2023.2.7-py3-none-any.whl", hash = "sha256:da185cd2af68c08a6cd4481f7325ed600a88f6a813bad9dea07ab3ef73d8d8d6"},
-    {file = "pefile-2023.2.7.tar.gz", hash = "sha256:82e6114004b3d6911c77c3953e3838654b04511b8b66e8583db70c65998017dc"},
-]
-
-[[package]]
-name = "pep8-naming"
-version = "0.14.1"
-description = "Check PEP-8 naming conventions, plugin for flake8"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pep8-naming-0.14.1.tar.gz", hash = "sha256:1ef228ae80875557eb6c1549deafed4dabbf3261cfcafa12f773fe0db9be8a36"},
-    {file = "pep8_naming-0.14.1-py3-none-any.whl", hash = "sha256:63f514fc777d715f935faf185dedd679ab99526a7f2f503abb61587877f7b1c5"},
-]
-
-[package.dependencies]
-flake8 = ">=5.0.0"
-
 [[package]]
 name = "pip"
 version = "23.3.1"
@@ -2201,13 +2023,13 @@ testing = ["pytest", "pytest-benchmark"]
 
 [[package]]
 name = "pre-commit"
-version = "3.7.1"
+version = "3.8.0"
 description = "A framework for managing and maintaining multi-language pre-commit hooks."
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"},
-    {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"},
+    {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"},
+    {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"},
 ]
 
 [package.dependencies]
@@ -2218,16 +2040,48 @@ pyyaml = ">=5.1"
 virtualenv = ">=20.10.0"
 
 [[package]]
-name = "pycodestyle"
-version = "2.12.0"
-description = "Python style guide checker"
+name = "psutil"
+version = "6.0.0"
+description = "Cross-platform lib for process and system monitoring in Python."
 optional = false
-python-versions = ">=3.8"
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+files = [
+    {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"},
+    {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"},
+    {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"},
+    {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"},
+    {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"},
+    {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"},
+    {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"},
+    {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"},
+    {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"},
+    {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"},
+    {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"},
+    {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"},
+    {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"},
+    {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"},
+    {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"},
+    {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"},
+    {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"},
+]
+
+[package.extras]
+test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
+
+[[package]]
+name = "py-partiql-parser"
+version = "0.5.5"
+description = "Pure Python PartiQL Parser"
+optional = false
+python-versions = "*"
 files = [
-    {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"},
-    {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"},
+    {file = "py_partiql_parser-0.5.5-py2.py3-none-any.whl", hash = "sha256:90d278818385bd60c602410c953ee78f04ece599d8cd21c656fc5e47399577a1"},
+    {file = "py_partiql_parser-0.5.5.tar.gz", hash = "sha256:ed07f8edf4b55e295cab4f5fd3e2ba3196cee48a43fe210d53ddd6ffce1cf1ff"},
 ]
 
+[package.extras]
+dev = ["black (==22.6.0)", "flake8", "mypy", "pytest"]
+
 [[package]]
 name = "pycparser"
 version = "2.21"
@@ -2241,97 +2095,136 @@ files = [
 
 [[package]]
 name = "pydantic"
-version = "1.10.6"
-description = "Data validation and settings management using python type hints"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pydantic-1.10.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9289065611c48147c1dd1fd344e9d57ab45f1d99b0fb26c51f1cf72cd9bcd31"},
-    {file = "pydantic-1.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c32b6bba301490d9bb2bf5f631907803135e8085b6aa3e5fe5a770d46dd0160"},
-    {file = "pydantic-1.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd9b9e98068fa1068edfc9eabde70a7132017bdd4f362f8b4fd0abed79c33083"},
-    {file = "pydantic-1.10.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c84583b9df62522829cbc46e2b22e0ec11445625b5acd70c5681ce09c9b11c4"},
-    {file = "pydantic-1.10.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b41822064585fea56d0116aa431fbd5137ce69dfe837b599e310034171996084"},
-    {file = "pydantic-1.10.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61f1f08adfaa9cc02e0cbc94f478140385cbd52d5b3c5a657c2fceb15de8d1fb"},
-    {file = "pydantic-1.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:32937835e525d92c98a1512218db4eed9ddc8f4ee2a78382d77f54341972c0e7"},
-    {file = "pydantic-1.10.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bbd5c531b22928e63d0cb1868dee76123456e1de2f1cb45879e9e7a3f3f1779b"},
-    {file = "pydantic-1.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e277bd18339177daa62a294256869bbe84df1fb592be2716ec62627bb8d7c81d"},
-    {file = "pydantic-1.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f15277d720aa57e173954d237628a8d304896364b9de745dcb722f584812c7"},
-    {file = "pydantic-1.10.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b243b564cea2576725e77aeeda54e3e0229a168bc587d536cd69941e6797543d"},
-    {file = "pydantic-1.10.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3ce13a558b484c9ae48a6a7c184b1ba0e5588c5525482681db418268e5f86186"},
-    {file = "pydantic-1.10.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3ac1cd4deed871dfe0c5f63721e29debf03e2deefa41b3ed5eb5f5df287c7b70"},
-    {file = "pydantic-1.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:b1eb6610330a1dfba9ce142ada792f26bbef1255b75f538196a39e9e90388bf4"},
-    {file = "pydantic-1.10.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4ca83739c1263a044ec8b79df4eefc34bbac87191f0a513d00dd47d46e307a65"},
-    {file = "pydantic-1.10.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea4e2a7cb409951988e79a469f609bba998a576e6d7b9791ae5d1e0619e1c0f2"},
-    {file = "pydantic-1.10.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53de12b4608290992a943801d7756f18a37b7aee284b9ffa794ee8ea8153f8e2"},
-    {file = "pydantic-1.10.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:60184e80aac3b56933c71c48d6181e630b0fbc61ae455a63322a66a23c14731a"},
-    {file = "pydantic-1.10.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:415a3f719ce518e95a92effc7ee30118a25c3d032455d13e121e3840985f2efd"},
-    {file = "pydantic-1.10.6-cp37-cp37m-win_amd64.whl", hash = "sha256:72cb30894a34d3a7ab6d959b45a70abac8a2a93b6480fc5a7bfbd9c935bdc4fb"},
-    {file = "pydantic-1.10.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3091d2eaeda25391405e36c2fc2ed102b48bac4b384d42b2267310abae350ca6"},
-    {file = "pydantic-1.10.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:751f008cd2afe812a781fd6aa2fb66c620ca2e1a13b6a2152b1ad51553cb4b77"},
-    {file = "pydantic-1.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12e837fd320dd30bd625be1b101e3b62edc096a49835392dcf418f1a5ac2b832"},
-    {file = "pydantic-1.10.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:587d92831d0115874d766b1f5fddcdde0c5b6c60f8c6111a394078ec227fca6d"},
-    {file = "pydantic-1.10.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:476f6674303ae7965730a382a8e8d7fae18b8004b7b69a56c3d8fa93968aa21c"},
-    {file = "pydantic-1.10.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3a2be0a0f32c83265fd71a45027201e1278beaa82ea88ea5b345eea6afa9ac7f"},
-    {file = "pydantic-1.10.6-cp38-cp38-win_amd64.whl", hash = "sha256:0abd9c60eee6201b853b6c4be104edfba4f8f6c5f3623f8e1dba90634d63eb35"},
-    {file = "pydantic-1.10.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6195ca908045054dd2d57eb9c39a5fe86409968b8040de8c2240186da0769da7"},
-    {file = "pydantic-1.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43cdeca8d30de9a897440e3fb8866f827c4c31f6c73838e3a01a14b03b067b1d"},
-    {file = "pydantic-1.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c19eb5163167489cb1e0161ae9220dadd4fc609a42649e7e84a8fa8fff7a80f"},
-    {file = "pydantic-1.10.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:012c99a9c0d18cfde7469aa1ebff922e24b0c706d03ead96940f5465f2c9cf62"},
-    {file = "pydantic-1.10.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:528dcf7ec49fb5a84bf6fe346c1cc3c55b0e7603c2123881996ca3ad79db5bfc"},
-    {file = "pydantic-1.10.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:163e79386c3547c49366e959d01e37fc30252285a70619ffc1b10ede4758250a"},
-    {file = "pydantic-1.10.6-cp39-cp39-win_amd64.whl", hash = "sha256:189318051c3d57821f7233ecc94708767dd67687a614a4e8f92b4a020d4ffd06"},
-    {file = "pydantic-1.10.6-py3-none-any.whl", hash = "sha256:acc6783751ac9c9bc4680379edd6d286468a1dc8d7d9906cd6f1186ed682b2b0"},
-    {file = "pydantic-1.10.6.tar.gz", hash = "sha256:cf95adb0d1671fc38d8c43dd921ad5814a735e7d9b4d9e437c088002863854fd"},
-]
-
-[package.dependencies]
-typing-extensions = ">=4.2.0"
-
-[package.extras]
-dotenv = ["python-dotenv (>=0.10.4)"]
-email = ["email-validator (>=1.0.3)"]
-
-[[package]]
-name = "pydocstyle"
-version = "6.3.0"
-description = "Python docstring style checker"
+version = "2.8.2"
+description = "Data validation using Python type hints"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
 files = [
-    {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"},
-    {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"},
+    {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"},
+    {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"},
 ]
 
 [package.dependencies]
-snowballstemmer = ">=2.2.0"
+annotated-types = ">=0.4.0"
+pydantic-core = "2.20.1"
+typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""}
 
 [package.extras]
-toml = ["tomli (>=1.2.3)"]
+email = ["email-validator (>=2.0.0)"]
 
 [[package]]
-name = "pyflakes"
-version = "3.2.0"
-description = "passive checker of Python programs"
+name = "pydantic-core"
+version = "2.20.1"
+description = "Core functionality for Pydantic validation and serialization"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"},
-    {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"},
-]
+    {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"},
+    {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"},
+    {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"},
+    {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"},
+    {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"},
+    {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"},
+    {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"},
+    {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"},
+    {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"},
+    {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"},
+    {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"},
+    {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"},
+    {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"},
+    {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"},
+    {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"},
+    {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"},
+    {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"},
+    {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"},
+    {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"},
+    {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"},
+    {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"},
+    {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"},
+    {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"},
+    {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"},
+    {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"},
+    {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"},
+    {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"},
+    {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"},
+    {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"},
+    {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"},
+    {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"},
+    {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"},
+    {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"},
+    {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"},
+    {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"},
+    {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"},
+    {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"},
+    {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"},
+    {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"},
+    {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"},
+    {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"},
+    {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"},
+    {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"},
+    {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"},
+    {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"},
+    {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"},
+    {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"},
+    {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"},
+    {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"},
+    {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"},
+    {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"},
+    {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"},
+    {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"},
+    {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"},
+    {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"},
+    {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"},
+    {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"},
+    {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"},
+    {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"},
+    {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"},
+    {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"},
+    {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"},
+    {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"},
+    {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"},
+    {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"},
+    {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"},
+    {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"},
+    {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"},
+    {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"},
+    {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"},
+    {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"},
+    {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"},
+    {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"},
+    {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"},
+    {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"},
+    {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"},
+    {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"},
+    {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"},
+    {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"},
+    {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"},
+    {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"},
+    {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"},
+    {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"},
+    {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"},
+    {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"},
+    {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"},
+    {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"},
+    {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"},
+    {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
 
 [[package]]
 name = "pygments"
-version = "2.17.2"
+version = "2.18.0"
 description = "Pygments is a syntax highlighting package written in Python."
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"},
-    {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"},
+    {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
+    {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
 ]
 
 [package.extras]
-plugins = ["importlib-metadata"]
 windows-terminal = ["colorama (>=0.4.6)"]
 
 [[package]]
@@ -2345,99 +2238,19 @@ files = [
     {file = "pyhcl-0.4.5.tar.gz", hash = "sha256:c47293a51ccdd25e18bb5c8c0ab0ffe355b37c87f8d6f9d3280dc41efd4740bc"},
 ]
 
-[[package]]
-name = "pyinstaller"
-version = "6.7.0"
-description = "PyInstaller bundles a Python application and all its dependencies into a single package."
-optional = false
-python-versions = "<3.13,>=3.8"
-files = [
-    {file = "pyinstaller-6.7.0-py3-none-macosx_10_13_universal2.whl", hash = "sha256:6decedba07031d1318528cb76d8400ae1572f7b08197f771ceca9e454e0060bf"},
-    {file = "pyinstaller-6.7.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0756b3d4d3283ae2a5bda56abe479b80801ecafecdb3a96cd928542c2c75d016"},
-    {file = "pyinstaller-6.7.0-py3-none-manylinux2014_i686.whl", hash = "sha256:df1b66500a7def997790bdadc23c142a2f96585ccd440beac63b72a4f3e41684"},
-    {file = "pyinstaller-6.7.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:fa552214a8cbb5bfe4621c46a73c3cce12f299a520aa5ac397dc18718278f03a"},
-    {file = "pyinstaller-6.7.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:5263ecbfd34a2297f0e5d41ecfcf7a6fb1ebbf60dbe0dc7c2d64f4a55871a99d"},
-    {file = "pyinstaller-6.7.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:4ff8ce04f1e5ab3a65d4a1ee6036cba648d0cdae6a7a33c6f0ca4ace46cdd43c"},
-    {file = "pyinstaller-6.7.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:95efc2de7722213f376c5bac9620f390899f9a3c9eed70bd65adf29e2a085d5f"},
-    {file = "pyinstaller-6.7.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:1b6dd6a50a7315214d345875cd08f8aa71025e7ba6bfa0f95c09285585e8d372"},
-    {file = "pyinstaller-6.7.0-py3-none-win32.whl", hash = "sha256:73b94ce02b208c34eaabd032dd1522a3c03c0b3118a31bf7e4eafe7a9f4af2da"},
-    {file = "pyinstaller-6.7.0-py3-none-win_amd64.whl", hash = "sha256:a3f85935b40f89e717f1e67377d3bfc953060e5795828ecf5357e2c1f7aa52bf"},
-    {file = "pyinstaller-6.7.0-py3-none-win_arm64.whl", hash = "sha256:53038419ca09eea59de02dfb52453dd327983b0957821be610fb04cfd84676d0"},
-    {file = "pyinstaller-6.7.0.tar.gz", hash = "sha256:8f09179c5f3d1b4b8453ac61adfe394dd416f9fc33abd7553f77d4897bc3a582"},
-]
-
-[package.dependencies]
-altgraph = "*"
-importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""}
-macholib = {version = ">=1.8", markers = "sys_platform == \"darwin\""}
-packaging = ">=22.0"
-pefile = {version = ">=2022.5.30", markers = "sys_platform == \"win32\""}
-pyinstaller-hooks-contrib = ">=2024.6"
-pywin32-ctypes = {version = ">=0.2.1", markers = "sys_platform == \"win32\""}
-setuptools = ">=42.0.0"
-
-[package.extras]
-completion = ["argcomplete"]
-hook-testing = ["execnet (>=1.5.0)", "psutil", "pytest (>=2.7.3)"]
-
-[[package]]
-name = "pyinstaller-hooks-contrib"
-version = "2024.6"
-description = "Community maintained hooks for PyInstaller"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pyinstaller_hooks_contrib-2024.6-py2.py3-none-any.whl", hash = "sha256:6cc88dad75261d9e1a7e0c6385139f35dcdbb16640c911a27f6078fe924a38cf"},
-    {file = "pyinstaller_hooks_contrib-2024.6.tar.gz", hash = "sha256:3c188b3a79f5cd46d96520df3934642556a1b6ce8988ec5bbce820ada424bc2b"},
-]
-
-[package.dependencies]
-importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""}
-packaging = ">=22.0"
-setuptools = ">=42.0.0"
-
-[[package]]
-name = "pylint"
-version = "2.17.4"
-description = "python code static checker"
-optional = false
-python-versions = ">=3.7.2"
-files = [
-    {file = "pylint-2.17.4-py3-none-any.whl", hash = "sha256:7a1145fb08c251bdb5cca11739722ce64a63db479283d10ce718b2460e54123c"},
-    {file = "pylint-2.17.4.tar.gz", hash = "sha256:5dcf1d9e19f41f38e4e85d10f511e5b9c35e1aa74251bf95cdd8cb23584e2db1"},
-]
-
-[package.dependencies]
-astroid = ">=2.15.4,<=2.17.0-dev0"
-colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
-dill = [
-    {version = ">=0.2", markers = "python_version < \"3.11\""},
-    {version = ">=0.3.6", markers = "python_version >= \"3.11\""},
-]
-isort = ">=4.2.5,<6"
-mccabe = ">=0.6,<0.8"
-platformdirs = ">=2.2.0"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
-tomlkit = ">=0.10.1"
-typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""}
-
-[package.extras]
-spelling = ["pyenchant (>=3.2,<4.0)"]
-testutils = ["gitpython (>3)"]
-
 [[package]]
 name = "pyopenssl"
-version = "24.1.0"
+version = "24.2.1"
 description = "Python wrapper module around the OpenSSL library"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "pyOpenSSL-24.1.0-py3-none-any.whl", hash = "sha256:17ed5be5936449c5418d1cd269a1a9e9081bc54c17aed272b45856a3d3dc86ad"},
-    {file = "pyOpenSSL-24.1.0.tar.gz", hash = "sha256:cabed4bfaa5df9f1a16c0ef64a0cb65318b5cd077a7eda7d6970131ca2f41a6f"},
+    {file = "pyOpenSSL-24.2.1-py3-none-any.whl", hash = "sha256:967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d"},
+    {file = "pyopenssl-24.2.1.tar.gz", hash = "sha256:4247f0dbe3748d560dcbb2ff3ea01af0f9a1a001ef5f7c4c647956ed8cbf0e95"},
 ]
 
 [package.dependencies]
-cryptography = ">=41.0.5,<43"
+cryptography = ">=41.0.5,<44"
 
 [package.extras]
 docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"]
@@ -2454,51 +2267,15 @@ files = [
     {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"},
 ]
 
-[[package]]
-name = "pyrsistent"
-version = "0.19.3"
-description = "Persistent/Functional/Immutable data structures"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"},
-    {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"},
-    {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"},
-    {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"},
-    {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"},
-    {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"},
-    {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"},
-    {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"},
-    {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"},
-    {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"},
-    {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"},
-    {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"},
-    {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"},
-    {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"},
-    {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"},
-    {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"},
-    {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"},
-    {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"},
-    {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"},
-    {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"},
-    {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"},
-    {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"},
-    {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"},
-    {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"},
-    {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"},
-    {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"},
-    {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"},
-]
-
 [[package]]
 name = "pytest"
-version = "8.2.2"
+version = "8.3.2"
 description = "pytest: simple powerful testing with Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"},
-    {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"},
+    {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"},
+    {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"},
 ]
 
 [package.dependencies]
@@ -2506,7 +2283,7 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""}
 exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
 iniconfig = "*"
 packaging = "*"
-pluggy = ">=1.5,<2.0"
+pluggy = ">=1.5,<2"
 tomli = {version = ">=1", markers = "python_version < \"3.11\""}
 
 [package.extras]
@@ -2566,13 +2343,13 @@ pytest = [
 
 [[package]]
 name = "pytest-subprocess"
-version = "1.5.0"
+version = "1.5.2"
 description = "A plugin to fake subprocess for pytest"
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "pytest-subprocess-1.5.0.tar.gz", hash = "sha256:d7693b96f588f39b84c7b2b5c04287459246dfae6be1dd4098937a728ad4fbe3"},
-    {file = "pytest_subprocess-1.5.0-py3-none-any.whl", hash = "sha256:dfd75b10af6800a89a9b758f2e2eceff9de082a27bd1388521271b6e8bde298b"},
+    {file = "pytest_subprocess-1.5.2-py3-none-any.whl", hash = "sha256:23ac7732aa8bd45f1757265b1316eb72a7f55b41fb21e2ca22e149ba3629fa46"},
+    {file = "pytest_subprocess-1.5.2.tar.gz", hash = "sha256:ad3ca8a35e798bf9c82d9f16d88700b30d98c5a28236117b86c5d6e581a8ed97"},
 ]
 
 [package.dependencies]
@@ -2581,7 +2358,7 @@ pytest = ">=4.0.0"
 [package.extras]
 dev = ["changelogd", "nox"]
 docs = ["changelogd", "furo", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-napoleon"]
-test = ["Pygments (>=2.0)", "anyio", "coverage", "docutils (>=0.12)", "pytest (>=4.0)", "pytest-asyncio (>=0.15.1)", "pytest-rerunfailures"]
+test = ["Pygments (>=2.0)", "anyio", "coverage", "docutils (>=0.12)", "pytest (>=4.0)", "pytest-asyncio (>=0.15.1)", "pytest-rerunfailures", "pytest-timeout"]
 
 [[package]]
 name = "pytest-sugar"
@@ -2615,6 +2392,7 @@ files = [
 
 [package.dependencies]
 execnet = ">=2.1"
+psutil = {version = ">=3.0", optional = true, markers = "extra == \"psutil\""}
 pytest = ">=7.0.0"
 
 [package.extras]
@@ -2638,218 +2416,204 @@ six = ">=1.5"
 
 [[package]]
 name = "python-hcl2"
-version = "4.3.4"
+version = "4.3.5"
 description = "A parser for HCL2"
 optional = false
 python-versions = ">=3.7.0"
 files = [
-    {file = "python-hcl2-4.3.4.tar.gz", hash = "sha256:ef1b9bad018bcfc1fe2792044974299e559145fe96e3ca298c1e5e9500c8de66"},
-    {file = "python_hcl2-4.3.4-py3-none-any.whl", hash = "sha256:00f4f0dfa177b1217a5478d5d36b4a70ec01228ad59fc9f2a79eadd4b16676cd"},
+    {file = "python-hcl2-4.3.5.tar.gz", hash = "sha256:71fbe48ee9a13335828f04adff2b267e06045c44c99c737b13d940aa1468d101"},
+    {file = "python_hcl2-4.3.5-py3-none-any.whl", hash = "sha256:e33795eed675ae59545e725d1de0a8d78b4166b30b6e0bc7699365bd06334068"},
 ]
 
 [package.dependencies]
 lark = ">=1,<2"
 
-[[package]]
-name = "pytz"
-version = "2022.7.1"
-description = "World timezone definitions, modern and historical"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"},
-    {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"},
-]
-
 [[package]]
 name = "pywin32"
-version = "305"
+version = "306"
 description = "Python for Window Extensions"
 optional = false
 python-versions = "*"
 files = [
-    {file = "pywin32-305-cp310-cp310-win32.whl", hash = "sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116"},
-    {file = "pywin32-305-cp310-cp310-win_amd64.whl", hash = "sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478"},
-    {file = "pywin32-305-cp310-cp310-win_arm64.whl", hash = "sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4"},
-    {file = "pywin32-305-cp311-cp311-win32.whl", hash = "sha256:19ca459cd2e66c0e2cc9a09d589f71d827f26d47fe4a9d09175f6aa0256b51c2"},
-    {file = "pywin32-305-cp311-cp311-win_amd64.whl", hash = "sha256:326f42ab4cfff56e77e3e595aeaf6c216712bbdd91e464d167c6434b28d65990"},
-    {file = "pywin32-305-cp311-cp311-win_arm64.whl", hash = "sha256:4ecd404b2c6eceaca52f8b2e3e91b2187850a1ad3f8b746d0796a98b4cea04db"},
-    {file = "pywin32-305-cp36-cp36m-win32.whl", hash = "sha256:48d8b1659284f3c17b68587af047d110d8c44837736b8932c034091683e05863"},
-    {file = "pywin32-305-cp36-cp36m-win_amd64.whl", hash = "sha256:13362cc5aa93c2beaf489c9c9017c793722aeb56d3e5166dadd5ef82da021fe1"},
-    {file = "pywin32-305-cp37-cp37m-win32.whl", hash = "sha256:a55db448124d1c1484df22fa8bbcbc45c64da5e6eae74ab095b9ea62e6d00496"},
-    {file = "pywin32-305-cp37-cp37m-win_amd64.whl", hash = "sha256:109f98980bfb27e78f4df8a51a8198e10b0f347257d1e265bb1a32993d0c973d"},
-    {file = "pywin32-305-cp38-cp38-win32.whl", hash = "sha256:9dd98384da775afa009bc04863426cb30596fd78c6f8e4e2e5bbf4edf8029504"},
-    {file = "pywin32-305-cp38-cp38-win_amd64.whl", hash = "sha256:56d7a9c6e1a6835f521788f53b5af7912090674bb84ef5611663ee1595860fc7"},
-    {file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"},
-    {file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = "sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"},
-]
-
-[[package]]
-name = "pywin32-ctypes"
-version = "0.2.2"
-description = "A (partial) reimplementation of pywin32 using ctypes/cffi"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"},
-    {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"},
+    {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"},
+    {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"},
+    {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"},
+    {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"},
+    {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"},
+    {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"},
+    {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"},
+    {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"},
+    {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"},
+    {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"},
+    {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"},
+    {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"},
+    {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"},
+    {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"},
 ]
 
 [[package]]
 name = "pyyaml"
-version = "6.0.1"
+version = "6.0.2"
 description = "YAML parser and emitter for Python"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
 files = [
-    {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
-    {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
-    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
-    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
-    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
-    {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
-    {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
-    {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
-    {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
-    {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
-    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
-    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
-    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
-    {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
-    {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
-    {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
-    {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
-    {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
-    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
-    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
-    {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
-    {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
-    {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
-    {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
-    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
-    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
-    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
-    {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
-    {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
-    {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
-    {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
-    {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
-    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
-    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
-    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
-    {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl",
hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = 
"PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "referencing" +version = "0.35.1" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, + {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, ] +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + [[package]] name = "regex" -version = "2023.5.5" +version = "2024.7.24" description = "Alternative regular expression module, to replace re." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "regex-2023.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:48c9ec56579d4ba1c88f42302194b8ae2350265cb60c64b7b9a88dcb7fbde309"}, - {file = "regex-2023.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f4541550459c08fdd6f97aa4e24c6f1932eec780d58a2faa2068253df7d6ff"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e22e4460f0245b468ee645156a4f84d0fc35a12d9ba79bd7d79bdcd2f9629d"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b870b6f632fc74941cadc2a0f3064ed8409e6f8ee226cdfd2a85ae50473aa94"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:171c52e320fe29260da550d81c6b99f6f8402450dc7777ef5ced2e848f3b6f8f"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad5524c2aedaf9aa14ef1bc9327f8abd915699dea457d339bebbe2f0d218f86"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a0f874ee8c0bc820e649c900243c6d1e6dc435b81da1492046716f14f1a2a96"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e645c757183ee0e13f0bbe56508598e2d9cd42b8abc6c0599d53b0d0b8dd1479"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a4c5da39bca4f7979eefcbb36efea04471cd68db2d38fcbb4ee2c6d440699833"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5e3f4468b8c6fd2fd33c218bbd0a1559e6a6fcf185af8bb0cc43f3b5bfb7d636"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:59e4b729eae1a0919f9e4c0fc635fbcc9db59c74ad98d684f4877be3d2607dd6"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ba73a14e9c8f9ac409863543cde3290dba39098fc261f717dc337ea72d3ebad2"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0bbd5dcb19603ab8d2781fac60114fb89aee8494f4505ae7ad141a3314abb1f9"}, - {file = "regex-2023.5.5-cp310-cp310-win32.whl", hash = "sha256:40005cbd383438aecf715a7b47fe1e3dcbc889a36461ed416bdec07e0ef1db66"}, - {file = "regex-2023.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:59597cd6315d3439ed4b074febe84a439c33928dd34396941b4d377692eca810"}, - {file = "regex-2023.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8f08276466fedb9e36e5193a96cb944928301152879ec20c2d723d1031cd4ddd"}, - {file = "regex-2023.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cd46f30e758629c3ee91713529cfbe107ac50d27110fdcc326a42ce2acf4dafc"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2910502f718828cecc8beff004917dcf577fc5f8f5dd40ffb1ea7612124547b"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:445d6f4fc3bd9fc2bf0416164454f90acab8858cd5a041403d7a11e3356980e8"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18196c16a584619c7c1d843497c069955d7629ad4a3fdee240eb347f4a2c9dbe"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33d430a23b661629661f1fe8395be2004006bc792bb9fc7c53911d661b69dd7e"}, - {file = 
"regex-2023.5.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72a28979cc667e5f82ef433db009184e7ac277844eea0f7f4d254b789517941d"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f764e4dfafa288e2eba21231f455d209f4709436baeebb05bdecfb5d8ddc3d35"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23d86ad2121b3c4fc78c58f95e19173790e22ac05996df69b84e12da5816cb17"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:690a17db524ee6ac4a27efc5406530dd90e7a7a69d8360235323d0e5dafb8f5b"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1ecf3dcff71f0c0fe3e555201cbe749fa66aae8d18f80d2cc4de8e66df37390a"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:811040d7f3dd9c55eb0d8b00b5dcb7fd9ae1761c454f444fd9f37fe5ec57143a"}, - {file = "regex-2023.5.5-cp311-cp311-win32.whl", hash = "sha256:c8c143a65ce3ca42e54d8e6fcaf465b6b672ed1c6c90022794a802fb93105d22"}, - {file = "regex-2023.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:586a011f77f8a2da4b888774174cd266e69e917a67ba072c7fc0e91878178a80"}, - {file = "regex-2023.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b6365703e8cf1644b82104cdd05270d1a9f043119a168d66c55684b1b557d008"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a56c18f21ac98209da9c54ae3ebb3b6f6e772038681d6cb43b8d53da3b09ee81"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8b942d8b3ce765dbc3b1dad0a944712a89b5de290ce8f72681e22b3c55f3cc8"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:844671c9c1150fcdac46d43198364034b961bd520f2c4fdaabfc7c7d7138a2dd"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2ce65bdeaf0a386bb3b533a28de3994e8e13b464ac15e1e67e4603dd88787fa"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fee0016cc35a8a91e8cc9312ab26a6fe638d484131a7afa79e1ce6165328a135"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:18f05d14f14a812fe9723f13afafefe6b74ca042d99f8884e62dbd34dcccf3e2"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:941b3f1b2392f0bcd6abf1bc7a322787d6db4e7457be6d1ffd3a693426a755f2"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:921473a93bcea4d00295799ab929522fc650e85c6b9f27ae1e6bb32a790ea7d3"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:e2205a81f815b5bb17e46e74cc946c575b484e5f0acfcb805fb252d67e22938d"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:385992d5ecf1a93cb85adff2f73e0402dd9ac29b71b7006d342cc920816e6f32"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:890a09cb0a62198bff92eda98b2b507305dd3abf974778bae3287f98b48907d3"}, - {file = "regex-2023.5.5-cp36-cp36m-win32.whl", hash = "sha256:821a88b878b6589c5068f4cc2cfeb2c64e343a196bc9d7ac68ea8c2a776acd46"}, - {file = "regex-2023.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:7918a1b83dd70dc04ab5ed24c78ae833ae8ea228cef84e08597c408286edc926"}, - {file = "regex-2023.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:338994d3d4ca4cf12f09822e025731a5bdd3a37aaa571fa52659e85ca793fb67"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a69cf0c00c4d4a929c6c7717fd918414cab0d6132a49a6d8fc3ded1988ed2ea"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f5e06df94fff8c4c85f98c6487f6636848e1dc85ce17ab7d1931df4a081f657"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8906669b03c63266b6a7693d1f487b02647beb12adea20f8840c1a087e2dfb5"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fda3e50abad8d0f48df621cf75adc73c63f7243cbe0e3b2171392b445401550"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ac2b7d341dc1bd102be849d6dd33b09701223a851105b2754339e390be0627a"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fb2b495dd94b02de8215625948132cc2ea360ae84fe6634cd19b6567709c8ae2"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aa7d032c1d84726aa9edeb6accf079b4caa87151ca9fabacef31fa028186c66d"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3d45864693351c15531f7e76f545ec35000d50848daa833cead96edae1665559"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21e90a288e6ba4bf44c25c6a946cb9b0f00b73044d74308b5e0afd190338297c"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:10250a093741ec7bf74bcd2039e697f519b028518f605ff2aa7ac1e9c9f97423"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6b8d0c153f07a953636b9cdb3011b733cadd4178123ef728ccc4d5969e67f3c2"}, - {file = "regex-2023.5.5-cp37-cp37m-win32.whl", hash = "sha256:10374c84ee58c44575b667310d5bbfa89fb2e64e52349720a0182c0017512f6c"}, - {file = "regex-2023.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9b320677521aabf666cdd6e99baee4fb5ac3996349c3b7f8e7c4eee1c00dfe3a"}, - {file = "regex-2023.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:afb1c70ec1e594a547f38ad6bf5e3d60304ce7539e677c1429eebab115bce56e"}, - {file = "regex-2023.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf123225945aa58b3057d0fba67e8061c62d14cc8a4202630f8057df70189051"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99757ad7fe5c8a2bb44829fc57ced11253e10f462233c1255fe03888e06bc19"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a623564d810e7a953ff1357f7799c14bc9beeab699aacc8b7ab7822da1e952b8"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ced02e3bd55e16e89c08bbc8128cff0884d96e7f7a5633d3dc366b6d95fcd1d6"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cbe6b5be3b9b698d8cc4ee4dee7e017ad655e83361cd0ea8e653d65e469468"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a6e4b0e0531223f53bad07ddf733af490ba2b8367f62342b92b39b29f72735a"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e9c4f778514a560a9c9aa8e5538bee759b55f6c1dcd35613ad72523fd9175b8"}, - {file = 
"regex-2023.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:256f7f4c6ba145f62f7a441a003c94b8b1af78cee2cccacfc1e835f93bc09426"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bd7b68fd2e79d59d86dcbc1ccd6e2ca09c505343445daaa4e07f43c8a9cc34da"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4a5059bd585e9e9504ef9c07e4bc15b0a621ba20504388875d66b8b30a5c4d18"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:6893544e06bae009916a5658ce7207e26ed17385149f35a3125f5259951f1bbe"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c64d5abe91a3dfe5ff250c6bb267ef00dbc01501518225b45a5f9def458f31fb"}, - {file = "regex-2023.5.5-cp38-cp38-win32.whl", hash = "sha256:7923470d6056a9590247ff729c05e8e0f06bbd4efa6569c916943cb2d9b68b91"}, - {file = "regex-2023.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:4035d6945cb961c90c3e1c1ca2feb526175bcfed44dfb1cc77db4fdced060d3e"}, - {file = "regex-2023.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:50fd2d9b36938d4dcecbd684777dd12a407add4f9f934f235c66372e630772b0"}, - {file = "regex-2023.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d19e57f888b00cd04fc38f5e18d0efbd91ccba2d45039453ab2236e6eec48d4d"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd966475e963122ee0a7118ec9024388c602d12ac72860f6eea119a3928be053"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db09e6c18977a33fea26fe67b7a842f706c67cf8bda1450974d0ae0dd63570df"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6164d4e2a82f9ebd7752a06bd6c504791bedc6418c0196cd0a23afb7f3e12b2d"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84397d3f750d153ebd7f958efaa92b45fea170200e2df5e0e1fd4d85b7e3f58a"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c3efee9bb53cbe7b285760c81f28ac80dc15fa48b5fe7e58b52752e642553f1"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:144b5b017646b5a9392a5554a1e5db0000ae637be4971c9747566775fc96e1b2"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1189fbbb21e2c117fda5303653b61905aeeeea23de4a94d400b0487eb16d2d60"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f83fe9e10f9d0b6cf580564d4d23845b9d692e4c91bd8be57733958e4c602956"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:72aa4746993a28c841e05889f3f1b1e5d14df8d3daa157d6001a34c98102b393"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:de2f780c3242ea114dd01f84848655356af4dd561501896c751d7b885ea6d3a1"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:290fd35219486dfbc00b0de72f455ecdd63e59b528991a6aec9fdfc0ce85672e"}, - {file = "regex-2023.5.5-cp39-cp39-win32.whl", hash = "sha256:732176f5427e72fa2325b05c58ad0b45af341c459910d766f814b0584ac1f9ac"}, - {file = "regex-2023.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:1307aa4daa1cbb23823d8238e1f61292fd07e4e5d8d38a6efff00b67a7cdb764"}, - {file = "regex-2023.5.5.tar.gz", hash = "sha256:7d76a8a1fc9da08296462a18f16620ba73bcbf5909e42383b253ef34d9d5141e"}, + {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:228b0d3f567fafa0633aee87f08b9276c7062da9616931382993c03808bb68ce"}, + {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3426de3b91d1bc73249042742f45c2148803c111d1175b283270177fdf669024"}, + {file = "regex-2024.7.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f273674b445bcb6e4409bf8d1be67bc4b58e8b46fd0d560055d515b8830063cd"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23acc72f0f4e1a9e6e9843d6328177ae3074b4182167e34119ec7233dfeccf53"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65fd3d2e228cae024c411c5ccdffae4c315271eee4a8b839291f84f796b34eca"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c414cbda77dbf13c3bc88b073a1a9f375c7b0cb5e115e15d4b73ec3a2fbc6f59"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7a89eef64b5455835f5ed30254ec19bf41f7541cd94f266ab7cbd463f00c41"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19c65b00d42804e3fbea9708f0937d157e53429a39b7c61253ff15670ff62cb5"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7a5486ca56c8869070a966321d5ab416ff0f83f30e0e2da1ab48815c8d165d46"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6f51f9556785e5a203713f5efd9c085b4a45aecd2a42573e2b5041881b588d1f"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a4997716674d36a82eab3e86f8fa77080a5d8d96a389a61ea1d0e3a94a582cf7"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c0abb5e4e8ce71a61d9446040c1e86d4e6d23f9097275c5bd49ed978755ff0fe"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:18300a1d78cf1290fa583cd8b7cde26ecb73e9f5916690cf9d42de569c89b1ce"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:416c0e4f56308f34cdb18c3f59849479dde5b19febdcd6e6fa4d04b6c31c9faa"}, + {file = "regex-2024.7.24-cp310-cp310-win32.whl", hash = "sha256:fb168b5924bef397b5ba13aabd8cf5df7d3d93f10218d7b925e360d436863f66"}, + {file = "regex-2024.7.24-cp310-cp310-win_amd64.whl", hash = "sha256:6b9fc7e9cc983e75e2518496ba1afc524227c163e43d706688a6bb9eca41617e"}, + {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:382281306e3adaaa7b8b9ebbb3ffb43358a7bbf585fa93821300a418bb975281"}, + {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4fdd1384619f406ad9037fe6b6eaa3de2749e2e12084abc80169e8e075377d3b"}, + {file = "regex-2024.7.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3d974d24edb231446f708c455fd08f94c41c1ff4f04bcf06e5f36df5ef50b95a"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2ec4419a3fe6cf8a4795752596dfe0adb4aea40d3683a132bae9c30b81e8d73"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb563dd3aea54c797adf513eeec819c4213d7dbfc311874eb4fd28d10f2ff0f2"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45104baae8b9f67569f0f1dca5e1f1ed77a54ae1cd8b0b07aba89272710db61e"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:994448ee01864501912abf2bad9203bffc34158e80fe8bfb5b031f4f8e16da51"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fac296f99283ac232d8125be932c5cd7644084a30748fda013028c815ba3364"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e37e809b9303ec3a179085415cb5f418ecf65ec98cdfe34f6a078b46ef823ee"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:01b689e887f612610c869421241e075c02f2e3d1ae93a037cb14f88ab6a8934c"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f6442f0f0ff81775eaa5b05af8a0ffa1dda36e9cf6ec1e0d3d245e8564b684ce"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:871e3ab2838fbcb4e0865a6e01233975df3a15e6fce93b6f99d75cacbd9862d1"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c918b7a1e26b4ab40409820ddccc5d49871a82329640f5005f73572d5eaa9b5e"}, + {file = "regex-2024.7.24-cp311-cp311-win32.whl", hash = "sha256:2dfbb8baf8ba2c2b9aa2807f44ed272f0913eeeba002478c4577b8d29cde215c"}, + {file = "regex-2024.7.24-cp311-cp311-win_amd64.whl", hash = "sha256:538d30cd96ed7d1416d3956f94d54e426a8daf7c14527f6e0d6d425fcb4cca52"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fe4ebef608553aff8deb845c7f4f1d0740ff76fa672c011cc0bacb2a00fbde86"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:74007a5b25b7a678459f06559504f1eec2f0f17bca218c9d56f6a0a12bfffdad"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7df9ea48641da022c2a3c9c641650cd09f0cd15e8908bf931ad538f5ca7919c9"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1141a1dcc32904c47f6846b040275c6e5de0bf73f17d7a409035d55b76f289"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80c811cfcb5c331237d9bad3bea2c391114588cf4131707e84d9493064d267f9"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7214477bf9bd195894cf24005b1e7b496f46833337b5dedb7b2a6e33f66d962c"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d55588cba7553f0b6ec33130bc3e114b355570b45785cebdc9daed8c637dd440"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:558a57cfc32adcf19d3f791f62b5ff564922942e389e3cfdb538a23d65a6b610"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a512eed9dfd4117110b1881ba9a59b31433caed0c4101b361f768e7bcbaf93c5"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:86b17ba823ea76256b1885652e3a141a99a5c4422f4a869189db328321b73799"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5eefee9bfe23f6df09ffb6dfb23809f4d74a78acef004aa904dc7c88b9944b05"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:731fcd76bbdbf225e2eb85b7c38da9633ad3073822f5ab32379381e8c3c12e94"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eaef80eac3b4cfbdd6de53c6e108b4c534c21ae055d1dbea2de6b3b8ff3def38"}, + {file = "regex-2024.7.24-cp312-cp312-win32.whl", hash = "sha256:185e029368d6f89f36e526764cf12bf8d6f0e3a2a7737da625a76f594bdfcbfc"}, + {file = 
"regex-2024.7.24-cp312-cp312-win_amd64.whl", hash = "sha256:2f1baff13cc2521bea83ab2528e7a80cbe0ebb2c6f0bfad15be7da3aed443908"}, + {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:66b4c0731a5c81921e938dcf1a88e978264e26e6ac4ec96a4d21ae0354581ae0"}, + {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:88ecc3afd7e776967fa16c80f974cb79399ee8dc6c96423321d6f7d4b881c92b"}, + {file = "regex-2024.7.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64bd50cf16bcc54b274e20235bf8edbb64184a30e1e53873ff8d444e7ac656b2"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb462f0e346fcf41a901a126b50f8781e9a474d3927930f3490f38a6e73b6950"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a82465ebbc9b1c5c50738536fdfa7cab639a261a99b469c9d4c7dcbb2b3f1e57"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:68a8f8c046c6466ac61a36b65bb2395c74451df2ffb8458492ef49900efed293"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac8e84fff5d27420f3c1e879ce9929108e873667ec87e0c8eeb413a5311adfe"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba2537ef2163db9e6ccdbeb6f6424282ae4dea43177402152c67ef869cf3978b"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:43affe33137fcd679bdae93fb25924979517e011f9dea99163f80b82eadc7e53"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:c9bb87fdf2ab2370f21e4d5636e5317775e5d51ff32ebff2cf389f71b9b13750"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:945352286a541406f99b2655c973852da7911b3f4264e010218bbc1cc73168f2"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:8bc593dcce679206b60a538c302d03c29b18e3d862609317cb560e18b66d10cf"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3f3b6ca8eae6d6c75a6cff525c8530c60e909a71a15e1b731723233331de4169"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c51edc3541e11fbe83f0c4d9412ef6c79f664a3745fab261457e84465ec9d5a8"}, + {file = "regex-2024.7.24-cp38-cp38-win32.whl", hash = "sha256:d0a07763776188b4db4c9c7fb1b8c494049f84659bb387b71c73bbc07f189e96"}, + {file = "regex-2024.7.24-cp38-cp38-win_amd64.whl", hash = "sha256:8fd5afd101dcf86a270d254364e0e8dddedebe6bd1ab9d5f732f274fa00499a5"}, + {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0ffe3f9d430cd37d8fa5632ff6fb36d5b24818c5c986893063b4e5bdb84cdf24"}, + {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25419b70ba00a16abc90ee5fce061228206173231f004437730b67ac77323f0d"}, + {file = "regex-2024.7.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:33e2614a7ce627f0cdf2ad104797d1f68342d967de3695678c0cb84f530709f8"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d33a0021893ede5969876052796165bab6006559ab845fd7b515a30abdd990dc"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04ce29e2c5fedf296b1a1b0acc1724ba93a36fb14031f3abfb7abda2806c1535"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b16582783f44fbca6fcf46f61347340c787d7530d88b4d590a397a47583f31dd"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:836d3cc225b3e8a943d0b02633fb2f28a66e281290302a79df0e1eaa984ff7c1"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:438d9f0f4bc64e8dea78274caa5af971ceff0f8771e1a2333620969936ba10be"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:973335b1624859cb0e52f96062a28aa18f3a5fc77a96e4a3d6d76e29811a0e6e"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c5e69fd3eb0b409432b537fe3c6f44ac089c458ab6b78dcec14478422879ec5f"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fbf8c2f00904eaf63ff37718eb13acf8e178cb940520e47b2f05027f5bb34ce3"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2757ace61bc4061b69af19e4689fa4416e1a04840f33b441034202b5cd02d4"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:44fc61b99035fd9b3b9453f1713234e5a7c92a04f3577252b45feefe1b327759"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:84c312cdf839e8b579f504afcd7b65f35d60b6285d892b19adea16355e8343c9"}, + {file = "regex-2024.7.24-cp39-cp39-win32.whl", hash = "sha256:ca5b2028c2f7af4e13fb9fc29b28d0ce767c38c7facdf64f6c2cd040413055f1"}, + {file = "regex-2024.7.24-cp39-cp39-win_amd64.whl", hash = "sha256:7c479f5ae937ec9985ecaf42e2e10631551d909f203e31308c12d703922742f9"}, + {file = "regex-2024.7.24.tar.gz", hash = "sha256:9cfd009eed1a46b27c14039ad5bbc5e71b6367c5b2e6d5f5da0ea91600817506"}, ] [[package]] @@ -2875,23 +2639,22 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "responses" -version = "0.23.1" +version = "0.25.3" description = "A utility library for mocking out the `requests` Python library." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "responses-0.23.1-py3-none-any.whl", hash = "sha256:8a3a5915713483bf353b6f4079ba8b2a29029d1d1090a503c70b0dc5d9d0c7bd"}, - {file = "responses-0.23.1.tar.gz", hash = "sha256:c4d9aa9fc888188f0c673eff79a8dadbe2e75b7fe879dc80a221a06e0a68138f"}, + {file = "responses-0.25.3-py3-none-any.whl", hash = "sha256:521efcbc82081ab8daa588e08f7e8a64ce79b91c39f6e62199b19159bea7dbcb"}, + {file = "responses-0.25.3.tar.gz", hash = "sha256:617b9247abd9ae28313d57a75880422d55ec63c29d33d629697590a034358dba"}, ] [package.dependencies] pyyaml = "*" -requests = ">=2.22.0,<3.0" -types-PyYAML = "*" -urllib3 = ">=1.25.10" +requests = ">=2.30.0,<3.0" +urllib3 = ">=1.25.10,<3.0" [package.extras] -tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-requests"] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] [[package]] name = "restructuredtext-lint" @@ -2906,15 +2669,154 @@ files = [ [package.dependencies] docutils = ">=0.11,<1.0" +[[package]] +name = "rpds-py" +version = "0.20.0" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, + {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, + {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, + {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, + {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, + {file 
= "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, + {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, + {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, + {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"}, + {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, + {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, + {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, + {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, + {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, 
+ {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, + {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, + {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, + {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, + {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, + {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, + {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, + {file = 
"rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, + {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, +] + +[[package]] +name = "ruff" +version = "0.5.7" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, + {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, + {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, + {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, + {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, + {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, + {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, +] + [[package]] name = "s3transfer" -version = "0.10.0" +version = "0.10.2" description = "An Amazon S3 Transfer Manager" optional = false -python-versions = ">= 3.8" +python-versions = ">=3.8" files = [ - {file = "s3transfer-0.10.0-py3-none-any.whl", hash = "sha256:3cdb40f5cfa6966e812209d0994f2a4709b561c88e90cf00c2696d2df4e56b2e"}, - {file = "s3transfer-0.10.0.tar.gz", hash = "sha256:d0c8bbf672d5eebbe4e57945e23b972d963f07d82f661cabf678a5c88831595b"}, + {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, + {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, ] [package.dependencies] @@ -2938,17 +2840,6 @@ files = [ attrs = "*" pbr = "*" -[[package]] -name = "semver" -version = "2.13.0" -description = "Python helper for Semantic 
Versioning (http://semver.org/)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"}, - {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, -] - [[package]] name = "send2trash" version = "1.8.3" @@ -2967,19 +2858,19 @@ win32 = ["pywin32"] [[package]] name = "setuptools" -version = "67.6.0" +version = "71.0.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"}, - {file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"}, + {file = "setuptools-71.0.2-py3-none-any.whl", hash = "sha256:f6640114f96be808024fbd1f721161215543796d3a68da4524349de700604ce8"}, + {file = "setuptools-71.0.2.tar.gz", hash = "sha256:ca359bea0cd5c8ce267d7463239107e87f312f2e2a11b6ca6357565d82b6c0d7"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (<7.4)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -3014,154 +2905,222 @@ files = [ {file = "snowballstemmer-2.2.0.tar.gz", hash = 
"sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] +[[package]] +name = "soupsieve" +version = "2.6" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, +] + [[package]] name = "sphinx" -version = "4.5.0" +version = "7.4.7" description = "Python documentation generator" optional = false -python-versions = ">=3.6" +python-versions = ">=3.9" files = [ - {file = "Sphinx-4.5.0-py3-none-any.whl", hash = "sha256:ebf612653238bcc8f4359627a9b7ce44ede6fdd75d9d30f68255c7383d3a6226"}, - {file = "Sphinx-4.5.0.tar.gz", hash = "sha256:7bf8ca9637a4ee15af412d1a1d9689fec70523a68ca9bb9127c2f3eeb344e2e6"}, + {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, + {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, ] [package.dependencies] -alabaster = ">=0.7,<0.8" -babel = ">=1.3" -colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.14,<0.18" -imagesize = "*" -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} -Jinja2 = ">=2.3" -packaging = "*" -Pygments = ">=2.0" -requests = ">=2.5.0" -snowballstemmer = ">=1.1" +alabaster = ">=0.7.14,<0.8.0" +babel = ">=2.13" +colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} +docutils = ">=0.20,<0.22" +imagesize = ">=1.3" +importlib-metadata = {version = ">=6.0", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.1" +packaging = ">=23.0" +Pygments = ">=2.17" +requests = ">=2.30.0" +snowballstemmer = ">=2.2" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" sphinxcontrib-htmlhelp = ">=2.0.0" sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = ">=1.1.5" +sphinxcontrib-serializinghtml = ">=1.1.9" +tomli = {version = ">=2", markers = "python_version < \"3.11\""} [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.931)", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] +lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] [[package]] -name = "sphinx-github-changelog" -version = "1.3.0" -description = "Build a sphinx changelog from GitHub Releases" +name = "sphinx-basic-ng" +version = "1.0.0b2" +description = "A modern skeleton for Sphinx themes." 
optional = false -python-versions = ">=3.8,<4.0" +python-versions = ">=3.7" files = [ - {file = "sphinx_github_changelog-1.3.0-py3-none-any.whl", hash = "sha256:eb5424d590ae7866e77b8db7eecf283678cba76b74d90b17bc4f3872976407eb"}, - {file = "sphinx_github_changelog-1.3.0.tar.gz", hash = "sha256:b898adc52131147305b9cb893c2a4cad0ba2912178ed8f88b62bf6f43a2baaa4"}, + {file = "sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b"}, + {file = "sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9"}, ] [package.dependencies] -docutils = "*" -requests = "*" -Sphinx = "*" +sphinx = ">=4.0" + +[package.extras] +docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] [[package]] -name = "sphinx-rtd-theme" -version = "1.3.0" -description = "Read the Docs theme for Sphinx" +name = "sphinx-copybutton" +version = "0.5.2" +description = "Add a copy button to each of your code cells." optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.7" files = [ - {file = "sphinx_rtd_theme-1.3.0-py2.py3-none-any.whl", hash = "sha256:46ddef89cc2416a81ecfbeaceab1881948c014b1b6e4450b815311a89fb977b0"}, - {file = "sphinx_rtd_theme-1.3.0.tar.gz", hash = "sha256:590b030c7abb9cf038ec053b95e5380b5c70d61591eb0b552063fbe7c41f0931"}, + {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"}, + {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"}, ] [package.dependencies] -docutils = "<0.19" -sphinx = ">=1.6,<8" -sphinxcontrib-jquery = ">=4,<5" +sphinx = ">=1.8" [package.extras] -dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] +code-style = ["pre-commit (==2.12.1)"] +rtd = ["ipython", "myst-nb", "sphinx", "sphinx-book-theme", "sphinx-examples"] [[package]] -name = "sphinx-tabs" -version = "3.4.5" -description = "Tabbed views for Sphinx" +name = "sphinx-design" +version = "0.6.1" +description = "A sphinx extension for designing beautiful, view size responsive web components." 
optional = false -python-versions = "~=3.7" +python-versions = ">=3.9" files = [ - {file = "sphinx-tabs-3.4.5.tar.gz", hash = "sha256:ba9d0c1e3e37aaadd4b5678449eb08176770e0fc227e769b6ce747df3ceea531"}, - {file = "sphinx_tabs-3.4.5-py3-none-any.whl", hash = "sha256:92cc9473e2ecf1828ca3f6617d0efc0aa8acb06b08c56ba29d1413f2f0f6cf09"}, + {file = "sphinx_design-0.6.1-py3-none-any.whl", hash = "sha256:b11f37db1a802a183d61b159d9a202314d4d2fe29c163437001324fe2f19549c"}, + {file = "sphinx_design-0.6.1.tar.gz", hash = "sha256:b44eea3719386d04d765c1a8257caca2b3e6f8421d7b3a5e742c0fd45f84e632"}, +] + +[package.dependencies] +sphinx = ">=6,<9" + +[package.extras] +code-style = ["pre-commit (>=3,<4)"] +rtd = ["myst-parser (>=2,<4)"] +testing = ["defusedxml", "myst-parser (>=2,<4)", "pytest (>=8.3,<9.0)", "pytest-cov", "pytest-regressions"] +testing-no-myst = ["defusedxml", "pytest (>=8.3,<9.0)", "pytest-cov", "pytest-regressions"] +theme-furo = ["furo (>=2024.7.18,<2024.8.0)"] +theme-im = ["sphinx-immaterial (>=0.12.2,<0.13.0)"] +theme-pydata = ["pydata-sphinx-theme (>=0.15.2,<0.16.0)"] +theme-rtd = ["sphinx-rtd-theme (>=2.0,<3.0)"] +theme-sbt = ["sphinx-book-theme (>=1.1,<2.0)"] + +[[package]] +name = "sphinx-github-changelog" +version = "1.4.0" +description = "Build a sphinx changelog from GitHub Releases" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "sphinx_github_changelog-1.4.0-py3-none-any.whl", hash = "sha256:cdf2099ea3e4587ae8637be7ba609738bfdeca4bd80c5df6fc45046735ae5c2f"}, + {file = "sphinx_github_changelog-1.4.0.tar.gz", hash = "sha256:204745e93a1f280e4664977b5fee526b0a011c92ca19c304bd01fd641ddb6393"}, ] [package.dependencies] docutils = "*" -pygments = "*" -sphinx = "*" +requests = "*" +Sphinx = "*" + +[[package]] +name = "sphinx-notfound-page" +version = "1.0.4" +description = "Sphinx extension to build a 404 page with absolute URLs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinx_notfound_page-1.0.4-py3-none-any.whl", hash = "sha256:f7c26ae0df3cf3d6f38f56b068762e6203d0ebb7e1c804de1059598d7dd8b9d8"}, + {file = "sphinx_notfound_page-1.0.4.tar.gz", hash = "sha256:2a52f49cd367b5c4e64072de1591cc367714098500abf4ecb9a3ecb4fec25aae"}, +] + +[package.dependencies] +sphinx = ">=5" [package.extras] -code-style = ["pre-commit (==2.13.0)"] -testing = ["bs4", "coverage", "pygments", "pytest (>=7.1,<8)", "pytest-cov", "pytest-regressions", "rinohtype"] +doc = ["sphinx-autoapi", "sphinx-rtd-theme", "sphinx-tabs", "sphinxemoji"] +test = ["tox"] [[package]] name = "sphinxcontrib-apidoc" -version = "0.3.0" +version = "0.5.0" description = "A Sphinx extension for running 'sphinx-apidoc' on each build" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "sphinxcontrib-apidoc-0.3.0.tar.gz", hash = "sha256:729bf592cf7b7dd57c4c05794f732dc026127275d785c2a5494521fdde773fb9"}, - {file = "sphinxcontrib_apidoc-0.3.0-py2.py3-none-any.whl", hash = "sha256:6671a46b2c6c5b0dca3d8a147849d159065e50443df79614f921b42fbd15cb09"}, + {file = "sphinxcontrib-apidoc-0.5.0.tar.gz", hash = "sha256:65efcd92212a5f823715fb95ee098b458a6bb09a5ee617d9ed3dead97177cd55"}, + {file = "sphinxcontrib_apidoc-0.5.0-py3-none-any.whl", hash = "sha256:c671d644d6dc468be91b813dcddf74d87893bff74fe8f1b8b01b69408f0fb776"}, ] [package.dependencies] pbr = "*" -Sphinx = ">=1.6.0" +Sphinx = ">=5.0.0" [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.4" +version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, - {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, + {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, + {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.2" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +version = "2.0.0" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, + {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, + {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] +[[package]] +name = "sphinxcontrib-external-links" +version = "0.1.1" +description = "Sphinx extension for easily adding reusable external links." 
+optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "sphinxcontrib_external_links-0.1.1-py3-none-any.whl", hash = "sha256:d2ec5572179fa004eeec7b5450d2c0bc3e52022c51871b684a7655069a223064"}, + {file = "sphinxcontrib_external_links-0.1.1.tar.gz", hash = "sha256:1b982e9fb960b2983e87ce64892f28cd0b4fa918560f8e63426641eb16558812"}, +] + +[package.dependencies] +sphinx = ">=7.2,<9" + [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.1" +version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, - {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, + {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, + {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["html5lib", "pytest"] [[package]] @@ -3208,61 +3167,45 @@ Sphinx = ">=1.7.0" [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.3" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +version = "2.0.0" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["defusedxml (>=0.7.1)", "pytest"] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.5" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
+version = "2.0.0" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, - {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, + {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] -[[package]] -name = "sshpubkeys" -version = "3.3.1" -description = "SSH public key parser" -optional = false -python-versions = ">=3" -files = [ - {file = "sshpubkeys-3.3.1-py2.py3-none-any.whl", hash = "sha256:946f76b8fe86704b0e7c56a00d80294e39bc2305999844f079a217885060b1ac"}, - {file = "sshpubkeys-3.3.1.tar.gz", hash = "sha256:3020ed4f8c846849299370fbe98ff4157b0ccc1accec105e07cfa9ae4bb55064"}, -] - -[package.dependencies] -cryptography = ">=2.1.4" -ecdsa = ">=0.13" - -[package.extras] -dev = ["twine", "wheel", "yapf"] - [[package]] name = "stevedore" -version = "5.0.0" +version = "5.2.0" description = "Manage dynamic plugins for Python applications" optional = false python-versions = ">=3.8" files = [ - {file = "stevedore-5.0.0-py3-none-any.whl", hash = "sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771"}, - {file = "stevedore-5.0.0.tar.gz", hash = "sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021"}, + {file = "stevedore-5.2.0-py3-none-any.whl", hash = "sha256:1c15d95766ca0569cad14cb6272d4d31dae66b011a929d7c18219c176ea1b5c9"}, + {file = "stevedore-5.2.0.tar.gz", hash = "sha256:46b93ca40e1114cea93d738a6c1e365396981bb6bb78c27045b7587c9473544d"}, ] [package.dependencies] @@ -3284,13 +3227,13 @@ mpmath = ">=0.19" [[package]] name = "termcolor" -version = "2.2.0" +version = "2.4.0" description = "ANSI color formatting for output in terminal" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "termcolor-2.2.0-py3-none-any.whl", hash = "sha256:91ddd848e7251200eac969846cbae2dacd7d71c2871e92733289e7e3666f48e7"}, - {file = "termcolor-2.2.0.tar.gz", hash = "sha256:dfc8ac3f350788f23b2947b3e6cfa5a53b630b612e6cd8965a015a776020b99a"}, + {file = "termcolor-2.4.0-py3-none-any.whl", hash = "sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63"}, + {file = "termcolor-2.4.0.tar.gz", hash = "sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a"}, ] [package.extras] @@ -3334,26 +3277,15 @@ files = [ {file = "tomli_w-1.0.0.tar.gz", hash = "sha256:f463434305e0336248cac9c2dc8076b707d8a12d019dd349f5c1e382dd1ae1b9"}, ] -[[package]] -name = "tomlkit" -version = "0.11.6" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "tomlkit-0.11.6-py3-none-any.whl", hash = "sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b"}, - {file = "tomlkit-0.11.6.tar.gz", hash = "sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"}, -] - [[package]] name = 
"troposphere" -version = "4.8.0" +version = "4.8.1" description = "AWS CloudFormation creation library" optional = false python-versions = ">=3.8" files = [ - {file = "troposphere-4.8.0-py3-none-any.whl", hash = "sha256:6d6eb50460aa8bd72a2caf6c41eef79731ac1a202e8e07114948478490da3d8d"}, - {file = "troposphere-4.8.0.tar.gz", hash = "sha256:09edd355157830b2efd156accd3fa195128083bf3cc2db6f610ec653ee3531f0"}, + {file = "troposphere-4.8.1-py3-none-any.whl", hash = "sha256:9a3696327102a1e2fe743cb235beeaf171361d9a828ad6b602d840eb93d9c03b"}, + {file = "troposphere-4.8.1.tar.gz", hash = "sha256:05676e0e7be7b9d05d2a4126777a3f017d8a140d5bb810cab826a905ff4b1b7d"}, ] [package.dependencies] @@ -3373,31 +3305,17 @@ files = [ {file = "types_awscrt-0.16.12.tar.gz", hash = "sha256:fca34b9c30e39567838adc51cc186fae92d702741c7aaf835e19c3eb1bd4d8dc"}, ] -[[package]] -name = "types-pyyaml" -version = "6.0.12.8" -description = "Typing stubs for PyYAML" -optional = false -python-versions = "*" -files = [ - {file = "types-PyYAML-6.0.12.8.tar.gz", hash = "sha256:19304869a89d49af00be681e7b267414df213f4eb89634c4495fa62e8f942b9f"}, - {file = "types_PyYAML-6.0.12.8-py3-none-any.whl", hash = "sha256:5314a4b2580999b2ea06b2e5f9a7763d860d6e09cdf21c0e9561daa9cbd60178"}, -] - [[package]] name = "types-s3transfer" -version = "0.6.0.post6" +version = "0.10.1" description = "Type annotations and code completion for s3transfer" optional = false -python-versions = ">=3.7,<4.0" +python-versions = "<4.0,>=3.8" files = [ - {file = "types_s3transfer-0.6.0.post6-py3-none-any.whl", hash = "sha256:03af77eef369a9901422cadf3ad270709876421d8eb445ddbd78a58ac802a464"}, - {file = "types_s3transfer-0.6.0.post6.tar.gz", hash = "sha256:e05c6d86c2d5271a5170a21f8a0222ca5f2d5b91aad7e9d436cca24c8e1ae13c"}, + {file = "types_s3transfer-0.10.1-py3-none-any.whl", hash = "sha256:49a7c81fa609ac1532f8de3756e64b58afcecad8767933310228002ec7adff74"}, + {file = "types_s3transfer-0.10.1.tar.gz", hash = "sha256:02154cce46528287ad76ad1a0153840e0492239a0887e8833466eccf84b98da0"}, ] -[package.dependencies] -types-awscrt = "*" - [[package]] name = "typing-extensions" version = "4.12.2" @@ -3458,13 +3376,13 @@ files = [ [[package]] name = "wcmatch" -version = "8.4.1" +version = "9.0" description = "Wildcard/glob file name matcher." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "wcmatch-8.4.1-py3-none-any.whl", hash = "sha256:3476cd107aba7b25ba1d59406938a47dc7eec6cfd0ad09ff77193f21a964dee7"}, - {file = "wcmatch-8.4.1.tar.gz", hash = "sha256:b1f042a899ea4c458b7321da1b5e3331e3e0ec781583434de1301946ceadb943"}, + {file = "wcmatch-9.0-py3-none-any.whl", hash = "sha256:af25922e2b6dbd1550fa37a4c8de7dd558d6c1bb330c641de9b907b9776cb3c4"}, + {file = "wcmatch-9.0.tar.gz", hash = "sha256:567d66b11ad74384954c8af86f607857c3bdf93682349ad32066231abd556c92"}, ] [package.dependencies] @@ -3487,104 +3405,6 @@ MarkupSafe = ">=2.1.1" [package.extras] watchdog = ["watchdog (>=2.3)"] -[[package]] -name = "wheel" -version = "0.42.0" -description = "A built-package format for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"}, - {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"}, -] - -[package.extras] -test = ["pytest (>=6.0.0)", "setuptools (>=65)"] - -[[package]] -name = "wrapt" -version = "1.15.0" -description = "Module for decorators, wrappers and monkey patching." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -files = [ - {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, - {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, - {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, - {file = 
"wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, - {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, - {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, - {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, - {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, - {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, - {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, - {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, - {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, - {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, - {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, - {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, - {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, - {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, - {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, - {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, - {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, - {file = 
"wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, - {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, - {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, - {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, - {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, - {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, - {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, - {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, - {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, -] - [[package]] name = "xmltodict" version = "0.13.0" @@ -3616,20 +3436,20 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"] [[package]] name = "zipp" -version = "3.15.0" +version = "3.19.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, - {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", 
"sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = ">=3.9, <3.13" -content-hash = "0c57981b4583118568443d4438c10a1bac20399d10faac78989c209b16fc904f" +content-hash = "e86930f251e333f695b226a3bb3150a2d3fe23d0243dfd8875ad02e18220e32e" diff --git a/pyproject.toml b/pyproject.toml index a241026b0..92297d14e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,53 +1,53 @@ [tool.poetry] name = "runway" -version = "2.0.0-dev" # do not change -description = "Simplify infrastructure/app testing/deployment" -license = "Apache-2.0" +version = "0.0.0" authors = [ "Onica Group LLC ", ] -maintainers = [ - "Kyle Finley ", "Sam Fakhreddine " -] -readme = "README.md" -homepage = "https://github.com/onicagroup/runway" -repository = "https://github.com/onicagroup/runway" -documentation = "https://docs.onica.com/projects/runway" -keywords = ["cli"] classifiers = [ "Intended Audience :: Developers", - "Topic :: Utilities", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12" + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.9", + "Topic :: Utilities", +] +description = "Simplify infrastructure/app testing/deployment" +documentation = "https://runway.readthedocs.io" +homepage = "https://github.com/onicagroup/runway" +keywords = ["cli"] +license = "Apache-2.0" +maintainers = [ + "Kyle Finley ", + "Sam Fakhreddine ", ] packages = [ - { include = "runway" }, + {include = "runway"}, ] +readme = "README.md" +repository = "https://github.com/onicagroup/runway" [tool.poetry.dependencies] python = ">=3.9, <3.13" - -"backports.cached_property" = { version = "*", python = "<3.8" } awacs = "*" -boto3 = "^1.16" +boto3 = "^1.34.50" cfn-lint = "*" cfn_flip = "^1.2" # only used in runway._cli.commands._gen_sample.utils click = "^8.0" coloredlogs = "*, !=15.0.1" docker = ">=3.0.0" # used in runway.cfngin.hooks +eval-type-backport = {python = "< 3.10", version = "*"} # TODO (kyle): remove when dropping support for python 3.9 formic2 = "*" # only used in runway.cfngin.hooks.aws_lambda gitpython = "*" igittigitt = ">=2.0.5" -importlib-metadata = { version = "*", python = "<3.8" } jinja2 = ">=2.7" # used in runway.cfngin.blueprints.raw packaging = "*" # component of setuptools needed for version compare +pipenv = "2022.1.8" pyOpenSSL = "*" # For embedded hook & associated script usage -pydantic = "^1.4" +pydantic = "^2.8.0" pyhcl = "^0.4" # does not support HCL2, possibly move to extras_require in the future python-hcl2 = ">=3.0.0" pyyaml = ">5.4" @@ -56,56 +56,45 @@ send2trash = "*" tomli = ">=1.2.2" troposphere = ">=2.4, <5" typing_extensions = "*" # only really needed for < 3.8 but can still be used in >= 3.8 -urllib3 = "*" # allow us to follow botocore's hard pinning without needing to update our own yamllint = "*" -pipenv = "2022.1.8" -moto = "3.0.5" -testfixtures = "^7.0.3" -wheel = "^0.42.0" -pyinstaller = "^6.2.0" [tool.poetry.group.dev.dependencies] -black = ">=22.1" -coverage = { version = ">=6.3", extras = ["toml"] } -doc8 = ">=0.10" # for linting with 
vscode rst extension -dunamai = "^1.5" -flake8 = ">=4.0.1" -flake8-bugbear = ">=21.9.2" # flake8 plugin -flake8-comprehensions = ">=3.7.0" # flake8 plugin -flake8-docstrings = ">=1.6" # flake8 plugin -flake8-print = ">=4.0.0" # flake8 plugin -flake8-use-fstring = ">=1.3" # flake8 plugin -isort = ">=5.12" -mock = ">=4.0" -moto = { version = ">=3.0", extras = ["ec2", "ecs", "iam", "s3", "ssm"] } -pep8-naming = ">=0.12.1" # flake8 plugin -pipenv = "^2022.1.8" # only used in tests -pre-commit = ">=2.14" -pydocstyle = ">=6.1.1" # flake8 plugin -pylint = ">=2.12" -pytest = ">=7.0" -pytest-cov = ">=3.0" # pytest plugin -pytest-mock = ">=3.7" # pytest plugin -pytest-order = ">=1" # pytest plugin -pytest-subprocess = ">=1.4" # pytest plugin -pytest-sugar = ">=0.9" # pytest plugin -pytest-xdist = ">=2.5" # pytest plugin -semver = "^2.13" # only used in .github/scripts/urlshortener -testfixtures = "^7.0.3" # TODO remove use of this dependency - was inherited -tomli-w = ">=1.0" +pre-commit = "^3.8.0" [tool.poetry.group.docs.dependencies] -dunamai = "^1.5" -jsx-lexer = "^1.0" -sphinx = "^4.3" -sphinx-github-changelog = "^1.1" -sphinx-rtd-theme = "^1.0" -sphinx-tabs = "^3.2" -sphinxcontrib-apidoc = "^0.3" +doc8 = "^1.1.1" +furo = "^2024.8.6" +jsx-lexer = "^2.0.1" +sphinx = "^7.4.7" +sphinx-copybutton = "^0.5.2" +sphinx-design = "^0.6.1" +sphinx-github-changelog = "^1.4.0" +sphinx-notfound-page = "^1.0.4" +sphinxcontrib-apidoc = "^0.5.0" +sphinxcontrib-external-links = "^0.1.1" +sphinxcontrib-jquery = "^4.1" sphinxcontrib-programoutput = "^0.17" +[tool.poetry.group.lint.dependencies] +black = "^24.8.0" +ruff = "^0.5.7" + +[tool.poetry.group.test.dependencies] +coverage = {extras = ["toml"], version = "^7.6.1"} +moto = {extras = ["ec2", "ecs", "iam", "s3", "ssm"], version = "^5.0.12"} +pipenv = "^2022.1.8" # only used in tests +pytest = "^8.3.2" +pytest-cov = "^5.0.0" +pytest-mock = "^3.14.0" +pytest-order = "^1.2.1" +pytest-subprocess = "^1.5.2" +pytest-sugar = "^1.0.0" +pytest-xdist = {extras = ["psutil"], version = "^3.6.1"} +testfixtures = "^7.0.3" # TODO (kyle) remove use of this dependency - was inherited +tomli-w = "^1.0.0" + [tool.poetry.group.types.dependencies] -mypy-boto3 = "^1.16" # importable boto3 type annotations +mypy-boto3 = "^1.34.158" # importable boto3 type annotations [tool.poetry.group.types.dependencies.boto3-stubs] extras = [ @@ -126,7 +115,7 @@ extras = [ "ssm", "sts", ] -version = "^1.16" +version = "^1.34.158" [tool.poetry.scripts] runway = "runway._cli.main:cli" @@ -134,11 +123,6 @@ runway = "runway._cli.main:cli" [tool.poetry.urls] "Bug Tracker" = "https://github.com/onicagroup/runway/issues" -[build-system] -requires = ["poetry_core>=1.0.7"] -build-backend = "poetry.core.masonry.api" - - [tool.black] force-exclude = ''' /( @@ -153,214 +137,81 @@ force-exclude = ''' | _build | build | dist - | npm | runway/aws_sso_botocore )/ ''' include = '\.pyi?$' -line-length = 88 -target-version = ["py38", "py39"] - +line-length = 100 +target-version = ["py310", "py311", "py312", "py39"] [tool.coverage.report] exclude_lines = [ + "@overload", "cov: ignore", # standard exclude comment + "from pathlib import Path", "if TYPE_CHECKING:", # excluded blocks "if __name__ == .__main__.:", "raise AssertionError", # defensive exceptions "raise NotImplementedError", - "from pathlib import Path", - "@overload", ] fail_under = 85 precision = 2 show_missing = true - [tool.coverage.run] concurrency = [ "multiprocessing", "thread", ] omit = [ + "*/compat.py", "*/runway/aws_sso_botocore/*", # TODO 
remove native support is added to botocore "*/runway/cfngin/hooks/staticsite/auth_at_edge/templates/*", - "*/compat.py", "*/type_defs.py", ] -[tool.isort] -profile = "black" -known_local_folder = [ - "jwks_rsa", - "shared", - "update_urls", -] -skip = [ - ".demo", - ".eggs", - ".git", - ".mypy_cache", - ".runway", - ".runway_cache", - ".venv", - "_build", - "build", - "dist", - "integration_tests", - "node_modules", - "venv", -] - - -[tool.pylint.basic] -# http://pylint.pycqa.org/en/latest/technical_reference/features.html#basic-checker -attr-rgx = "([a-z_][a-z0-9_]{2,50}|VARIABLES)$" -# attr-name-hint = "([a-z_][a-z0-9_]{2,50}|VARIABLES)$" -good-names = [ - "_", - "a", - "b", - "ci", - "db", - "f", - "fn", - "fp", - "gb", - "i", - "id", - "j", - "k", - "kb", - "mb", - "ok", - "os", - "ui", - "v", -] - -[tool.pylint.classes] -# http://pylint.pycqa.org/en/latest/technical_reference/features.html#classes-checker -defining-attr-methods = [ - "__init__", - "__new__", - "setUp", -] -exclude-protected=[ - "_asdict", - "_fields", - "_replace", - "_source", - "_make", - "_session", # for boto3.session.Session - "_prompter", - "_client_config", # boto3.client.Client._client_config contains info like region - "_endpoint", # boto3.client.Client._endpoint contains s3 endpoint info - "_validate_props" # called on troposphere resources -] - -[tool.pylint.design] -# http://pylint.pycqa.org/en/latest/technical_reference/features.html#design-checker-options -max-args = 10 -max-attributes = 20 -max-bool-expr = 5 -max-branches = 20 -max-locals = 25 -max-parents = 10 -max-public-methods = 30 -max-returns = 10 -max-statements = 50 -min-public-methods = 0 - -[tool.pylint.format] -# http://pylint.pycqa.org/en/latest/technical_reference/features.html#format-checker -max-line-length = 120 -max-module-lines = 1000 - -[tool.pylint.imports] -# http://pylint.pycqa.org/en/latest/technical_reference/features.html#imports-checker -allow-wildcard-with-all = "no" - -[tool.pylint.logging] -# http://pylint.pycqa.org/en/latest/technical_reference/features.html#logging-checker -logging-format-style = "old" # TODO update to new - -[tool.pylint.master] -# http://pylint.pycqa.org/en/latest/technical_reference/features.html#general-options -extension-pkg-whitelist = [ - "pydantic", # https://github.com/samuelcolvin/pydantic/issues/992#issuecomment-553545180 -] -ignore-patterns = [ - ".+py[ci]$", -] -jobs = 0 - -[tool.pylint.miscellaneous] -# http://pylint.pycqa.org/en/latest/technical_reference/features.html#miscellaneous-checker -notes = ["FIXME"] - -[tool.pylint.message_control] -# http://pylint.pycqa.org/en/latest/technical_reference/features.html#messages-control-options -disable = [ - "line-too-long", # flake8 overlap - "missing-class-docstring", # flake8 (pydocstyle) overlap - "missing-function-docstring", # flake8 (pydocstyle) overlap - "missing-module-docstring", # flake8 (pydocstyle) overlap - "similarities", # black overcomplicated this - "ungrouped-imports", # false positive when using TYPE_CHECKING; isort should cover this - "unused-import", # flake8 overlap (F401) - "broad-exception-raised", - "missing-timeout" -] - -[tool.pylint.typecheck] -# http://pylint.pycqa.org/en/latest/technical_reference/features.html#typecheck-checker -ignored-classes = [ - "runway.config.ConfigComponent", - "runway.utils.MutableMap", -] -ignored-modules = ["_typeshed", "distutils"] - +[tool.poetry-dynamic-versioning] # poetry self add "poetry-dynamic-versioning[plugin]" +bump = true +enable = true +fix-shallow-repository = true 
+metadata = false +strict = true +style = "pep440" [tool.pyright] exclude = [ - "**/__pycache__", "**/.demo", "**/.eggs", "**/.git", "**/.runway", "**/.venv", + "**/__pycache__", "**/docs", "**/node_modules", "**/quickstarts", - "**/typings", "**/runway/aws_sso_botocore", "**/runway/cfngin/hooks/staticsite/auth_at_edge/templates", "**/runway/templates/cdk-py", "**/tests/functional/cfngin/test_aws_lambda_hook/lambda_src", - "**/tests/unit" -] -extraPaths = [ - "./.github/scripts/urlshortener", + "**/tests/unit", + "**/typings", ] pythonPlatform = "All" pythonVersion = "3.9" reportDuplicateImport = "none" reportImportCycles = "none" -reportIncompatibleMethodOverride = "warning" reportMissingTypeStubs = "none" reportPrivateUsage = "none" reportUnknownMemberType = "none" reportUnnecessaryIsInstance = "warning" +reportUnnecessaryTypeIgnoreComment = "error" reportUnusedImport = "none" reportUnusedVariable = "none" -reportWildcardImportFromLibrary = "none" strictParameterNoneValue = false typeCheckingMode = "strict" useLibraryCodeForTypes = true venv = ".venv" - [tool.pytest.ini_options] addopts = [ "--cov-config=pyproject.toml", @@ -378,3 +229,129 @@ python_classes = ["Test*"] python_files = ["test_*.py"] python_functions = ["test_*"] testpaths = ["tests"] + +[tool.ruff] # https://docs.astral.sh/ruff/settings/#top-level +extend-exclude = [ + "runway/aws_sso_botocore", # NOTE (kyle): ignoring vendored code + "runway/cfngin/hooks/staticsite/auth_at_edge/templates", # TODO (kyle): resolve lint error + "typings", +] +force-exclude = true +line-length = 120 +show-fixes = true +target-version = "py39" # important to set before applying fixes + +[tool.ruff.lint] # https://docs.astral.sh/ruff/settings/#lint +extend-safe-fixes = [ + "UP006", + "UP007", + "UP038", + "UP040", +] +ignore = [ + "ANN101", # Missing type annotation for `self` in method + "ANN102", # Missing type annotation for `cls` in classmethod + "ANN401", # Dynamically typed expressions (typing.Any) are disallowed # TODO (kyle): improve type annotations + "COM812", # Trailing comma missing + "D203", # 1 blank line required before class docstring + "D213", # Multi-line docstring summary should start at the second line + "D215", # Section underline is over-indented + "D403", # First word of the first line should be capitalized + "D406", # Section name should end with a newline + "D407", # Missing dashed underline after section + "D408", # Section underline should be in the line following the section's name + "D409", # Section underline should match the length of its name + "DTZ", # flake8-datetimez # NOTE (kyle): this is fine here + "EM", # flake8-errmsg + "ERA001", # Found commented-out code # NOTE (kyle): incorrectly detects cspell + "FA100", # Missing `from __future__ import annotations`, but uses `typing.Optional` + "FBT001", # Boolean positional arg in function definition + "FBT002", # Boolean default value in function definition + "FBT003", # Boolean positional value in function call + "FIX002", # Line contains TODO + "N818", # Exception name should be named with an Error suffix # TODO (kyle): resolve in next major release + "PERF203", # `try`-`except` within a loop incurs performance overhead + "PGH003", # Use specific rule codes when ignoring type issues # TODO (kyle): resolve this eventually + "RUF012", # TODO (kyle): remove when resolved - https://github.com/astral-sh/ruff/issues/5243 + "S105", # (hardcoded-password-string) Possible hardcoded password + "S106", # (hardcoded-password-func-arg) Possible hardcoded password + 
"S107", # (hardcoded-password-default) Possible hardcoded password + "S108", # Probable insecure usage of temporary file or directory + "S301", # `pickle` and modules that wrap it can be unsafe when used to deserialize untrusted data + "S60", # flake8-bandit # NOTE (kyle): most of these are for subprocess which we don't care about right now + "S604", # Function call with `shell=True` parameter identified # NOTE (kyle): required for runway + "TD003", # Missing issue link on the line following this TODO + "TID252", # Relative imports from parent modules are banned + "TRY", # tryceratops +] +select = ["ALL"] + +[tool.ruff.lint.extend-per-file-ignores] # https://docs.astral.sh/ruff/settings/#lintextend-per-file-ignores +"*.py" = [ + "PYI024", # Use `typing.NamedTuple` instead of `collections.namedtuple` # NOTE (kyle): should only apply to pyi +] +"runway/templates/*" = [ + "N999", # Invalid module name # NOTE (kyle): these are fine here +] +"tests/*" = [ + "PT004", # Fixture does not return anything, add leading underscore + "S101", # Use of `assert` detected # NOTE (kyle): this is fine here + "SLF001", # Private member accessed # NOTE (kyle): fine in tests +] + +[tool.ruff.lint.flake8-annotations] # https://docs.astral.sh/ruff/settings/#lintflake8-annotations +allow-star-arg-any = true + +[tool.ruff.lint.flake8-pytest-style] # https://docs.astral.sh/ruff/settings/#lintflake8-pytest-style +parametrize-names-type = "csv" # TODO (kyle): update tests to remove the need for this + +[tool.ruff.lint.flake8-self] +ignore-names = [ + "_Environ", + "_Hash", + "_session", +] + +[tool.ruff.lint.flake8-type-checking] # https://docs.astral.sh/ruff/settings/#lint_flake8-type-checking_runtime-evaluated-base-classes +runtime-evaluated-base-classes = [ + "pydantic.BaseModel", + "pydantic.BeforeValidator", + "runway.cfngin.hooks.base.HookArgsBaseModel", + "runway.config.models.base.ConfigProperty", + "runway.utils.BaseModel", +] + +[tool.ruff.lint.isort] # https://docs.astral.sh/ruff/settings/#lintisort +known-local-folder = [ + "jwks_rsa", + "shared", + "update_urls", +] +known-third-party = [ + "docker", # NOTE (kyle): the `docker/` directory confuses isort +] + +[tool.ruff.lint.pydocstyle] # https://docs.astral.sh/ruff/settings/#lintpydocstyle +convention = "google" + +[tool.ruff.lint.pylint] # https://docs.astral.sh/ruff/settings/#lintpylint +allow-magic-value-types = ["bytes", "int", "str"] +max-args = 15 +max-returns = 10 +max-statements = 50 + +[tool.ruff.lint.pyupgrade] # https://docs.astral.sh/ruff/settings/#pyupgrade-keep-runtime-typing +keep-runtime-typing = true # TODO (kyle): remove when dropping support for python 3.9 + +[tool.tomlsort] +all = true +in_place = true +sort_first = ["tool", "tool.poetry"] +spaces_before_inline_comment = 2 +trailing_comma_inline_array = true +overrides."tool.poetry".first = ["name", "version"] +overrides."tool.poetry.dependencies".first = ["python"] + +[build-system] +build-backend = "poetry_dynamic_versioning.backend" +requires = ["poetry-core", "poetry-dynamic-versioning>=1.2.0,<2.0.0"] diff --git a/quickstarts/conduit/pyproject.toml b/quickstarts/conduit/pyproject.toml index 8062507ee..3c647bd2e 100644 --- a/quickstarts/conduit/pyproject.toml +++ b/quickstarts/conduit/pyproject.toml @@ -1,10 +1,10 @@ [tool.poetry] name = "runway-quickstart-conduit" version = "0.0.0" -description = "Runway Quickstart" authors = [ "Onica Group LLC ", ] +description = "Runway Quickstart" license = "Apache-2.0" [tool.poetry.dependencies] @@ -14,5 +14,5 @@ python = "^3.9" runway = 
"^2.0" [build-system] -requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" +requires = ["poetry-core>=1.0.0"] diff --git a/quickstarts/conduit/update_env_endpoint.py b/quickstarts/conduit/update_env_endpoint.py index d4e527d30..43fba72fe 100755 --- a/quickstarts/conduit/update_env_endpoint.py +++ b/quickstarts/conduit/update_env_endpoint.py @@ -9,31 +9,23 @@ STACK_PREFIX = "realworld-" -def update_api_endpoint(): +def update_api_endpoint() -> None: """Update app environment file with backend endpoint.""" - environment = ( - subprocess.check_output(["poetry", "run", "runway", "whichenv"]) - .decode() - .strip() - ) - environment_file = os.path.join( - os.path.dirname(os.path.realpath(__file__)), + environment = subprocess.check_output(["poetry", "run", "runway", "whichenv"]).decode().strip() + environment_file = os.path.join( # noqa: PTH118 + os.path.dirname(os.path.realpath(__file__)), # noqa: PTH120 "src", "environments", "environment.prod.ts" if environment == "prod" else "environment.ts", ) cloudformation = boto3.resource("cloudformation") stack = cloudformation.Stack(STACK_PREFIX + environment) - endpoint = [ - i["OutputValue"] for i in stack.outputs if i["OutputKey"] == "ServiceEndpoint" - ][0] + endpoint = next(i["OutputValue"] for i in stack.outputs if i["OutputKey"] == "ServiceEndpoint") - with open(environment_file, "r") as stream: + with open(environment_file) as stream: # noqa: PTH123 content = stream.read() - content = re.sub( - r"api_url: \'.*\'$", f"api_url: '{endpoint}/api'", content, flags=re.M - ) - with open(environment_file, "w") as stream: + content = re.sub(r"api_url: \'.*\'$", f"api_url: '{endpoint}/api'", content, flags=re.MULTILINE) + with open(environment_file, "w") as stream: # noqa: PTH123 stream.write(content) diff --git a/quickstarts/runway/Dockerfile b/quickstarts/runway/Dockerfile index 046d7181f..ae431d2e2 100644 --- a/quickstarts/runway/Dockerfile +++ b/quickstarts/runway/Dockerfile @@ -1,22 +1,35 @@ -FROM ubuntu:bionic -MAINTAINER Onica Group LLC +FROM ubuntu:24.04 -RUN set -xe && \ - apt-get update && \ - apt-get -y install \ - curl \ - git \ - nano \ - npm \ - python-pip \ - unzip \ - uuid-runtime \ - vim && \ - rm -rf /var/lib/apt/lists/* && \ - update-alternatives --install /usr/bin/node node /usr/bin/nodejs 10 && \ - npm install npm@latest -g && \ - curl -L oni.ca/runway/latest/linux -o runway && \ - chmod +x runway && \ - mv runway /usr/local/bin +ARG PYTHON_VERSION="3.12.5" + +SHELL ["/bin/bash", "--login", "-o", "pipefail", "-c"] + +RUN set -ex; \ + apt-get update; \ + apt-get install -y build-essential libssl-dev zlib1g-dev \ + libbz2-dev libreadline-dev libsqlite3-dev curl git \ + libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev\ + npm; \ + rm -rf /var/lib/apt/lists/*; + +ENV PYTHON_VERSION="${PYTHON_VERSION}" +ENV PYENV_ROOT="/usr/local/share/.pyenv" +ENV PATH="$PYENV_ROOT/bin:$PATH" + +# Install python +RUN set -ex; \ + curl -L https://github.com/pyenv/pyenv-installer/raw/master/bin/pyenv-installer | bash; \ + PYTHON_CONFIGURE_OPTS="--enable-shared" pyenv install "${PYTHON_VERSION}"; \ + pyenv global "${PYTHON_VERSION}"; \ + "${PYENV_ROOT}/versions/${PYTHON_VERSION}/bin/pip" install --disable-pip-version-check --upgrade pip setuptools wheel; \ + "${PYENV_ROOT}/versions/${PYTHON_VERSION}/bin/pip" install pipx; \ + mkdir ~/.profile.d; \ + echo 'eval "$(pyenv init -)";' >> ~/.profile.d/001-pyenv.sh; \ + echo 'if [ -d ~/.profile.d ]; then for rc in ~/.profile.d/*.sh; do if [ -f "$rc" 
]; then . "$rc"; fi done fi; unset rc;' > ~/.profile + +# Install runway - it is recommend to install poetry instad and use poetry to install runway per-project +RUN set -ex; \ + pipx ensurepath; \ + pipx install runway CMD ["bash"] diff --git a/quickstarts/runway/runway-quickstart.yml b/quickstarts/runway/runway-quickstart.yml index 2fff4f7ac..1276ba9fb 100644 --- a/quickstarts/runway/runway-quickstart.yml +++ b/quickstarts/runway/runway-quickstart.yml @@ -10,31 +10,22 @@ Parameters: - Linux KeyPair: - Description: "The existing EC2 KeyPair to be used to access the Runway \ - instance" + Description: "The existing EC2 KeyPair to be used to access the Runway instance" Type: AWS::EC2::KeyPair::KeyName SourceIP: - Description: "The egress (public) IPv4 address from which you plan to \ - access your Runway instance. Hint- https://whatismyip.com . \ - Specify address only, do not include /CIDR designator, \ - example 157.123.231.123" + Description: "The egress (public) IPv4 address from which you plan to access your Runway instance. Hint- https://whatismyip.com . Specify address only, do not include /CIDR designator, example 157.123.231.123" Type: String IamRole: Type: String - Description: "Choose 'auto/admin' to have this CloudFormation template \ - deploy an ADMIN IAM Role for Runway to use to call AWS \ - services. Choose 'manual' to specify an existing IAM Role \ - with more restrictive permissions." + Description: "Choose 'auto/admin' to have this CloudFormation template deploy an ADMIN IAM Role for Runway to use to call AWS services. Choose 'manual' to specify an existing IAM Role with more restrictive permissions." AllowedValues: - auto/admin - manual IamRoleName: - Description: "If you chose 'manual' for IamRole, specify the name of an \ - existing IAM Role here, otherwise leave as the default value \ - of 'none'" + Description: "If you chose 'manual' for IamRole, specify the name of an existing IAM Role here, otherwise leave as the default value of 'none'" Type: String Default: none @@ -209,7 +200,7 @@ Resources: Properties: VpcId: !Ref VPC CidrBlock: 10.0.0.0/24 - AvailabilityZone: !Select ['0', !GetAZs {Ref: 'AWS::Region'}] + AvailabilityZone: !Select ['0', !GetAZs Ref: 'AWS::Region'] PublicRouteTable: Type: AWS::EC2::RouteTable @@ -266,20 +257,17 @@ Resources: AssumeRolePolicyDocument: Version: 2012-10-17 Statement: - - - Effect: Allow + - Effect: Allow Principal: Service: - ec2.amazonaws.com Action: - sts:AssumeRole Policies: - - - PolicyName: RunwayIamRolePolicy + - PolicyName: RunwayIamRolePolicy PolicyDocument: Version: 2012-10-17 Statement: - - - Effect: Allow + - Effect: Allow Action: '*' Resource: '*' diff --git a/runway.file.spec b/runway.file.spec deleted file mode 100644 index 3b8b79db0..000000000 --- a/runway.file.spec +++ /dev/null @@ -1,139 +0,0 @@ -"""pyinstaller spec file to build a single-binary distribution of runway. - -This file should be considered a python file and linted as such. 
- -""" -# pylint: disable=undefined-variable,wrong-import-order,invalid-name -# pylint: disable=wrong-import-position,import-self -import os -import pkgutil -from pkg_resources import get_distribution, get_entry_info - -from PyInstaller.utils.hooks import collect_data_files, copy_metadata - -# distutils not included with virtualenv < 20 so we have to import it here -# can be removed once we can upgrade virtualenv and pyinstaller -import distutils - -if getattr(distutils, "distutils_path", "").endswith("__init__.py"): - distutils.distutils_path = os.path.dirname(distutils.distutils_path) - -CLI_PATH = os.path.join(os.path.dirname(os.path.dirname(workpath)), "runway") # noqa - - -def get_submodules(package): - """Get submodules of a package to add to hiddenimports. - - Package must be installed and imported for this to be used. - - This is needed for dependencies that do not have a - native pyinstaller hook. This may not find everything that - needs to be included. - - Args: - package: An import package to inspect. - - Returns: - List of submodules. - - """ - return [ - name - for _, name, _ in pkgutil.walk_packages( - path=package.__path__, prefix=package.__name__ + ".", onerror=lambda x: None - ) - ] - - -def Entrypoint(dist, group, name, **kwargs): # noqa - """Get entrypoint info for packages using setuptools.""" - ep = get_entry_info(dist, group, name) - # script name must not be a valid module name to avoid name clashes on import - script_path = os.path.join(workpath, name + "-script.py") # noqa: F821 - print("creating script for entry point", dist, group, name) - with open(script_path, "w") as fh: - print("import", ep.module_name, file=fh) - print("%s.%s()" % (ep.module_name, ".".join(ep.attrs)), file=fh) - - return Analysis([script_path] + kwargs.get("scripts", []), **kwargs) # noqa: F821 - - -# files that are not explicitly imported but consumed at runtime -# need to be included as data_files. -data_files = [ - (os.path.join(CLI_PATH, "blueprints"), "./runway/blueprints"), - (os.path.join(CLI_PATH, "cfngin/hooks"), "./runway/cfngin/hooks"), - (os.path.join(CLI_PATH, "templates"), "./runway/templates"), -] -data_files.append( - ("{}/yamllint/conf".format(get_distribution("yamllint").location), "yamllint/conf/") -) -data_files.append( - ("{}/cfnlint/rules".format(get_distribution("cfn-lint").location), "cfnlint/rules/") -) -data_files.append( - ("{}/botocore/data".format(get_distribution("botocore").location), "botocore/data/") -) -data_files.extend(collect_data_files("cfnlint")) -data_files.extend(collect_data_files("distutils")) -data_files.extend(collect_data_files("hcl2")) -data_files.extend(collect_data_files("pip")) -data_files.extend(collect_data_files("wheel")) -data_files.append(copy_metadata("runway")[0]) # support scm version - -# pyinstaller is not able to find dependencies of dependencies -# unless a hook already exists for pyinstaller so we have to -# add their dependencies here. -hiddenimports = [] -# these packages do not have pyinstaller hooks so we need to import -# them to collect a list of submodules to include as hidden imports. 
-import runway # noqa -import troposphere # noqa -import awacs # noqa -import botocore # noqa -import pip # noqa -import wheel # noqa -import yamllint # noqa -import cfnlint # noqa - -hiddenimports.extend(get_submodules(runway)) -hiddenimports.extend(get_submodules(troposphere)) -hiddenimports.extend(get_submodules(awacs)) -hiddenimports.extend(get_submodules(botocore)) -hiddenimports.extend(get_submodules(pip)) -hiddenimports.extend(get_submodules(wheel)) -hiddenimports.extend(get_submodules(distutils)) -hiddenimports.extend(get_submodules(yamllint)) -hiddenimports.extend(get_submodules(cfnlint)) -# needed due to pkg_resources dropping python2 support -# can be removed on the next pyinstaller release -# https://github.com/pypa/setuptools/issues/1963#issuecomment-582084099 -hiddenimports.append("pkg_resources.py2_warn") - -a = Entrypoint( - "runway", - "console_scripts", - "runway", - pathex=[CLI_PATH], - datas=data_files, - hiddenimports=hiddenimports, - win_no_prefer_redirects=False, - win_private_assemblies=False, - cipher=None, - noarchive=False, - binaries=[], -) -pyz = PYZ(a.pure, a.zipped_data, cipher=None) # noqa: F821 -exe = EXE( - pyz, # noqa: F821 - a.scripts, - a.binaries, - a.zipfiles, - a.datas, - [], - name="runway", - strip=False, - upx=True, - runtime_tmpdir=None, - console=True, -) diff --git a/runway.folder.spec b/runway.folder.spec deleted file mode 100644 index 16de8ac5c..000000000 --- a/runway.folder.spec +++ /dev/null @@ -1,147 +0,0 @@ -"""pyinstaller spec file to build a single-binary distribution of runway. - -This file should be considered a python file and linted as such. - -""" -# pylint: disable=undefined-variable,wrong-import-order,invalid-name -# pylint: disable=wrong-import-position,import-self -import os -import pkgutil -from pkg_resources import get_distribution, get_entry_info - -from PyInstaller.utils.hooks import collect_data_files, copy_metadata - -# distutils not included with virtualenv < 20 so we have to import it here -# can be removed once we can upgrade virtualenv and pyinstaller -import distutils - -if getattr(distutils, "distutils_path", "").endswith("__init__.py"): - distutils.distutils_path = os.path.dirname(distutils.distutils_path) - -CLI_PATH = os.path.join(os.path.dirname(os.path.dirname(workpath)), "runway") # noqa - - -def get_submodules(package): - """Get submodules of a package to add to hiddenimports. - - Package must be installed and imported for this to be used. - - This is needed for dependencies that do not have a - native pyinstaller hook. This may not find everything that - needs to be included. - - Args: - package: An import package to inspect. - - Returns: - List of submodules. 
- - """ - return [ - name - for _, name, _ in pkgutil.walk_packages( - path=package.__path__, prefix=package.__name__ + ".", onerror=lambda x: None - ) - ] - - -def Entrypoint(dist, group, name, **kwargs): # noqa - """Get entrypoint info for packages using setuptools.""" - ep = get_entry_info(dist, group, name) - # script name must not be a valid module name to avoid name clashes on import - script_path = os.path.join(workpath, name + "-script.py") # noqa: F821 - print("creating script for entry point", dist, group, name) - with open(script_path, "w") as fh: - print("import", ep.module_name, file=fh) - print("%s.%s()" % (ep.module_name, ".".join(ep.attrs)), file=fh) - - return Analysis([script_path] + kwargs.get("scripts", []), **kwargs) # noqa: F821 - - -# files that are not explicitly imported but consumed at runtime -# need to be included as data_files. -data_files = [ - (os.path.join(CLI_PATH, "blueprints"), "./runway/blueprints"), - (os.path.join(CLI_PATH, "cfngin/hooks"), "./runway/cfngin/hooks"), - (os.path.join(CLI_PATH, "templates"), "./runway/templates"), -] -data_files.append( - ("{}/yamllint/conf".format(get_distribution("yamllint").location), "yamllint/conf/") -) -data_files.append( - ("{}/cfnlint/rules".format(get_distribution("cfn-lint").location), "cfnlint/rules/") -) -data_files.append( - ("{}/botocore/data".format(get_distribution("botocore").location), "botocore/data/") -) -data_files.extend(collect_data_files("cfnlint")) -data_files.extend(collect_data_files("distutils")) -data_files.extend(collect_data_files("hcl2")) -data_files.extend(collect_data_files("pip")) -data_files.extend(collect_data_files("wheel")) -data_files.append(copy_metadata("runway")[0]) # support scm version - -# pyinstaller is not able to find dependencies of dependencies -# unless a hook already exists for pyinstaller so we have to -# add their dependencies here. -hiddenimports = [] -# these packages do not have pyinstaller hooks so we need to import -# them to collect a list of submodules to include as hidden imports. 
-import runway # noqa -import troposphere # noqa -import awacs # noqa -import botocore # noqa -import pip # noqa -import wheel # noqa -import yamllint # noqa -import cfnlint # noqa - -hiddenimports.extend(get_submodules(runway)) -hiddenimports.extend(get_submodules(troposphere)) -hiddenimports.extend(get_submodules(awacs)) -hiddenimports.extend(get_submodules(botocore)) -hiddenimports.extend(get_submodules(pip)) -hiddenimports.extend(get_submodules(wheel)) -hiddenimports.extend(get_submodules(distutils)) -hiddenimports.extend(get_submodules(yamllint)) -hiddenimports.extend(get_submodules(cfnlint)) -# needed due to pkg_resources dropping python2 support -# can be removed on the next pyinstaller release -# https://github.com/pypa/setuptools/issues/1963#issuecomment-582084099 -hiddenimports.append("pkg_resources.py2_warn") - -a = Entrypoint( - "runway", - "console_scripts", - "runway", - pathex=[CLI_PATH], - datas=data_files, - hiddenimports=hiddenimports, - win_no_prefer_redirects=False, - win_private_assemblies=False, - cipher=None, - noarchive=False, - binaries=[], -) -pyz = PYZ(a.pure, a.zipped_data, cipher=None) # noqa: F821 -exe = EXE( - pyz, # noqa: F821 - a.scripts, - [], - exclude_binaries=True, - # for some reason pyinstaller won't create the correct dir - # structure if this is the same name as a dir used in datas - name="runway-cli", - strip=False, - upx=True, - console=True, -) -coll = COLLECT( - exe, # noqa: F821 - a.binaries, - a.zipfiles, - a.datas, - name="runway", - strip=False, - upx=True, -) diff --git a/runway/__init__.py b/runway/__init__.py index aeda2c1bb..8f613a465 100644 --- a/runway/__init__.py +++ b/runway/__init__.py @@ -1,20 +1,23 @@ """Set package version.""" +from __future__ import annotations + import logging -import sys from ._logging import LogLevels, RunwayLogger # noqa: F401 logging.setLoggerClass(RunwayLogger) -if sys.version_info < (3, 8): - # importlib.metadata is standard lib for python>=3.8, use backport - from importlib_metadata import PackageNotFoundError, version # type: ignore -else: - from importlib.metadata import PackageNotFoundError, version # type: ignore - -try: - __version__ = version(__name__) -except PackageNotFoundError: - # package is not installed - __version__ = "0.0.0" +__version__: str = "0.0.0" +"""Version of the Python package presented as a :class:`string`. + +Dynamically set upon release by `poetry-dynamic-versioning `__. + +""" + +__version_tuple__: tuple[int, int, int] | tuple[int, int, int, str] = (0, 0, 0) +"""Version of the Python package presented as a :class:`tuple`. + +Dynamically set upon release by `poetry-dynamic-versioning `__. + +""" diff --git a/runway/_cli/commands/_deploy.py b/runway/_cli/commands/_deploy.py index 7c20ecde7..3498ab03b 100644 --- a/runway/_cli/commands/_deploy.py +++ b/runway/_cli/commands/_deploy.py @@ -2,7 +2,7 @@ # docs: file://./../../../docs/source/commands.rst import logging -from typing import Any, Tuple +from typing import Any import click from pydantic import ValidationError @@ -23,7 +23,7 @@ @options.tags @options.verbose @click.pass_context -def deploy(ctx: click.Context, debug: bool, tags: Tuple[str, ...], **_: Any) -> None: +def deploy(ctx: click.Context, debug: bool, tags: tuple[str, ...], **_: Any) -> None: """Deploy infrastructure as code. 
\b diff --git a/runway/_cli/commands/_destroy.py b/runway/_cli/commands/_destroy.py index 8a2a14a8c..ddc8802e1 100644 --- a/runway/_cli/commands/_destroy.py +++ b/runway/_cli/commands/_destroy.py @@ -2,7 +2,7 @@ # docs: file://./../../../docs/source/commands.rst import logging -from typing import Any, Tuple +from typing import Any import click from pydantic import ValidationError @@ -23,7 +23,7 @@ @options.tags @options.verbose @click.pass_context -def destroy(ctx: click.Context, debug: bool, tags: Tuple[str, ...], **_: Any) -> None: +def destroy(ctx: click.Context, debug: bool, tags: tuple[str, ...], **_: Any) -> None: """Destroy infrastructure as code. \b diff --git a/runway/_cli/commands/_docs.py b/runway/_cli/commands/_docs.py index b6a40e321..9c4e8f8f6 100644 --- a/runway/_cli/commands/_docs.py +++ b/runway/_cli/commands/_docs.py @@ -15,7 +15,7 @@ LOGGER = cast("RunwayLogger", logging.getLogger(__name__.replace("._", "."))) -DOCS_URL = "https://docs.onica.com/projects/runway/" +DOCS_URL = "https://runway.readthedocs.io/" @click.command("docs", short_help="open doc site") diff --git a/runway/_cli/commands/_envvars.py b/runway/_cli/commands/_envvars.py index ebf939d2f..1b5cda697 100644 --- a/runway/_cli/commands/_envvars.py +++ b/runway/_cli/commands/_envvars.py @@ -4,7 +4,7 @@ import logging import os import platform -from typing import TYPE_CHECKING, Any, Dict, cast +from typing import TYPE_CHECKING, Any, cast import click from pydantic import ValidationError @@ -41,9 +41,7 @@ def envvars(ctx: click.Context, debug: bool, **_: Any) -> None: ctx.obj.env.ci = True LOGGER.verbose("forced Runway to non-interactive mode to suppress prompts") try: - env_vars = Runway( - ctx.obj.runway_config, ctx.obj.get_runway_context() - ).get_env_vars() + env_vars = Runway(ctx.obj.runway_config, ctx.obj.get_runway_context()).get_env_vars() except ValidationError as err: LOGGER.error(err, exc_info=debug) ctx.exit(1) @@ -58,7 +56,7 @@ def envvars(ctx: click.Context, debug: bool, **_: Any) -> None: print_env_vars(env_vars) -def print_env_vars(env_vars: Dict[str, Any]) -> None: +def print_env_vars(env_vars: dict[str, Any]) -> None: """Print environment variables.""" if platform.system() == "Windows": if os.getenv("MSYSTEM", "").startswith("MINGW"): @@ -67,14 +65,14 @@ def print_env_vars(env_vars: Dict[str, Any]) -> None: return __print_env_vars_posix(env_vars) -def __print_env_vars_posix(env_vars: Dict[str, Any]) -> None: +def __print_env_vars_posix(env_vars: dict[str, Any]) -> None: """Print environment variables for bash.""" LOGGER.debug("using posix formatting for environment variable export") for key, val in env_vars.items(): click.echo(f'export {key}="{val}"') -def __print_env_vars_psh(env_vars: Dict[str, Any]) -> None: +def __print_env_vars_psh(env_vars: dict[str, Any]) -> None: """Print environment variables for Powershell.""" LOGGER.debug("using powershell formatting for environment variable export") for key, val in env_vars.items(): diff --git a/runway/_cli/commands/_gen_sample/_k8s_cfn_repo.py b/runway/_cli/commands/_gen_sample/_k8s_cfn_repo.py index da153f799..302f45828 100644 --- a/runway/_cli/commands/_gen_sample/_k8s_cfn_repo.py +++ b/runway/_cli/commands/_gen_sample/_k8s_cfn_repo.py @@ -6,7 +6,7 @@ from typing import TYPE_CHECKING, Any, cast import click -from cfn_flip import to_yaml +from cfn_flip import to_yaml # pyright: ignore[reportUnknownVariableType] from ....blueprints.k8s.k8s_iam import Iam from ....blueprints.k8s.k8s_master import Cluster diff --git 
a/runway/_cli/commands/_gen_sample/_k8s_flux_repo.py b/runway/_cli/commands/_gen_sample/_k8s_flux_repo.py index 75d6959fd..3c7f15981 100644 --- a/runway/_cli/commands/_gen_sample/_k8s_flux_repo.py +++ b/runway/_cli/commands/_gen_sample/_k8s_flux_repo.py @@ -42,9 +42,7 @@ def k8s_flux_repo(ctx: click.Context, **_: Any) -> None: copy_sample(ctx, tfstate_src_dir, dest / tfstate_src_dir.parts[-1]) tfstate_templates_dir = dest / "tfstate.cfn/templates" tfstate_templates_dir.mkdir() - write_tfstate_template( - tfstate_templates_dir / "tf_state.yml", bucket_deletion_policy="Delete" - ) + write_tfstate_template(tfstate_templates_dir / "tf_state.yml", bucket_deletion_policy="Delete") LOGGER.success("Sample k8s infrastructure repo created at %s", dest) LOGGER.notice("See the README for setup and deployment instructions.") diff --git a/runway/_cli/commands/_gen_sample/_k8s_tf_repo.py b/runway/_cli/commands/_gen_sample/_k8s_tf_repo.py index 3e9907ee9..c406ed618 100644 --- a/runway/_cli/commands/_gen_sample/_k8s_tf_repo.py +++ b/runway/_cli/commands/_gen_sample/_k8s_tf_repo.py @@ -35,9 +35,7 @@ def k8s_tf_repo(ctx: click.Context, **_: Any) -> None: tfstate_dir = dest / "tfstate.cfn/templates" tfstate_dir.mkdir() - write_tfstate_template( - tfstate_dir / "tf_state.yml", bucket_deletion_policy="Delete" - ) + write_tfstate_template(tfstate_dir / "tf_state.yml", bucket_deletion_policy="Delete") LOGGER.success("Sample k8s infrastructure repo created at %s", dest) LOGGER.notice("See the README for setup and deployment instructions.") diff --git a/runway/_cli/commands/_gen_sample/_tf.py b/runway/_cli/commands/_gen_sample/_tf.py index a6577edac..35129e483 100644 --- a/runway/_cli/commands/_gen_sample/_tf.py +++ b/runway/_cli/commands/_gen_sample/_tf.py @@ -22,7 +22,7 @@ @options.no_color @options.verbose @click.pass_context -def tf(ctx: click.Context, **_: Any) -> None: # pylint: disable=invalid-name +def tf(ctx: click.Context, **_: Any) -> None: """Generate a sample Terraform project.""" src = TEMPLATES / "terraform" dest = Path.cwd() / "sampleapp.tf" diff --git a/runway/_cli/commands/_gen_sample/utils.py b/runway/_cli/commands/_gen_sample/utils.py index fd80fc191..8236b73ec 100644 --- a/runway/_cli/commands/_gen_sample/utils.py +++ b/runway/_cli/commands/_gen_sample/utils.py @@ -5,7 +5,7 @@ from pathlib import Path import click -from cfn_flip import to_yaml +from cfn_flip import to_yaml # pyright: ignore[reportUnknownVariableType] from typing_extensions import Literal from ....blueprints.tf_state import TfState @@ -20,7 +20,7 @@ def convert_gitignore(src: Path) -> Path: """Rename a gitignore template. Keyword Args: - Path object for source file. + src: Path object for source file. Returns: The renamed file if it was created. diff --git a/runway/_cli/commands/_init.py b/runway/_cli/commands/_init.py index a4e08af90..a983cd122 100644 --- a/runway/_cli/commands/_init.py +++ b/runway/_cli/commands/_init.py @@ -2,7 +2,7 @@ # docs: file://./../../../docs/source/commands.rst import logging -from typing import Any, Tuple +from typing import Any import click from pydantic import ValidationError @@ -23,7 +23,7 @@ @options.tags @options.verbose @click.pass_context -def init(ctx: click.Context, debug: bool, tags: Tuple[str, ...], **_: Any) -> None: +def init(ctx: click.Context, debug: bool, tags: tuple[str, ...], **_: Any) -> None: """Run initialization/bootstrap steps. 
\b diff --git a/runway/_cli/commands/_kbenv/__init__.py b/runway/_cli/commands/_kbenv/__init__.py index 656d10b1f..db30f094f 100644 --- a/runway/_cli/commands/_kbenv/__init__.py +++ b/runway/_cli/commands/_kbenv/__init__.py @@ -1,7 +1,7 @@ """``runway kbenv`` command group.""" # docs: file://./../../../../docs/source/commands.rst -from typing import Any, List +from typing import Any import click @@ -13,7 +13,7 @@ __all__ = ["install", "list_installed", "run", "uninstall"] -COMMANDS: List[click.Command] = [install, list_installed, run, uninstall] +COMMANDS: list[click.Command] = [install, list_installed, run, uninstall] @click.group("kbenv", short_help="kubectl (install|run)") diff --git a/runway/_cli/commands/_kbenv/_list.py b/runway/_cli/commands/_kbenv/_list.py index 78abc9d8e..0d2478e0b 100644 --- a/runway/_cli/commands/_kbenv/_list.py +++ b/runway/_cli/commands/_kbenv/_list.py @@ -26,6 +26,4 @@ def list_installed(**_: Any) -> None: LOGGER.info("kubectl versions installed:") click.echo("\n".join(v.name for v in versions)) else: - LOGGER.warning( - "no versions of kubectl installed at path %s", kbenv.versions_dir - ) + LOGGER.warning("no versions of kubectl installed at path %s", kbenv.versions_dir) diff --git a/runway/_cli/commands/_kbenv/_run.py b/runway/_cli/commands/_kbenv/_run.py index c5ea3e4dc..8b01fc9fa 100644 --- a/runway/_cli/commands/_kbenv/_run.py +++ b/runway/_cli/commands/_kbenv/_run.py @@ -3,7 +3,7 @@ # docs: file://./../../../../docs/source/commands.rst import logging import subprocess -from typing import Any, Tuple +from typing import Any import click @@ -13,15 +13,13 @@ LOGGER = logging.getLogger(__name__.replace("._", ".")) -@click.command( - "run", short_help="run kubectl", context_settings={"ignore_unknown_options": True} -) +@click.command("run", short_help="run kubectl", context_settings={"ignore_unknown_options": True}) @click.argument("args", metavar="", nargs=-1, required=True) @options.debug @options.no_color @options.verbose @click.pass_context -def run(ctx: click.Context, args: Tuple[str, ...], **_: Any) -> None: +def run(ctx: click.Context, args: tuple[str, ...], **_: Any) -> None: """Run a kubectl command. Uses the version of kubectl specified in the ".kubectl-version" file @@ -31,4 +29,4 @@ def run(ctx: click.Context, args: Tuple[str, ...], **_: Any) -> None: before the kubectl command. 
""" - ctx.exit(subprocess.call([KBEnvManager().install()] + list(args))) + ctx.exit(subprocess.call([KBEnvManager().install(), *list(args)])) diff --git a/runway/_cli/commands/_kbenv/_uninstall.py b/runway/_cli/commands/_kbenv/_uninstall.py index 5e76b1c0a..08a05c7b7 100644 --- a/runway/_cli/commands/_kbenv/_uninstall.py +++ b/runway/_cli/commands/_kbenv/_uninstall.py @@ -1,8 +1,10 @@ """Uninstall kubectl version(s) that were installed by Runway and/or kbenv.""" # docs: file://./../../../../docs/source/commands.rst +from __future__ import annotations + import logging -from typing import TYPE_CHECKING, Any, Optional, cast +from typing import TYPE_CHECKING, Any, cast import click @@ -33,7 +35,7 @@ def uninstall( ctx: click.Context, *, - version: Optional[str] = None, + version: str | None = None, all_versions: bool = False, **_: Any, ) -> None: @@ -45,10 +47,7 @@ def uninstall( """ kbenv = KBEnvManager() version = version or (str(kbenv.version) if kbenv.version else None) - if version: - version_tuple = KBEnvManager.parse_version_string(version) - else: - version_tuple = kbenv.version + version_tuple = KBEnvManager.parse_version_string(version) if version else kbenv.version if version_tuple and not all_versions: if not kbenv.uninstall(version_tuple): ctx.exit(1) diff --git a/runway/_cli/commands/_new.py b/runway/_cli/commands/_new.py index 180d183a5..de397a4f2 100644 --- a/runway/_cli/commands/_new.py +++ b/runway/_cli/commands/_new.py @@ -15,7 +15,7 @@ LOGGER = cast("RunwayLogger", logging.getLogger(__name__.replace("._", "."))) RUNWAY_YML = """--- -# See full syntax at https://docs.onica.com/projects/runway +# See full syntax at https://runway.readthedocs.io deployments: - modules: - path: sampleapp.cfn @@ -37,16 +37,12 @@ def new(ctx: click.Context, **_: Any) -> None: LOGGER.verbose("checking for preexisting runway.yml file...") if runway_yml.is_file(): - LOGGER.error( - "There is already a %s file in the current directory", runway_yml.name - ) + LOGGER.error("There is already a %s file in the current directory", runway_yml.name) ctx.exit(1) - runway_yml.write_text( - RUNWAY_YML, encoding=locale.getpreferredencoding(do_setlocale=False) - ) + runway_yml.write_text(RUNWAY_YML, encoding=locale.getpreferredencoding(do_setlocale=False)) LOGGER.success("runway.yml generated") LOGGER.notice( "See addition getting started information at " - "https://docs.onica.com/projects/runway/page/getting_started.html" + "https://runway.readthedocs.io/page/getting_started.html" ) diff --git a/runway/_cli/commands/_plan.py b/runway/_cli/commands/_plan.py index ff9c10f2a..b78fd50c0 100644 --- a/runway/_cli/commands/_plan.py +++ b/runway/_cli/commands/_plan.py @@ -2,7 +2,7 @@ # docs: file://./../../../docs/source/commands.rst import logging -from typing import Any, Tuple +from typing import Any import click from pydantic import ValidationError @@ -23,7 +23,7 @@ @options.tags @options.verbose @click.pass_context -def plan(ctx: click.Context, debug: bool, tags: Tuple[str, ...], **_: Any) -> None: +def plan(ctx: click.Context, debug: bool, tags: tuple[str, ...], **_: Any) -> None: """Determine what infrastructure changes will occur during the next deploy. 
diff --git a/runway/_cli/commands/_plan.py b/runway/_cli/commands/_plan.py
index ff9c10f2a..b78fd50c0 100644
--- a/runway/_cli/commands/_plan.py
+++ b/runway/_cli/commands/_plan.py
@@ -2,7 +2,7 @@
 # docs: file://./../../../docs/source/commands.rst
 import logging
-from typing import Any, Tuple
+from typing import Any

 import click
 from pydantic import ValidationError
@@ -23,7 +23,7 @@
 @options.tags
 @options.verbose
 @click.pass_context
-def plan(ctx: click.Context, debug: bool, tags: Tuple[str, ...], **_: Any) -> None:
+def plan(ctx: click.Context, debug: bool, tags: tuple[str, ...], **_: Any) -> None:
     """Determine what infrastructure changes will occur during the next deploy.

     \b
diff --git a/runway/_cli/commands/_run_python.py b/runway/_cli/commands/_run_python.py
index 336a1b2e0..ff3c91abb 100644
--- a/runway/_cli/commands/_run_python.py
+++ b/runway/_cli/commands/_run_python.py
@@ -33,10 +33,8 @@ def run_python(filename: str, **_: Any) -> None:
     execglobals = globals().copy()
     # override name & file so script operates as if it were invoked directly
     execglobals.update({"__name__": "__main__", "__file__": filename})
-    exec(  # pylint: disable=exec-used
-        Path(filename).read_text(
-            encoding=locale.getpreferredencoding(do_setlocale=False)
-        ),
+    exec(  # noqa: S102
+        Path(filename).read_text(encoding=locale.getpreferredencoding(do_setlocale=False)),
         execglobals,
         execglobals,
     )
diff --git a/runway/_cli/commands/_schema/_cfngin.py b/runway/_cli/commands/_schema/_cfngin.py
index 963518b4d..075f4ad75 100644
--- a/runway/_cli/commands/_schema/_cfngin.py
+++ b/runway/_cli/commands/_schema/_cfngin.py
@@ -2,10 +2,11 @@

 from __future__ import annotations

+import json
 import locale
 import logging
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Optional, cast
+from typing import TYPE_CHECKING, Any, cast

 import click

@@ -37,7 +38,7 @@
     metavar="",
 )
 @options.verbose
-def cfngin(indent: int, output: Optional[str], **_: Any) -> None:
+def cfngin(indent: int, output: str | None, **_: Any) -> None:
     """Output JSON schema for CFNgin configuration files.

     The schema that is output can be used to validate configuration files.
@@ -45,7 +46,7 @@ def cfngin(indent: int, output: Optional[str], **_: Any) -> None:
     and suggestions within configuration files.

     """
-    content = CfnginConfigDefinitionModel.schema_json(indent=indent)
+    content = json.dumps(CfnginConfigDefinitionModel.model_json_schema(), indent=indent)
     if output:
         file_path = Path(output).absolute()
         file_path.write_text(  # append empty line to end of file
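The `_schema` commands (the `_cfngin.py` hunk above and the `_runway.py` hunk below) also capture the pydantic v1 to v2 migration: v2 drops `BaseModel.schema_json()` in favor of `model_json_schema()`, which returns a plain `dict` that the caller serializes itself. A self-contained illustration with a toy model, not one of Runway's actual config definition models:

    import json

    from pydantic import BaseModel


    class DeploymentModel(BaseModel):
        """Toy stand-in for a Runway/CFNgin config definition model."""

        name: str
        regions: list[str] = []


    # pydantic v1 (removed in v2): DeploymentModel.schema_json(indent=4)
    # pydantic v2: build the schema dict, then serialize it explicitly.
    print(json.dumps(DeploymentModel.model_json_schema(), indent=4))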
""" - content = RunwayConfigDefinitionModel.schema_json(indent=indent) + content = json.dumps(RunwayConfigDefinitionModel.model_json_schema(), indent=indent) if output: file_path = Path(output).absolute() file_path.write_text( # append empty line to end of file diff --git a/runway/_cli/commands/_tfenv/_install.py b/runway/_cli/commands/_tfenv/_install.py index 4bfa49064..ffc38c774 100644 --- a/runway/_cli/commands/_tfenv/_install.py +++ b/runway/_cli/commands/_tfenv/_install.py @@ -1,8 +1,10 @@ """Install a version of Terraform.""" # docs: file://./../../../../docs/source/commands.rst +from __future__ import annotations + import logging -from typing import Any, Optional +from typing import Any import click @@ -19,7 +21,7 @@ @options.no_color @options.verbose @click.pass_context -def install(ctx: click.Context, version: Optional[str] = None, **_: Any) -> None: +def install(ctx: click.Context, version: str | None = None, **_: Any) -> None: """Install the specified of Terraform (e.g. 0.12.0). If no version is specified, Runway will attempt to find and read a @@ -28,13 +30,11 @@ def install(ctx: click.Context, version: Optional[str] = None, **_: Any) -> None """ try: - LOGGER.debug( - "terraform path: %s", TFEnvManager().install(version_requested=version) - ) + LOGGER.debug("terraform path: %s", TFEnvManager().install(version_requested=version)) except ValueError as err: LOGGER.debug("terraform install failed", exc_info=True) if "unable to find" not in str(err): - LOGGER.error( + LOGGER.error( # noqa: G201 "unexpected error encountered when trying to install Terraform", exc_info=True, ) diff --git a/runway/_cli/commands/_tfenv/_list.py b/runway/_cli/commands/_tfenv/_list.py index a9a46685c..e54a41b5d 100644 --- a/runway/_cli/commands/_tfenv/_list.py +++ b/runway/_cli/commands/_tfenv/_list.py @@ -26,6 +26,4 @@ def list_installed(**_: Any) -> None: LOGGER.info("Terraform versions installed:") click.echo("\n".join(v.name for v in versions)) else: - LOGGER.warning( - "no versions of Terraform installed at path %s", tfenv.versions_dir - ) + LOGGER.warning("no versions of Terraform installed at path %s", tfenv.versions_dir) diff --git a/runway/_cli/commands/_tfenv/_run.py b/runway/_cli/commands/_tfenv/_run.py index 31465c776..031f15602 100644 --- a/runway/_cli/commands/_tfenv/_run.py +++ b/runway/_cli/commands/_tfenv/_run.py @@ -3,7 +3,7 @@ # docs: file://./../../../../docs/source/commands.rst import logging import subprocess -from typing import Any, Tuple +from typing import Any import click @@ -14,15 +14,13 @@ LOGGER = logging.getLogger(__name__.replace("._", ".")) -@click.command( - "run", short_help="run terraform", context_settings={"ignore_unknown_options": True} -) +@click.command("run", short_help="run terraform", context_settings={"ignore_unknown_options": True}) @click.argument("args", metavar="", nargs=-1, required=True) @options.debug @options.no_color @options.verbose @click.pass_context -def run(ctx: click.Context, args: Tuple[str, ...], **_: Any) -> None: +def run(ctx: click.Context, args: tuple[str, ...], **_: Any) -> None: """Run a Terraform command. 
Uses the version of Terraform specified in the ".terraform-version" file @@ -33,11 +31,11 @@ def run(ctx: click.Context, args: Tuple[str, ...], **_: Any) -> None: """ try: - ctx.exit(subprocess.call([TFEnvManager().install()] + list(args))) + ctx.exit(subprocess.call([TFEnvManager().install(), *list(args)])) except ValueError as err: LOGGER.debug("terraform install failed", exc_info=True) if "unable to find" not in str(err): - LOGGER.error( + LOGGER.error( # noqa: G201 "unexpected error encountered when trying to install Terraform", exc_info=True, ) diff --git a/runway/_cli/commands/_tfenv/_uninstall.py b/runway/_cli/commands/_tfenv/_uninstall.py index 41a12293a..6a3b522bc 100644 --- a/runway/_cli/commands/_tfenv/_uninstall.py +++ b/runway/_cli/commands/_tfenv/_uninstall.py @@ -1,8 +1,10 @@ """Uninstall Terraform version(s) that were installed by Runway and/or tfenv.""" # docs: file://./../../../../docs/source/commands.rst +from __future__ import annotations + import logging -from typing import TYPE_CHECKING, Any, Optional, cast +from typing import TYPE_CHECKING, Any, cast import click @@ -33,7 +35,7 @@ def uninstall( ctx: click.Context, *, - version: Optional[str] = None, + version: str | None = None, all_versions: bool = False, **_: Any, ) -> None: diff --git a/runway/_cli/logs.py b/runway/_cli/logs.py index 9abf1b8f1..93b49474c 100644 --- a/runway/_cli/logs.py +++ b/runway/_cli/logs.py @@ -2,7 +2,7 @@ import logging import os -from typing import Any, Dict +from typing import Any import coloredlogs @@ -15,7 +15,7 @@ LOG_FORMAT = "[runway] %(message)s" LOG_FORMAT_VERBOSE = logging.BASIC_FORMAT -LOG_FIELD_STYLES: Dict[str, Dict[str, Any]] = { +LOG_FIELD_STYLES: dict[str, dict[str, Any]] = { "asctime": {}, "hostname": {}, "levelname": {}, @@ -24,7 +24,7 @@ "prefix": {}, "programname": {}, } -LOG_LEVEL_STYLES: Dict[str, Dict[str, Any]] = { +LOG_LEVEL_STYLES: dict[str, dict[str, Any]] = { "critical": {"color": "red", "bold": True}, "debug": {"color": "green"}, "error": {"color": "red"}, @@ -46,9 +46,7 @@ class LogSettings: "level_styles": os.getenv("RUNWAY_LOG_LEVEL_STYLES"), } - def __init__( - self, *, debug: int = 0, no_color: bool = False, verbose: bool = False - ): + def __init__(self, *, debug: int = 0, no_color: bool = False, verbose: bool = False) -> None: """Instantiate class. Args: @@ -62,7 +60,7 @@ def __init__( self.verbose = verbose @property - def coloredlogs(self) -> Dict[str, Any]: + def coloredlogs(self) -> dict[str, Any]: """Return settings for coloredlogs.""" return { "fmt": self.fmt, @@ -85,7 +83,7 @@ def fmt(self) -> str: return LOG_FORMAT @cached_property - def field_styles(self) -> Dict[str, Any]: + def field_styles(self) -> dict[str, Any]: """Return log field styles. If "RUNWAY_LOG_FIELD_STYLES" exists in the environment, it will be @@ -98,14 +96,12 @@ def field_styles(self) -> Dict[str, Any]: result = LOG_FIELD_STYLES.copy() if self.ENV["field_styles"]: result.update( - coloredlogs.parse_encoded_styles( # type: ignore - self.ENV["field_styles"] - ) + coloredlogs.parse_encoded_styles(self.ENV["field_styles"]) # type: ignore ) return result @cached_property - def level_styles(self) -> Dict[str, Any]: + def level_styles(self) -> dict[str, Any]: """Return log level styles. 
If "RUNWAY_LOG_LEVEL_STYLES" exists in the environment, it will be @@ -118,9 +114,7 @@ def level_styles(self) -> Dict[str, Any]: result = LOG_LEVEL_STYLES.copy() if self.ENV["level_styles"]: result.update( - coloredlogs.parse_encoded_styles( # type: ignore - self.ENV["level_styles"] - ) + coloredlogs.parse_encoded_styles(self.ENV["level_styles"]) # type: ignore ) return result @@ -134,9 +128,7 @@ def log_level(self) -> LogLevels: return LogLevels.INFO -def setup_logging( - *, debug: int = 0, no_color: bool = False, verbose: bool = False -) -> None: +def setup_logging(*, debug: int = 0, no_color: bool = False, verbose: bool = False) -> None: """Configure log settings for Runway CLI. Keyword Args: diff --git a/runway/_cli/main.py b/runway/_cli/main.py index c6ef0dec1..54470636e 100644 --- a/runway/_cli/main.py +++ b/runway/_cli/main.py @@ -3,7 +3,7 @@ import argparse import logging import os -from typing import Any, Dict +from typing import Any import click @@ -15,7 +15,7 @@ LOGGER = logging.getLogger("runway.cli") -CLICK_CONTEXT_SETTINGS: Dict[str, Any] = { +CLICK_CONTEXT_SETTINGS: dict[str, Any] = { "help_option_names": ["-h", "--help"], "max_content_width": 999, } @@ -34,7 +34,7 @@ def invoke(self, ctx: click.Context) -> Any: return super().invoke(ctx) @staticmethod - def __parse_global_options(ctx: click.Context) -> Dict[str, Any]: + def __parse_global_options(ctx: click.Context) -> dict[str, Any]: """Parse global options. These options are passed to subcommands but, should be parsed by the @@ -44,20 +44,14 @@ def __parse_global_options(ctx: click.Context) -> Dict[str, Any]: """ parser = argparse.ArgumentParser(add_help=False) parser.add_argument("--ci", action="store_true", default=bool(os.getenv("CI"))) - parser.add_argument( - "--debug", default=int(os.getenv("DEBUG", "0")), action="count" - ) - parser.add_argument( - "-e", "--deploy-environment", default=os.getenv("DEPLOY_ENVIRONMENT") - ) + parser.add_argument("--debug", default=int(os.getenv("DEBUG", "0")), action="count") + parser.add_argument("-e", "--deploy-environment", default=os.getenv("DEPLOY_ENVIRONMENT")) parser.add_argument( "--no-color", action="store_true", default=bool(os.getenv("RUNWAY_NO_COLOR")), ) - parser.add_argument( - "--verbose", action="store_true", default=bool(os.getenv("VERBOSE")) - ) + parser.add_argument("--verbose", action="store_true", default=bool(os.getenv("VERBOSE"))) args, _ = parser.parse_known_args(list(ctx.args)) return vars(args) @@ -71,13 +65,11 @@ def __parse_global_options(ctx: click.Context) -> Dict[str, Any]: def cli(ctx: click.Context, **_: Any) -> None: """Runway CLI. - Full documentation available at https://docs.onica.com/projects/runway/ + Full documentation available at https://runway.readthedocs.io/ """ opts = ctx.meta["global.options"] - setup_logging( - debug=opts["debug"], no_color=opts["no_color"], verbose=opts["verbose"] - ) + setup_logging(debug=opts["debug"], no_color=opts["no_color"], verbose=opts["verbose"]) ctx.obj = CliContext(**opts) diff --git a/runway/_cli/options.py b/runway/_cli/options.py index 463ca6630..2844fa4f6 100644 --- a/runway/_cli/options.py +++ b/runway/_cli/options.py @@ -14,8 +14,7 @@ "--debug", count=True, envvar="DEBUG", - help="Supply once to display Runway debug logs. " - "Supply twice to display all debug logs.", + help="Supply once to display Runway debug logs. 
Supply twice to display all debug logs.", ) deploy_environment = click.option( diff --git a/runway/_cli/utils.py b/runway/_cli/utils.py index be895ed40..ca6f4ccce 100644 --- a/runway/_cli/utils.py +++ b/runway/_cli/utils.py @@ -6,21 +6,25 @@ import os import sys from pathlib import Path -from typing import Any, Iterator, List, Optional, Tuple +from typing import TYPE_CHECKING, Any import click import yaml from ..compat import cached_property from ..config import RunwayConfig -from ..config.components.runway import ( - RunwayDeploymentDefinition, - RunwayModuleDefinition, -) from ..context import RunwayContext from ..core.components import DeployEnvironment from ..exceptions import ConfigNotFound +if TYPE_CHECKING: + from collections.abc import Iterator + + from ..config.components.runway import ( + RunwayDeploymentDefinition, + RunwayModuleDefinition, + ) + LOGGER = logging.getLogger(__name__) @@ -32,7 +36,7 @@ def __init__( *, ci: bool = False, debug: int = 0, - deploy_environment: Optional[str] = None, + deploy_environment: str | None = None, verbose: bool = False, **_: Any, ) -> None: @@ -95,7 +99,7 @@ def runway_config_path(self) -> Path: sys.exit(1) def get_runway_context( - self, deploy_environment: Optional[DeployEnvironment] = None + self, deploy_environment: DeployEnvironment | None = None ) -> RunwayContext: """Get a Runway context object. @@ -104,7 +108,7 @@ def get_runway_context( Args: deploy_environment: Object representing the current deploy environment. - Returns + Returns: RunwayContext """ @@ -168,9 +172,9 @@ def __str__(self) -> str: def select_deployments( ctx: click.Context, - deployments: List[RunwayDeploymentDefinition], - tags: Optional[Tuple[str, ...]] = None, -) -> List[RunwayDeploymentDefinition]: + deployments: list[RunwayDeploymentDefinition], + tags: tuple[str, ...] | None = None, +) -> list[RunwayDeploymentDefinition]: """Select which deployments to run. Uses tags, interactive prompts, or selects all. @@ -178,6 +182,7 @@ def select_deployments( Args: ctx: Current click context. deployments: List of deployment(s) to choose from. + tags: Deployment tags to filter. Returns: Selected deployment(s). @@ -192,9 +197,7 @@ def select_deployments( LOGGER.debug("only one deployment detected; no selection necessary") else: # build the menu before displaying it so debug logs don't break up what is printed - deployment_menu = yaml.safe_dump( - {i + 1: d.menu_entry for i, d in enumerate(deployments)} - ) + deployment_menu = yaml.safe_dump({i + 1: d.menu_entry for i, d in enumerate(deployments)}) click.secho("\nConfigured deployments\n", bold=True, underline=True) click.echo(deployment_menu) if ctx.command.name == "destroy": @@ -206,9 +209,7 @@ def select_deployments( 'Enter number of deployment to run (or "all")', default="all", show_choices=False, - type=click.Choice( - [str(n) for n in range(1, len(deployments) + 1)] + ["all"] - ), + type=click.Choice([str(n) for n in range(1, len(deployments) + 1)] + ["all"]), ) if choice != "all": deployments = [deployments[int(choice) - 1]] @@ -217,8 +218,8 @@ def select_deployments( def select_modules( - ctx: click.Context, modules: List[RunwayModuleDefinition] -) -> List[RunwayModuleDefinition]: + ctx: click.Context, modules: list[RunwayModuleDefinition] +) -> list[RunwayModuleDefinition]: """Interactively select which modules to run. 
Args: @@ -233,8 +234,7 @@ def select_modules( LOGGER.debug("only one module detected; no selection necessary") if ctx.command.name == "destroy": LOGGER.info( - "Only one module detected; all modules " - "automatically selected for deletion." + "Only one module detected; all modules automatically selected for deletion." ) if not click.confirm("Proceed?"): ctx.exit(0) @@ -243,8 +243,7 @@ def select_modules( click.echo(yaml.safe_dump({i + 1: m.menu_entry for i, m in enumerate(modules)})) if ctx.command.name == "destroy": click.echo( - '(operating in destroy mode -- "all" will destroy all ' - "modules in reverse order)\n" + '(operating in destroy mode -- "all" will destroy all modules in reverse order)\n' ) choice = click.prompt( 'Enter number of module to run (or "all")', @@ -263,9 +262,9 @@ def select_modules( def select_modules_using_tags( ctx: click.Context, - deployments: List[RunwayDeploymentDefinition], - tags: Tuple[str, ...], -) -> List[RunwayDeploymentDefinition]: + deployments: list[RunwayDeploymentDefinition], + tags: tuple[str, ...], +) -> list[RunwayDeploymentDefinition]: """Select modules to run using tags. Args: @@ -277,9 +276,9 @@ def select_modules_using_tags( List of selected deployments with selected modules. """ - deployments_to_run: List[RunwayDeploymentDefinition] = [] + deployments_to_run: list[RunwayDeploymentDefinition] = [] for deployment in deployments: - modules_to_run: List[RunwayModuleDefinition] = [] + modules_to_run: list[RunwayModuleDefinition] = [] for module in deployment.modules: if module.child_modules: module.child_modules = [ diff --git a/runway/_logging.py b/runway/_logging.py index b80df2b67..15ea7592f 100644 --- a/runway/_logging.py +++ b/runway/_logging.py @@ -1,8 +1,13 @@ """Runway logging.""" +from __future__ import annotations + import logging from enum import IntEnum -from typing import Any, MutableMapping, Tuple, Union +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from collections.abc import MutableMapping class LogLevels(IntEnum): @@ -21,7 +26,7 @@ class LogLevels(IntEnum): @classmethod def has_value(cls, value: int) -> bool: """Check if IntEnum has a value.""" - return value in cls._value2member_map_ # pylint: disable=no-member + return value in cls._value2member_map_ # Issue with this version of LoggerAdapter https://github.com/python/typeshed/issues/7855 @@ -54,18 +59,20 @@ def __init__( self.prefix = prefix self.prefix_template = prefix_template - def notice(self, msg: Union[Exception, str], *args: Any, **kwargs: Any) -> None: + def notice(self, msg: Exception | str, *args: Any, **kwargs: Any) -> None: """Delegate a notice call to the underlying logger. Args: msg: String template or exception to use for the log record. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ self.log(LogLevels.NOTICE, msg, *args, **kwargs) def process( - self, msg: Union[Exception, str], kwargs: MutableMapping[str, Any] - ) -> Tuple[str, MutableMapping[str, Any]]: + self, msg: Exception | str, kwargs: MutableMapping[str, Any] + ) -> tuple[str, MutableMapping[str, Any]]: """Process the message to append the prefix. Args: @@ -75,7 +82,7 @@ def process( """ return self.prefix_template.format(prefix=self.prefix, msg=msg), kwargs - def setLevel(self, level: Union[int, str]) -> None: # noqa + def setLevel(self, level: int | str) -> None: # noqa: N802 """Set the specified level on the underlying logger. Python 2 backport. 
@@ -83,20 +90,24 @@ def setLevel(self, level: Union[int, str]) -> None: # noqa """ self.logger.setLevel(level) - def success(self, msg: Union[Exception, str], *args: Any, **kwargs: Any) -> None: + def success(self, msg: Exception | str, *args: Any, **kwargs: Any) -> None: """Delegate a success call to the underlying logger. Args: msg: String template or exception to use for the log record. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ self.log(LogLevels.SUCCESS, msg, *args, **kwargs) - def verbose(self, msg: Union[Exception, str], *args: Any, **kwargs: Any) -> None: + def verbose(self, msg: Exception | str, *args: Any, **kwargs: Any) -> None: """Delegate a verbose call to the underlying logger. Args: msg: String template or exception to use for the log record. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ self.log(LogLevels.VERBOSE, msg, *args, **kwargs) @@ -105,7 +116,7 @@ def verbose(self, msg: Union[Exception, str], *args: Any, **kwargs: Any) -> None class RunwayLogger(logging.Logger): """Extend built-in logger with additional levels.""" - def __init__(self, name: str, level: Union[int, str] = logging.NOTSET) -> None: + def __init__(self, name: str, level: int | str = logging.NOTSET) -> None: """Instantiate the class. Args: @@ -118,31 +129,37 @@ def __init__(self, name: str, level: Union[int, str] = logging.NOTSET) -> None: logging.addLevelName(LogLevels.NOTICE, LogLevels.NOTICE.name) logging.addLevelName(LogLevels.SUCCESS, LogLevels.SUCCESS.name) - def notice(self, msg: Union[Exception, str], *args: Any, **kwargs: Any) -> None: + def notice(self, msg: Exception | str, *args: Any, **kwargs: Any) -> None: """Log 'msg % args' with severity `NOTICE`. Args: msg: String template or exception to use for the log record. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ if self.isEnabledFor(LogLevels.NOTICE): self._log(LogLevels.NOTICE, msg, args, **kwargs) - def success(self, msg: Union[Exception, str], *args: Any, **kwargs: Any) -> None: + def success(self, msg: Exception | str, *args: Any, **kwargs: Any) -> None: """Log 'msg % args' with severity `SUCCESS`. Args: msg: String template or exception to use for the log record. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ if self.isEnabledFor(LogLevels.SUCCESS): self._log(LogLevels.SUCCESS, msg, args, **kwargs) - def verbose(self, msg: Union[Exception, str], *args: Any, **kwargs: Any) -> None: + def verbose(self, msg: Exception | str, *args: Any, **kwargs: Any) -> None: """Log 'msg % args' with severity `VERBOSE`. Args: msg: String template or exception to use for the log record. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. 
""" if self.isEnabledFor(LogLevels.VERBOSE): diff --git a/runway/aws_sso_botocore/credentials.py b/runway/aws_sso_botocore/credentials.py index e842a9767..ee532ed4d 100644 --- a/runway/aws_sso_botocore/credentials.py +++ b/runway/aws_sso_botocore/credentials.py @@ -248,7 +248,6 @@ class SSOProvider(CredentialProvider): "sso_account_id", ] - # pylint: disable=super-init-not-called def __init__( self, load_config, client_creator, profile_name, cache=None, token_cache=None ): diff --git a/runway/blueprints/k8s/k8s_iam.py b/runway/blueprints/k8s/k8s_iam.py index 75c33443d..57397dd71 100755 --- a/runway/blueprints/k8s/k8s_iam.py +++ b/runway/blueprints/k8s/k8s_iam.py @@ -52,9 +52,7 @@ def create_template(self) -> None: ) nodeinstanceprofile = template.add_resource( - iam.InstanceProfile( - "NodeInstanceProfile", Path="/", Roles=[nodeinstancerole.ref()] - ) + iam.InstanceProfile("NodeInstanceProfile", Path="/", Roles=[nodeinstancerole.ref()]) ) template.add_output( Output( @@ -105,6 +103,4 @@ def create_template(self) -> None: if __name__ == "__main__": from runway.context import CfnginContext - print( # noqa: T201 - Iam("test", CfnginContext(parameters={"namespace": "test"})).to_json() - ) + print(Iam("test", CfnginContext(parameters={"namespace": "test"})).to_json()) # noqa: T201 diff --git a/runway/blueprints/k8s/k8s_master.py b/runway/blueprints/k8s/k8s_master.py index d253f2457..7fc8c7318 100755 --- a/runway/blueprints/k8s/k8s_master.py +++ b/runway/blueprints/k8s/k8s_master.py @@ -159,6 +159,4 @@ def create_template(self) -> None: if __name__ == "__main__": from runway.context import CfnginContext - print( # noqa: T201 - Cluster("test", CfnginContext(parameters={"namespace": "test"})).to_json() - ) + print(Cluster("test", CfnginContext(parameters={"namespace": "test"})).to_json()) # noqa: T201 diff --git a/runway/blueprints/k8s/k8s_workers.py b/runway/blueprints/k8s/k8s_workers.py index 8181b5749..18caa7c55 100755 --- a/runway/blueprints/k8s/k8s_workers.py +++ b/runway/blueprints/k8s/k8s_workers.py @@ -22,18 +22,18 @@ def get_valid_instance_types() -> Any: """Return list of instance types from either a JSON or gzipped JSON file.""" - base_path = os.path.join( - os.path.dirname(botocore.__file__), "data", "ec2", "2016-11-15" + base_path = os.path.join( # noqa: PTH118 + os.path.dirname(botocore.__file__), "data", "ec2", "2016-11-15" # noqa: PTH120 ) - json_path = os.path.join(base_path, "service-2.json") - gzip_path = os.path.join(base_path, "service-2.json.gz") + json_path = os.path.join(base_path, "service-2.json") # noqa: PTH118 + gzip_path = os.path.join(base_path, "service-2.json.gz") # noqa: PTH118 - if os.path.exists(gzip_path): + if os.path.exists(gzip_path): # noqa: PTH110 with gzip.open(gzip_path, "rt", encoding="utf-8") as stream: data = json.load(stream) - elif os.path.exists(json_path): - with open(json_path, "r", encoding="utf-8") as stream: + elif os.path.exists(json_path): # noqa: PTH110 + with open(json_path, encoding="utf-8") as stream: # noqa: PTH123 data = json.load(stream) else: raise FileNotFoundError("Neither JSON nor gzipped JSON file found.") @@ -47,8 +47,7 @@ class NodeGroup(Blueprint): VARIABLES = { "KeyName": { "type": CFNString, # string to allow it to be unset - "description": "(Optional) EC2 Key Pair to allow SSH " - "access to the instances", + "description": "(Optional) EC2 Key Pair to allow SSH access to the instances", "default": "", }, "NodeImageId": { @@ -57,10 +56,10 @@ class NodeGroup(Blueprint): }, "NodeInstanceType": { "type": CFNString, - 
"description": "EC2 instance type for the node " "instances", + "description": "EC2 instance type for the node instances", "default": "t2.medium", "allowed_values": get_valid_instance_types(), - "constraint_description": "Must be a valid EC2 " "instance type", + "constraint_description": "Must be a valid EC2 instance type", }, "NodeInstanceProfile": { "type": CFNString, @@ -68,12 +67,12 @@ class NodeGroup(Blueprint): }, "NodeAutoScalingGroupMinSize": { "type": CFNNumber, - "description": "Minimum size of Node " "Group ASG.", + "description": "Minimum size of Node Group ASG.", "default": 1, }, "NodeAutoScalingGroupMaxSize": { "type": CFNNumber, - "description": "Maximum size of Node " "Group ASG.", + "description": "Maximum size of Node Group ASG.", "default": 3, }, "NodeVolumeSize": { @@ -98,16 +97,16 @@ class NodeGroup(Blueprint): }, "NodeGroupName": { "type": CFNString, - "description": "Unique identifier for the Node " "Group.", + "description": "Unique identifier for the Node Group.", }, "ClusterControlPlaneSecurityGroup": { "type": EC2SecurityGroupId, - "description": "The security " "group of the " "cluster control " "plane.", + "description": "The security group of the cluster control plane.", }, "VpcId": {"type": EC2VPCId, "description": "The VPC of the worker instances"}, "Subnets": { "type": EC2SubnetIdList, - "description": "The subnets where workers can be " "created.", + "description": "The subnets where workers can be created.", }, "UseDesiredInstanceCount": { "type": CFNString, @@ -120,8 +119,7 @@ def create_template(self) -> None: template = self.template template.set_version("2010-09-09") template.set_description( - "Kubernetes workers via EKS - V1.0.0 " - "- compatible with amazon-eks-node-v23+" + "Kubernetes workers via EKS - V1.0.0 - compatible with amazon-eks-node-v23+" ) # Metadata @@ -159,9 +157,7 @@ def create_template(self) -> None: }, { "Label": {"default": "Worker Network Configuration"}, - "Parameters": [ - self.variables[i].name for i in ["VpcId", "Subnets"] - ], + "Parameters": [self.variables[i].name for i in ["VpcId", "Subnets"]], }, ] } @@ -173,9 +169,7 @@ def create_template(self) -> None: "DesiredInstanceCountSpecified", Equals(self.variables["UseDesiredInstanceCount"].ref, "true"), ) - template.add_condition( - "KeyNameSpecified", Not(Equals(self.variables["KeyName"].ref, "")) - ) + template.add_condition("KeyNameSpecified", Not(Equals(self.variables["KeyName"].ref, ""))) # Resources nodesecuritygroup = template.add_resource( @@ -215,9 +209,7 @@ def create_template(self) -> None: Description="Allow worker Kubelets and pods to receive " "communication from the cluster control plane", GroupId=nodesecuritygroup.ref(), - SourceSecurityGroupId=self.variables[ - "ClusterControlPlaneSecurityGroup" - ].ref, + SourceSecurityGroupId=self.variables["ClusterControlPlaneSecurityGroup"].ref, IpProtocol="tcp", FromPort=1025, ToPort=65535, @@ -242,9 +234,7 @@ def create_template(self) -> None: "443 to receive communication from cluster " "control plane", GroupId=nodesecuritygroup.ref(), - SourceSecurityGroupId=self.variables[ - "ClusterControlPlaneSecurityGroup" - ].ref, # noqa + SourceSecurityGroupId=self.variables["ClusterControlPlaneSecurityGroup"].ref, IpProtocol="tcp", FromPort=443, ToPort=443, @@ -266,7 +256,7 @@ def create_template(self) -> None: template.add_resource( ec2.SecurityGroupIngress( "ClusterControlPlaneSecurityGroupIngress", - Description="Allow pods to communicate with the cluster API " "Server", + Description="Allow pods to communicate with the 
cluster API Server", GroupId=self.variables["ClusterControlPlaneSecurityGroup"].ref, SourceSecurityGroupId=nodesecuritygroup.ref(), IpProtocol="tcp", @@ -294,9 +284,7 @@ def create_template(self) -> None: ), ImageId=self.variables["NodeImageId"].ref, InstanceType=self.variables["NodeInstanceType"].ref, - KeyName=If( - "KeyNameSpecified", self.variables["KeyName"].ref, NoValue - ), + KeyName=If("KeyNameSpecified", self.variables["KeyName"].ref, NoValue), MetadataOptions=ec2.MetadataOptions( HttpPutResponseHopLimit=2, HttpEndpoint="enabled", @@ -305,7 +293,7 @@ def create_template(self) -> None: SecurityGroupIds=[nodesecuritygroup.ref()], UserData=Base64( Sub( - "\n".join( + "\n".join( # noqa: FLY002 [ "#!/bin/bash", "set -o xtrace", @@ -342,12 +330,8 @@ def create_template(self) -> None: MinSize=self.variables["NodeAutoScalingGroupMinSize"].ref, MaxSize=self.variables["NodeAutoScalingGroupMaxSize"].ref, Tags=[ - autoscaling.Tag( - "Name", Sub("${ClusterName}-${NodeGroupName}-Node"), True - ), - autoscaling.Tag( - Sub("kubernetes.io/cluster/${ClusterName}"), "owned", True - ), + autoscaling.Tag("Name", Sub("${ClusterName}-${NodeGroupName}-Node"), True), + autoscaling.Tag(Sub("kubernetes.io/cluster/${ClusterName}"), "owned", True), ], VPCZoneIdentifier=self.variables["Subnets"].ref, UpdatePolicy=UpdatePolicy( diff --git a/runway/blueprints/staticsite/auth_at_edge.py b/runway/blueprints/staticsite/auth_at_edge.py index 04ed244b6..fd30bc002 100644 --- a/runway/blueprints/staticsite/auth_at_edge.py +++ b/runway/blueprints/staticsite/auth_at_edge.py @@ -8,7 +8,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional +from typing import TYPE_CHECKING, Any, ClassVar import awacs.logs import awacs.s3 @@ -27,7 +27,7 @@ class AuthAtEdge(StaticSite): """Auth@Edge Blueprint.""" - AUTH_VARIABLES: Dict[str, BlueprintVariableTypeDef] = { + AUTH_VARIABLES: dict[str, BlueprintVariableTypeDef] = { "OAuthScopes": {"type": list, "default": [], "description": "OAuth2 Scopes"}, "PriceClass": { "type": str, @@ -44,8 +44,7 @@ class AuthAtEdge(StaticSite): "RedirectPathAuthRefresh": { "type": str, "default": "/refreshauth", - "description": "The URL path that should " - "handle the JWT refresh request.", + "description": "The URL path that should handle the JWT refresh request.", }, "NonSPAMode": { "type": bool, @@ -59,14 +58,14 @@ class AuthAtEdge(StaticSite): }, } IAM_ARN_PREFIX = "arn:aws:iam::aws:policy/service-role/" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = {} + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = {} def __init__( self, name: str, context: CfnginContext, - mappings: Optional[Dict[str, Dict[str, Any]]] = None, - description: Optional[str] = None, + mappings: dict[str, dict[str, Any]] | None = None, + description: str | None = None, ) -> None: """Initialize the Blueprint. @@ -77,9 +76,7 @@ def __init__( description: Used to describe the resulting CloudFormation template. 
""" - super().__init__( - name=name, context=context, description=description, mappings=mappings - ) + super().__init__(name=name, context=context, description=description, mappings=mappings) self.VARIABLES.update(StaticSite.VARIABLES) self.VARIABLES.update(self.AUTH_VARIABLES) @@ -94,8 +91,8 @@ def create_template(self) -> None: bucket = self.add_bucket() oai = self.add_origin_access_identity() bucket_policy = self.add_cloudfront_bucket_policy(bucket, oai) - # TODO Make this available in Auth@Edge - lambda_function_associations: List[cloudfront.LambdaFunctionAssociation] = [] + # TODO (kyle): make this available in Auth@Edge + lambda_function_associations: list[cloudfront.LambdaFunctionAssociation] = [] if self.directory_index_specified: index_rewrite = self._get_index_rewrite_role_function_and_version() @@ -109,36 +106,28 @@ def create_template(self) -> None: check_auth_name, "Check Authorization information for request", "check_auth", - self.add_lambda_execution_role( - "CheckAuthLambdaExecutionRole", check_auth_name - ), + self.add_lambda_execution_role("CheckAuthLambdaExecutionRole", check_auth_name), ) http_headers_name = "HttpHeaders" http_headers_lambda = self.get_auth_at_edge_lambda_and_ver( http_headers_name, "Additional Headers added to every response", "http_headers", - self.add_lambda_execution_role( - "HttpHeadersLambdaExecutionRole", http_headers_name - ), + self.add_lambda_execution_role("HttpHeadersLambdaExecutionRole", http_headers_name), ) parse_auth_name = "ParseAuth" parse_auth_lambda = self.get_auth_at_edge_lambda_and_ver( parse_auth_name, "Parse the Authorization Headers/Cookies for the request", "parse_auth", - self.add_lambda_execution_role( - "ParseAuthLambdaExecutionRole", parse_auth_name - ), + self.add_lambda_execution_role("ParseAuthLambdaExecutionRole", parse_auth_name), ) refresh_auth_name = "RefreshAuth" refresh_auth_lambda = self.get_auth_at_edge_lambda_and_ver( refresh_auth_name, "Refresh the Authorization information when expired", "refresh_auth", - self.add_lambda_execution_role( - "RefreshAuthLambdaExecutionRole", refresh_auth_name - ), + self.add_lambda_execution_role("RefreshAuthLambdaExecutionRole", refresh_auth_name), ) sign_out_name = "SignOut" sign_out_lambda = self.get_auth_at_edge_lambda_and_ver( @@ -163,7 +152,7 @@ def create_template(self) -> None: def get_auth_at_edge_lambda_and_ver( self, title: str, description: str, handle: str, role: iam.Role - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """Create a lambda function and its version. Args: @@ -217,9 +206,7 @@ def get_auth_at_edge_lambda( return lamb - def add_version( - self, title: str, lambda_function: awslambda.Function - ) -> awslambda.Version: + def add_version(self, title: str, lambda_function: awslambda.Function) -> awslambda.Version: """Create a version association with a Lambda@Edge function. 
In order to ensure different versions of the function @@ -235,22 +222,20 @@ def add_version( s3_key = lambda_function.properties["Code"].to_dict()["S3Key"] code_hash = s3_key.split(".")[0].split("-")[-1] return self.template.add_resource( - awslambda.Version( - title + "Ver" + code_hash, FunctionName=lambda_function.ref() - ) + awslambda.Version(title + "Ver" + code_hash, FunctionName=lambda_function.ref()) ) def get_distribution_options( self, bucket: s3.Bucket, oai: cloudfront.CloudFrontOriginAccessIdentity, - lambda_funcs: List[cloudfront.LambdaFunctionAssociation], + lambda_funcs: list[cloudfront.LambdaFunctionAssociation], check_auth_lambda_version: awslambda.Version, http_headers_lambda_version: awslambda.Version, parse_auth_lambda_version: awslambda.Version, refresh_auth_lambda_version: awslambda.Version, sign_out_lambda_version: awslambda.Version, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """Retrieve the options for our CloudFront distribution. Keyword Args: @@ -353,7 +338,7 @@ def get_distribution_options( "ViewerCertificate": self.add_acm_cert(), } - def _get_error_responses(self) -> List[cloudfront.CustomErrorResponse]: + def _get_error_responses(self) -> list[cloudfront.CustomErrorResponse]: """Return error response based on site stack variables. When custom_error_responses are defined return those, if running @@ -378,10 +363,9 @@ def _get_error_responses(self) -> List[cloudfront.CustomErrorResponse]: ) ] - # pyright: reportIncompatibleMethodOverride=none - def _get_cloudfront_bucket_policy_statements( # pylint: disable=arguments-differ + def _get_cloudfront_bucket_policy_statements( # pyright: ignore [reportIncompatibleMethodOverride] self, bucket: s3.Bucket, oai: cloudfront.CloudFrontOriginAccessIdentity - ) -> List[Statement]: + ) -> list[Statement]: return [ Statement( Action=[awacs.s3.GetObject], diff --git a/runway/blueprints/staticsite/dependencies.py b/runway/blueprints/staticsite/dependencies.py index 595de7622..255a8599a 100755 --- a/runway/blueprints/staticsite/dependencies.py +++ b/runway/blueprints/staticsite/dependencies.py @@ -1,6 +1,8 @@ #!/usr/bin/env python """Module with static website supporting infrastructure.""" + import logging +from typing import Any import awacs.s3 from awacs.aws import Allow, AWSPrincipal, Policy, Statement @@ -98,12 +100,8 @@ def create_template(self) -> None: Statement( Action=[awacs.s3.PutObject], Effect=Allow, - Principal=AWSPrincipal( - Join(":", ["arn:aws:iam:", AccountId, "root"]) - ), - Resource=[ - Join("", ["arn:aws:s3:::", awslogbucket.ref(), "/*"]) - ], + Principal=AWSPrincipal(Join(":", ["arn:aws:iam:", AccountId, "root"])), + Resource=[Join("", ["arn:aws:s3:::", awslogbucket.ref(), "/*"])], ) ], ), @@ -114,11 +112,7 @@ def create_template(self) -> None: "Artifacts", AccessControl=s3.Private, LifecycleConfiguration=s3.LifecycleConfiguration( - Rules=[ - s3.LifecycleRule( - NoncurrentVersionExpirationInDays=90, Status="Enabled" - ) - ] + Rules=[s3.LifecycleRule(NoncurrentVersionExpirationInDays=90, Status="Enabled")] ), VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"), ) @@ -132,7 +126,7 @@ def create_template(self) -> None: ) if self.variables["AuthAtEdge"]: - userpool_client_params = { + userpool_client_params: dict[str, Any] = { "AllowedOAuthFlows": ["code"], "AllowedOAuthScopes": self.variables["OAuthScopes"], } @@ -142,11 +136,8 @@ def create_template(self) -> None: "SupportedIdentityProviders" ] - redirect_domains = [ - add_url_scheme(x) for x in self.variables["Aliases"] - ] + [ - 
add_url_scheme(x) - for x in self.variables["AdditionalRedirectDomains"] + redirect_domains = [add_url_scheme(x) for x in self.variables["Aliases"]] + [ + add_url_scheme(x) for x in self.variables["AdditionalRedirectDomains"] ] redirect_uris = get_redirect_uris( redirect_domains, @@ -161,9 +152,7 @@ def create_template(self) -> None: ]["callback_urls"] if self.variables["CreateUserPool"]: - user_pool = template.add_resource( - cognito.UserPool("AuthAtEdgeUserPool") - ) + user_pool = template.add_resource(cognito.UserPool("AuthAtEdgeUserPool")) user_pool_id = user_pool.ref() @@ -175,9 +164,7 @@ def create_template(self) -> None: ) ) else: - user_pool_id = self.context.hook_data["aae_user_pool_id_retriever"][ - "id" - ] + user_pool_id = self.context.hook_data["aae_user_pool_id_retriever"]["id"] userpool_client_params["UserPoolId"] = user_pool_id client = template.add_resource( diff --git a/runway/blueprints/staticsite/staticsite.py b/runway/blueprints/staticsite/staticsite.py index 1cd91c701..ed1c2b9de 100755 --- a/runway/blueprints/staticsite/staticsite.py +++ b/runway/blueprints/staticsite/staticsite.py @@ -5,7 +5,7 @@ import hashlib import logging import os -from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Union +from typing import TYPE_CHECKING, Any, ClassVar import awacs.awslambda import awacs.iam @@ -52,7 +52,7 @@ class _IndexRewriteFunctionInfoTypeDef(TypedDict): class StaticSite(Blueprint): """CFNgin blueprint for creating S3 bucket and CloudFront distribution.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "AcmCertificateArn": { "type": str, "default": "", @@ -61,7 +61,7 @@ class StaticSite(Blueprint): "Aliases": { "type": list, "default": [], - "description": "(Optional) Domain aliases the " "distribution", + "description": "(Optional) Domain aliases the distribution", }, "Compress": { "type": bool, @@ -87,9 +87,7 @@ class StaticSite(Blueprint): "RewriteDirectoryIndex": { "type": str, "default": "", - "description": "(Optional) File name to " - "append to directory " - "requests.", + "description": "(Optional) File name to append to directory requests.", }, "RoleBoundaryArn": { "type": str, @@ -101,17 +99,17 @@ class StaticSite(Blueprint): "WAFWebACL": { "type": str, "default": "", - "description": "(Optional) WAF id to associate with the " "distribution.", + "description": "(Optional) WAF id to associate with the distribution.", }, "custom_error_responses": { "type": list, "default": [], - "description": "(Optional) Custom error " "responses.", + "description": "(Optional) Custom error responses.", }, "lambda_function_associations": { "type": list, "default": [], - "description": "(Optional) Lambda " "function " "associations.", + "description": "(Optional) Lambda function associations.", }, } @@ -165,10 +163,8 @@ def create_template(self) -> None: if self.directory_index_specified: index_rewrite = self._get_index_rewrite_role_function_and_version() - lambda_function_associations = ( - self.get_directory_index_lambda_association( - lambda_function_associations, index_rewrite["version"] - ) + lambda_function_associations = self.get_directory_index_lambda_association( + lambda_function_associations, index_rewrite["version"] ) distribution_options = self.get_cloudfront_distribution_options( @@ -178,7 +174,7 @@ def create_template(self) -> None: else: self.add_bucket_policy(bucket) - def get_lambda_associations(self) -> List[cloudfront.LambdaFunctionAssociation]: + def 
get_lambda_associations(self) -> list[cloudfront.LambdaFunctionAssociation]: """Retrieve any lambda associations from the instance variables.""" # If custom associations defined, use them if self.variables["lambda_function_associations"]:
@@ -192,9 +188,9 @@ def get_lambda_associations(self) -> List[cloudfront.LambdaFunctionAssociation]: @staticmethod def get_directory_index_lambda_association( - lambda_associations: List[cloudfront.LambdaFunctionAssociation], + lambda_associations: list[cloudfront.LambdaFunctionAssociation], directory_index_rewrite_version: awslambda.Version, - ) -> List[cloudfront.LambdaFunctionAssociation]: + ) -> list[cloudfront.LambdaFunctionAssociation]: """Retrieve the directory index lambda associations with the added rewriter. Args:
@@ -214,8 +210,8 @@ def get_cloudfront_distribution_options( self, bucket: s3.Bucket, oai: cloudfront.CloudFrontOriginAccessIdentity, - lambda_function_associations: List[cloudfront.LambdaFunctionAssociation], - ) -> Dict[str, Any]: + lambda_function_associations: list[cloudfront.LambdaFunctionAssociation], - ) -> dict[str, Any]: """Retrieve the options for our CloudFront distribution. Args:
@@ -275,19 +271,19 @@ def get_cloudfront_distribution_options( "ViewerCertificate": self.add_acm_cert(), } - def add_aliases(self) -> Union[List[str], Ref]: + def add_aliases(self) -> list[str] | Ref: """Add aliases.""" if self.aliases_specified: return self.variables["Aliases"] return NoValue - def add_web_acl(self) -> Union[str, Ref]: + def add_web_acl(self) -> str | Ref: """Add Web ACL.""" if self.waf_name_specified: return self.variables["WAFWebACL"] return NoValue - def add_logging_bucket(self) -> Union[cloudfront.Logging, Ref]: + def add_logging_bucket(self) -> cloudfront.Logging | Ref: """Add Logging Bucket.""" if self.cf_logging_enabled: return cloudfront.Logging(
@@ -295,7 +291,7 @@ def add_logging_bucket(self) -> Union[cloudfront.Logging, Ref]: ) return NoValue - def add_acm_cert(self) -> Union[cloudfront.ViewerCertificate, Ref]: + def add_acm_cert(self) -> cloudfront.ViewerCertificate | Ref: """Add ACM cert.""" if self.acm_certificate_specified: return cloudfront.ViewerCertificate(
@@ -309,7 +305,7 @@ def add_origin_access_identity(self) -> cloudfront.CloudFrontOriginAccessIdentit return self.template.add_resource( cloudfront.CloudFrontOriginAccessIdentity( "OAI", - CloudFrontOriginAccessIdentityConfig=cloudfront.CloudFrontOriginAccessIdentityConfig( # noqa + CloudFrontOriginAccessIdentityConfig=cloudfront.CloudFrontOriginAccessIdentityConfig( Comment="CF access to website" ), )
@@ -364,19 +360,13 @@ def add_bucket(self) -> s3.Bucket: Rules=[s3.OwnershipControlsRule(ObjectOwnership="ObjectWriter")] ), LifecycleConfiguration=s3.LifecycleConfiguration( - Rules=[ - s3.LifecycleRule( - NoncurrentVersionExpirationInDays=90, Status="Enabled" - ) - ] + Rules=[s3.LifecycleRule(NoncurrentVersionExpirationInDays=90, Status="Enabled")] ), VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"), ) ) self.template.add_output( - Output( - "BucketName", Description="Name of website bucket", Value=bucket.ref() - ) + Output("BucketName", Description="Name of website bucket", Value=bucket.ref()) ) if not self.cf_enabled:
@@ -413,9 +403,7 @@ def add_cloudfront_bucket_policy( Bucket=bucket.ref(), PolicyDocument=PolicyDocument( Version="2012-10-17", - Statement=self._get_cloudfront_bucket_policy_statements( - bucket, oai - ), + Statement=self._get_cloudfront_bucket_policy_statements(bucket, oai), ), ) )
@@ -464,9 +452,7 @@ def 
add_lambda_execution_role( "lambda.amazonaws.com", "edgelambda.amazonaws.com" ), PermissionsBoundary=( - self.variables["RoleBoundaryArn"] - if self.role_boundary_specified - else NoValue + self.variables["RoleBoundaryArn"] if self.role_boundary_specified else NoValue ), Policies=[ iam.Policy( @@ -490,9 +476,7 @@ def add_lambda_execution_role( ) ) - def add_cloudfront_directory_index_rewrite( - self, role: iam.Role - ) -> awslambda.Function: + def add_cloudfront_directory_index_rewrite(self, role: iam.Role) -> awslambda.Function: """Add an index CloudFront directory index rewrite lambda function to the template. Keyword Args: @@ -503,11 +487,11 @@ def add_cloudfront_directory_index_rewrite( """ code_str = "" - path = os.path.join( - os.path.dirname(__file__), + path = os.path.join( # noqa: PTH118 + os.path.dirname(__file__), # noqa: PTH120 "templates/cf_directory_index_rewrite.template.js", ) - with open(path, encoding="utf-8") as file_: + with open(path, encoding="utf-8") as file_: # noqa: PTH123 code_str = file_.read().replace( "{{RewriteDirectoryIndex}}", self.variables["RewriteDirectoryIndex"] ) @@ -546,10 +530,8 @@ def add_cloudfront_directory_index_rewrite_version( The CloudFront directory index rewrite version. """ - code_hash = hashlib.md5( - str( - directory_index_rewrite.properties["Code"].properties["ZipFile"] - ).encode() + code_hash = hashlib.md5( # noqa: S324 + str(directory_index_rewrite.properties["Code"].properties["ZipFile"]).encode() ).hexdigest() return self.template.add_resource( @@ -562,7 +544,7 @@ def add_cloudfront_directory_index_rewrite_version( def add_cloudfront_distribution( self, bucket_policy: s3.BucketPolicy, - cloudfront_distribution_options: Dict[str, Any], + cloudfront_distribution_options: dict[str, Any], ) -> cloudfront.Distribution: """Add the CloudFront distribution to the template / output the id and domain name. 
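Both add_version and add_cloudfront_directory_index_rewrite_version above derive the Version resource's logical ID from a hash of the function code. A minimal sketch of that pattern; make_version_logical_id is a hypothetical helper for illustration, not runway API:

import hashlib

def make_version_logical_id(base_title: str, code: str) -> str:
    """Derive a CloudFormation logical ID from the function source.

    AWS::Lambda::Version resources are immutable, so embedding a content
    hash in the logical ID forces a new Version resource (and a new
    qualified ARN for Lambda@Edge) whenever the code changes.
    """
    # md5 is only a fingerprint here, not a security control -- which is
    # why the call sites above carry `# noqa: S324`.
    code_hash = hashlib.md5(code.encode()).hexdigest()[:8]
    return f"{base_title}Ver{code_hash}"

print(make_version_logical_id("CFDirectoryIndexRewrite", "exports.handler = ...;"))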
@@ -578,9 +560,7 @@ def add_cloudfront_distribution( cloudfront.Distribution( "CFDistribution", DependsOn=bucket_policy.title, - DistributionConfig=cloudfront.DistributionConfig( - **cloudfront_distribution_options - ), + DistributionConfig=cloudfront.DistributionConfig(**cloudfront_distribution_options), ) ) self.template.add_output( @@ -602,7 +582,7 @@ def add_cloudfront_distribution( @staticmethod def _get_cloudfront_bucket_policy_statements( bucket: s3.Bucket, oai: cloudfront.CloudFrontOriginAccessIdentity - ) -> List[Statement]: + ) -> list[Statement]: return [ Statement( Action=[awacs.s3.GetObject], @@ -621,9 +601,7 @@ def _get_index_rewrite_role_function_and_version( ) function = self.add_cloudfront_directory_index_rewrite(role) version = self.add_cloudfront_directory_index_rewrite_version(function) - return _IndexRewriteFunctionInfoTypeDef( - function=function, role=role, version=version - ) + return _IndexRewriteFunctionInfoTypeDef(function=function, role=role, version=version) # Helper section to enable easy blueprint -> template generation diff --git a/runway/blueprints/tf_state.py b/runway/blueprints/tf_state.py index d6417aaba..7adc14424 100755 --- a/runway/blueprints/tf_state.py +++ b/runway/blueprints/tf_state.py @@ -2,7 +2,7 @@ """Module with Terraform state resources.""" from __future__ import annotations -from typing import TYPE_CHECKING, ClassVar, Dict +from typing import TYPE_CHECKING, ClassVar import awacs.dynamodb import awacs.s3 @@ -19,7 +19,7 @@ class TfState(Blueprint): """CFNgin blueprint for creating Terraform state resources.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "BucketDeletionPolicy": { "type": str, "allowed_values": ["Delete", "Retain"], @@ -58,17 +58,13 @@ def create_template(self) -> None: dynamodb.Table( "TerraformStateTable", AttributeDefinitions=[ - dynamodb.AttributeDefinition( - AttributeName="LockID", AttributeType="S" - ) + dynamodb.AttributeDefinition(AttributeName="LockID", AttributeType="S") ], KeySchema=[dynamodb.KeySchema(AttributeName="LockID", KeyType="HASH")], ProvisionedThroughput=dynamodb.ProvisionedThroughput( ReadCapacityUnits=2, WriteCapacityUnits=2 ), - TableName=If( - "TableNameOmitted", NoValue, self.variables["TableName"].ref - ), + TableName=If("TableNameOmitted", NoValue, self.variables["TableName"].ref), ) ) self.template.add_output( @@ -84,15 +80,9 @@ def create_template(self) -> None: "TerraformStateBucket", DeletionPolicy=self.variables["BucketDeletionPolicy"], AccessControl=s3.Private, - BucketName=If( - "BucketNameOmitted", NoValue, self.variables["BucketName"].ref - ), + BucketName=If("BucketNameOmitted", NoValue, self.variables["BucketName"].ref), LifecycleConfiguration=s3.LifecycleConfiguration( - Rules=[ - s3.LifecycleRule( - NoncurrentVersionExpirationInDays=90, Status="Enabled" - ) - ] + Rules=[s3.LifecycleRule(NoncurrentVersionExpirationInDays=90, Status="Enabled")] ), VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"), ) @@ -129,9 +119,7 @@ def create_template(self) -> None: Statement( Action=[awacs.s3.GetObject, awacs.s3.PutObject], Effect=Allow, - Resource=[ - Join("", [terraformstatebucket.get_att("Arn"), "/*"]) - ], + Resource=[Join("", [terraformstatebucket.get_att("Arn"), "/*"])], ), Statement( Action=[ @@ -160,6 +148,4 @@ def create_template(self) -> None: if __name__ == "__main__": from runway.context import CfnginContext - print( # noqa: T201 - TfState("test", CfnginContext(parameters={"namespace": 
"test"})).to_json() - ) + print(TfState("test", CfnginContext(parameters={"namespace": "test"})).to_json()) # noqa: T201 diff --git a/runway/cfngin/actions/base.py b/runway/cfngin/actions/base.py index 5e98739af..2937e1899 100644 --- a/runway/cfngin/actions/base.py +++ b/runway/cfngin/actions/base.py @@ -6,7 +6,7 @@ import os import sys import threading -from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, Union +from typing import TYPE_CHECKING, Any, Callable, ClassVar import botocore.exceptions @@ -66,7 +66,7 @@ def build_walker(concurrency: int) -> Callable[..., Any]: return ThreadedWalker(semaphore).walk -def stack_template_url(bucket_name: str, blueprint: Blueprint, endpoint: str): +def stack_template_url(bucket_name: str, blueprint: Blueprint, endpoint: str) -> str: """Produce an s3 url for a given blueprint. Args: @@ -99,21 +99,21 @@ class BaseAction: """ DESCRIPTION: ClassVar[str] = "Base action" - NAME: ClassVar[Optional[str]] = None + NAME: ClassVar[str | None] = None - bucket_name: Optional[str] - bucket_region: Optional[str] + bucket_name: str | None + bucket_region: str | None cancel: threading.Event context: CfnginContext - provider_builder: Optional[ProviderBuilder] + provider_builder: ProviderBuilder | None s3_conn: S3Client def __init__( self, context: CfnginContext, - provider_builder: Optional[ProviderBuilder] = None, - cancel: Optional[threading.Event] = None, - ): + provider_builder: ProviderBuilder | None = None, + cancel: threading.Event | None = None, + ) -> None: """Instantiate class. Args: @@ -158,9 +158,7 @@ def ensure_cfn_bucket(self) -> None: """CloudFormation bucket where templates will be stored.""" if self.bucket_name: try: - ensure_s3_bucket( - self.s3_conn, self.bucket_name, self.bucket_region, create=False - ) + ensure_s3_bucket(self.s3_conn, self.bucket_name, self.bucket_region, create=False) except botocore.exceptions.ClientError: raise CfnginBucketNotFound(bucket_name=self.bucket_name) from None @@ -174,21 +172,17 @@ def execute(self, **kwargs: Any) -> None: LOGGER.error(str(err)) sys.exit(1) - def pre_run( - self, *, dump: Union[bool, str] = False, outline: bool = False, **__kwargs: Any - ) -> None: + def pre_run(self, *, dump: bool | str = False, outline: bool = False, **__kwargs: Any) -> None: """Perform steps before running the action.""" - def post_run( - self, *, dump: Union[bool, str] = False, outline: bool = False, **__kwargs: Any - ) -> None: + def post_run(self, *, dump: bool | str = False, outline: bool = False, **__kwargs: Any) -> None: """Perform steps after running the action.""" def run( self, *, concurrency: int = 0, - dump: Union[bool, str] = False, + dump: bool | str = False, force: bool = False, outline: bool = False, tail: bool = False, @@ -213,10 +207,7 @@ def s3_stack_push(self, blueprint: Blueprint, force: bool = False) -> str: key_name = stack_template_key_name(blueprint) template_url = self.stack_template_url(blueprint) try: - template_exists = ( - self.s3_conn.head_object(Bucket=self.bucket_name, Key=key_name) - is not None - ) + template_exists = bool(self.s3_conn.head_object(Bucket=self.bucket_name, Key=key_name)) except botocore.exceptions.ClientError as err: if err.response["Error"]["Code"] == "404": template_exists = False @@ -240,9 +231,7 @@ def stack_template_url(self, blueprint: Blueprint) -> str: """S3 URL for CloudFormation template object.""" if not self.bucket_name: raise ValueError("bucket_name required") - return stack_template_url( - self.bucket_name, blueprint, get_s3_endpoint(self.s3_conn) - ) 
+ return stack_template_url(self.bucket_name, blueprint, get_s3_endpoint(self.s3_conn)) def _generate_plan( self, @@ -266,8 +255,7 @@ def _generate_plan( tail_fn = self._tail_stack if tail else None steps = [ - Step(stack, fn=self._stack_action, watch_func=tail_fn) - for stack in self.context.stacks + Step(stack, fn=self._stack_action, watch_func=tail_fn) for stack in self.context.stacks ] graph = Graph.from_steps(steps) @@ -295,6 +283,4 @@ def _tail_stack( ) -> None: """Tail a stack's event stream.""" provider = self.build_provider() - return provider.tail_stack( - stack, cancel, action=self.NAME, retries=retries, **kwargs - ) + return provider.tail_stack(stack, cancel, action=self.NAME, retries=retries, **kwargs) diff --git a/runway/cfngin/actions/deploy.py b/runway/cfngin/actions/deploy.py index 8b4081315..0550957f7 100644 --- a/runway/cfngin/actions/deploy.py +++ b/runway/cfngin/actions/deploy.py @@ -3,9 +3,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union - -from typing_extensions import Literal +from typing import TYPE_CHECKING, Any, Callable from ..exceptions import ( CancelExecution, @@ -35,6 +33,7 @@ if TYPE_CHECKING: from mypy_boto3_cloudformation.type_defs import ParameterTypeDef, StackTypeDef + from typing_extensions import Literal from ...config.models.cfngin import CfnginHookDefinitionModel from ...context import CfnginContext @@ -50,7 +49,7 @@ DESTROYING_STATUS = SubmittedStatus("submitted for destruction") -def build_stack_tags(stack: Stack) -> List[TagTypeDef]: +def build_stack_tags(stack: Stack) -> list[TagTypeDef]: """Build a common set of tags to attach to a stack.""" return [{"Key": t[0], "Value": t[1]} for t in stack.tags.items()] @@ -64,9 +63,7 @@ def should_update(stack: Stack) -> bool: """ if stack.locked: if not stack.force: - LOGGER.debug( - "%s:locked and not in --force list; refusing to update", stack.name - ) + LOGGER.debug("%s:locked and not in --force list; refusing to update", stack.name) return False LOGGER.debug("%s:locked but is in --force list", stack.name) return True @@ -100,9 +97,7 @@ def should_ensure_cfn_bucket(outline: bool, dump: bool) -> bool: return not outline and not dump -def _resolve_parameters( - parameters: Dict[str, Any], blueprint: Blueprint -) -> Dict[str, Any]: +def _resolve_parameters(parameters: dict[str, Any], blueprint: Blueprint) -> dict[str, Any]: """Resolve CloudFormation Parameters for a given blueprint. Given a list of parameters, handles: @@ -118,7 +113,7 @@ def _resolve_parameters( The resolved parameters. """ - params: Dict[str, Any] = {} + params: dict[str, Any] = {} for key, value in parameters.items(): if key not in blueprint.parameter_definitions: LOGGER.debug("blueprint %s does not use parameter %s", blueprint.name, key) @@ -132,7 +127,7 @@ def _resolve_parameters( continue if isinstance(value, bool): LOGGER.debug('converting parameter %s boolean "%s" to string', key, value) - value = str(value).lower() + value = str(value).lower() # noqa: PLW2901 params[key] = value return params @@ -142,11 +137,11 @@ class UsePreviousParameterValue: def _handle_missing_parameters( - parameter_values: Dict[str, Any], - all_params: List[str], - required_params: List[str], - existing_stack: Optional[StackTypeDef] = None, -) -> List[Tuple[str, Any]]: + parameter_values: dict[str, Any], + all_params: list[str], + required_params: list[str], + existing_stack: StackTypeDef | None = None, +) -> list[tuple[str, Any]]: """Handle any missing parameters. 
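A self-contained sketch of the fallback behavior _handle_missing_parameters implements, assuming it reduces to "reuse the deployed stack's value for anything missing, then fail only on required parameters that are still absent"; handle_missing and its plain-dict types are illustrative simplifications, not the runway implementation:

class UsePreviousParameterValue:
    """Sentinel mirroring the class defined in the hunk above."""

def handle_missing(
    parameter_values: dict,
    all_params: list,
    required_params: list,
    existing_params: dict | None = None,
) -> list:
    missing = [p for p in all_params if p not in parameter_values]
    for param in missing:
        # keep whatever value the already-deployed stack uses
        if existing_params and param in existing_params:
            parameter_values[param] = UsePreviousParameterValue
    still_missing = set(required_params) - set(parameter_values)
    if still_missing:
        raise ValueError(f"missing required parameters: {sorted(still_missing)}")
    return list(parameter_values.items())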
If an existing_stack is provided, look up missing parameters there. @@ -175,9 +170,7 @@ def _handle_missing_parameters( ] for param in missing_params: if param in stack_parameters: - LOGGER.debug( - "using previous value for parameter %s from existing stack", param - ) + LOGGER.debug("using previous value for parameter %s from existing stack", param) parameter_values[param] = UsePreviousParameterValue final_missing = list(set(required_params) - set(parameter_values.keys())) if final_missing: @@ -188,11 +181,11 @@ def _handle_missing_parameters( def handle_hooks( stage: Literal["post_deploy", "pre_deploy"], - hooks: List[CfnginHookDefinitionModel], + hooks: list[CfnginHookDefinitionModel], provider: Provider, context: CfnginContext, *, - dump: Union[bool, str] = False, + dump: bool | str = False, outline: bool = False, ) -> None: """Handle pre/post hooks. @@ -239,9 +232,7 @@ def upload_disabled(self) -> bool: """Whether the CloudFormation template should be uploaded to S3.""" if self.upload_explicitly_disabled: return True - if not self.bucket_name: - return True - return False + return bool(not self.bucket_name) @upload_disabled.setter def upload_disabled(self, value: bool) -> None: @@ -261,8 +252,8 @@ def upload_disabled(self, value: bool) -> None: @staticmethod def build_parameters( - stack: Stack, provider_stack: Optional[StackTypeDef] = None - ) -> List[ParameterTypeDef]: + stack: Stack, provider_stack: StackTypeDef | None = None + ) -> list[ParameterTypeDef]: """Build the CloudFormation Parameters for our stack. Args: @@ -280,7 +271,7 @@ def build_parameters( resolved, all_parameters, required_parameters, provider_stack ) - param_list: List[ParameterTypeDef] = [] + param_list: list[ParameterTypeDef] = [] for key, value in parameters: param_dict: ParameterTypeDef = {"ParameterKey": key} @@ -293,9 +284,7 @@ def build_parameters( return param_list - def _destroy_stack( - self, stack: Stack, *, status: Optional[Status] = None, **_: Any - ) -> Status: + def _destroy_stack(self, stack: Stack, *, status: Status | None = None, **_: Any) -> Status: """Delete a CloudFormation stack. Used to remove stacks that exist in the persistent graph but not @@ -344,9 +333,10 @@ def _destroy_stack( except CancelExecution: return SkippedStatus(reason="canceled execution") - # TODO refactor long if, elif, else block - # pylint: disable=too-many-return-statements,too-many-branches,too-many-statements - def _launch_stack(self, stack: Stack, *, status: Status, **_: Any) -> Status: + # TODO (kyle): refactor long if, elif, else block + def _launch_stack( # noqa: C901, PLR0911, PLR0915, PLR0912 + self, stack: Stack, *, status: Status, **_: Any + ) -> Status: """Handle the creating or updating of a stack in CloudFormation. 
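The hunks below also replace `elif` branches that follow `return` statements with plain `if`, making the old `pylint: disable=no-else-return` unnecessary. A tiny sketch (classify is hypothetical) of why the two forms are equivalent when every branch returns:

def classify(n: int) -> str:
    # each early return makes the next condition independent,
    # so no else/elif chain is needed
    if n < 0:
        return "negative"
    if n == 0:
        return "zero"
    return "positive"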
Also makes sure that we don't try to create or update a stack while
@@ -383,9 +373,7 @@ def _launch_stack(self, stack: Stack, *, status: Status, **_: Any) -> Status: provider.get_stack_status(provider_stack), ) - if provider.is_stack_rolling_back( # pylint: disable=no-else-return - provider_stack - ): + if provider.is_stack_rolling_back(provider_stack): if status.reason and "rolling back" in status.reason: return status
@@ -396,10 +384,10 @@ def _launch_stack(self, stack: Stack, *, status: Status, **_: Any) -> Status: reason = "rolling back new stack" return SubmittedStatus(reason) - elif provider.is_stack_in_progress(provider_stack): + if provider.is_stack_in_progress(provider_stack): LOGGER.debug("%s:in progress", stack.fqn) return status - elif provider.is_stack_destroyed(provider_stack): + if provider.is_stack_destroyed(provider_stack): LOGGER.debug("%s:finished deleting", stack.fqn) recreate = True # Continue with creation afterwards
@@ -502,7 +490,7 @@ def _template(self, blueprint: Blueprint) -> Template: return Template(url=self.s3_stack_push(blueprint)) @staticmethod - def _stack_policy(stack: Stack) -> Optional[Template]: + def _stack_policy(stack: Stack) -> Template | None: """Return a Template object for the stacks stack policy.""" return Template(body=stack.stack_policy) if stack.stack_policy else None
@@ -523,7 +511,7 @@ def __generate_plan(self, tail: bool = False) -> Plan: graph = Graph() config_stack_names = [stack.name for stack in self.context.stacks] - inverse_steps: List[Step] = [] + inverse_steps: list[Step] = [] persist_graph = self.context.persistent_graph.transposed() for ind_node, dep_nodes in persist_graph.dag.graph.items():
@@ -556,9 +544,7 @@ def __generate_plan(self, tail: bool = False) -> Plan: return Plan(context=self.context, description=self.DESCRIPTION, graph=graph) - def pre_run( - self, *, dump: Union[bool, str] = False, outline: bool = False, **_: Any - ) -> None: + def pre_run(self, *, dump: bool | str = False, outline: bool = False, **_: Any) -> None: """Any steps that need to be taken prior to running the action.""" if should_ensure_cfn_bucket(outline, bool(dump)): self.ensure_cfn_bucket()
@@ -575,8 +561,8 @@ def run( self, *, concurrency: int = 0, - dump: Union[bool, str] = False, - force: bool = False, # pylint: disable=unused-argument + dump: bool | str = False, + force: bool = False, # noqa: ARG002 outline: bool = False, tail: bool = False, upload_disabled: bool = False,
@@ -616,9 +602,7 @@ def run( if isinstance(dump, str): plan.dump(directory=dump, context=self.context, provider=self.provider) - def post_run( - self, *, dump: Union[bool, str] = False, outline: bool = False, **_: Any - ) -> None: + def post_run(self, *, dump: bool | str = False, outline: bool = False, **_: Any) -> None: """Any steps that need to be taken after running the action.""" handle_hooks( "post_deploy",
diff --git a/runway/cfngin/actions/destroy.py b/runway/cfngin/actions/destroy.py
index e83dae0c5..7396327dc 100644
--- a/runway/cfngin/actions/destroy.py
+++ b/runway/cfngin/actions/destroy.py
@@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Callable, Optional, Union +from typing import TYPE_CHECKING, Any, Callable from ..exceptions import StackDoesNotExist from ..hooks.utils import handle_hooks
@@ -49,9 +49,7 @@ def _stack_action(self) -> Callable[..., Status]: """Run against a step.""" return self._destroy_stack - def _destroy_stack( - self, stack: Stack, *, status: Optional[Status], **_: Any - ) -> Status: + def _destroy_stack(self, stack: Stack, *, status: Status | None, **_: Any) -> Status: wait_time = 0 if status is PENDING else STACK_POLL_TIME if self.cancel.wait(wait_time): return INTERRUPTED
@@ -82,17 +80,15 @@ def _destroy_stack( LOGGER.debug("%s:destroying stack", stack.fqn) provider.destroy_stack(stack_data) return DESTROYING_STATUS - LOGGER.critical( - "%s: %s", stack.fqn, provider.get_delete_failed_status_reason(stack.fqn) - ) + LOGGER.critical("%s: %s", stack.fqn, provider.get_delete_failed_status_reason(stack.fqn)) return FailedStatus(provider.get_stack_status_reason(stack_data)) def pre_run( self, *, - dump: Union[bool, str] = False, # pylint: disable=unused-argument + dump: bool | str = False, # noqa: ARG002 outline: bool = False, - **__kwargs: Any, + **_kwargs: Any, ) -> None: """Any steps that need to be taken prior to running the action.""" pre_destroy = self.context.config.pre_destroy
@@ -108,17 +104,15 @@ def run( self, *, concurrency: int = 0, - dump: Union[bool, str] = False, # pylint: disable=unused-argument + dump: bool | str = False, # noqa: ARG002 force: bool = False, - outline: bool = False, # pylint: disable=unused-argument + outline: bool = False, # noqa: ARG002 tail: bool = False, - upload_disabled: bool = False, # pylint: disable=unused-argument + upload_disabled: bool = False, # noqa: ARG002 **_kwargs: Any, ) -> None: """Kicks off the destruction of the stacks in the stack_definitions.""" - plan = self._generate_plan( - tail=tail, reverse=True, include_persistent_graph=True - ) + plan = self._generate_plan(tail=tail, reverse=True, include_persistent_graph=True) if not plan.keys(): LOGGER.warning("no stacks detected (error in config?)") if force:
@@ -137,9 +131,9 @@ def post_run( self, *, - dump: Union[bool, str] = False, # pylint: disable=unused-argument + dump: bool | str = False, # noqa: ARG002 outline: bool = False, - **__kwargs: Any, + **_kwargs: Any, ) -> None: """Any steps that need to be taken after running the action.""" if not outline and self.context.config.post_destroy:
diff --git a/runway/cfngin/actions/diff.py b/runway/cfngin/actions/diff.py
index 620027742..7deaae5b5 100644
--- a/runway/cfngin/actions/diff.py
+++ b/runway/cfngin/actions/diff.py
@@ -9,12 +9,8 @@ TYPE_CHECKING, Any, Callable, - Dict, Generic, - List, - Tuple, TypeVar, - Union, cast, )
@@ -64,14 +60,14 @@ def __eq__(self, other: object) -> bool: """Compare if self is equal to another object.""" return self.__dict__ == other.__dict__ - def changes(self) -> List[str]: + def changes(self) -> list[str]: """Return changes to represent the diff between old and new value. Returns: Representation of the change (if any) between old and new value. """ - output: List[str] = [] + output: list[str] = [] if self.status() is self.UNMODIFIED: output = [self.formatter % (" ", self.key, self.old_value)] elif self.status() is self.ADDED:
@@ -95,8 +91,8 @@ def status(self) -> str: def diff_dictionaries( - old_dict: Dict[str, _OV], new_dict: Dict[str, _NV] -) -> Tuple[int, List[DictValue[_OV, _NV]]]: + old_dict: dict[str, _OV], new_dict: dict[str, _NV] +) -> tuple[int, list[DictValue[_OV, _NV]]]: """Calculate the diff two single dimension dictionaries.
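The set arithmetic this function is built on can be shown in isolation; diff_keys is illustrative only, while the real function emits DictValue records plus a change count:

def diff_keys(old: dict, new: dict):
    old_set, new_set = set(old), set(new)
    added = new_set - old_set
    removed = old_set - new_set
    # a key present in both dicts only counts when its value changed
    modified = {k for k in old_set & new_set if old[k] != new[k]}
    return added, removed, modified

print(diff_keys({"a": 1, "b": 2}, {"b": 3, "c": 4}))
# ({'c'}, {'a'}, {'b'})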
Args:
@@ -116,7 +112,7 @@ common_set = old_set & new_set changes = 0 - output: List[DictValue[Any, Any]] = [] + output: list[DictValue[Any, Any]] = [] for key in added_set: changes += 1 output.append(DictValue(key, None, new_dict[key]))
@@ -134,7 +130,7 @@ return changes, output -def format_params_diff(parameter_diff: List[DictValue[Any, Any]]) -> str: +def format_params_diff(parameter_diff: list[DictValue[Any, Any]]) -> str: """Handle the formatting of differences in parameters. Args:
@@ -155,8 +151,8 @@ def diff_parameters( - old_params: Dict[str, _OV], new_params: Dict[str, _NV] -) -> List[DictValue[_OV, _NV]]: + old_params: dict[str, _OV], new_params: dict[str, _NV] +) -> list[DictValue[_OV, _NV]]: """Compare the old vs. new parameters and returns a "diff". If there are no changes, we return an empty list.
@@ -195,7 +191,7 @@ def _stack_action(self) -> Callable[..., Status]: """Run against a step.""" return self._diff_stack - def _diff_stack(self, stack: Stack, **_: Any) -> Status: + def _diff_stack(self, stack: Stack, **_: Any) -> Status: # noqa: C901 """Handle diffing a stack in CloudFormation vs our config.""" if self.cancel.wait(0): return INTERRUPTED
@@ -228,15 +224,10 @@ def _diff_stack(self, stack: Stack, **_: Any) -> Status: stack.set_outputs(provider.get_outputs(stack.fqn)) except exceptions.StackDoesNotExist: if self.context.persistent_graph: - return SkippedStatus( - "persistent graph: stack does not exist, will be removed" - ) + return SkippedStatus("persistent graph: stack does not exist, will be removed") return DoesNotExistInCloudFormation() except AttributeError as err: - if ( - self.context.persistent_graph - and "defined class or template path" in str(err) - ): + if self.context.persistent_graph and "defined class or template path" in str(err): return SkippedStatus("persistent graph: will be destroyed") raise except ClientError as err:
@@ -245,8 +236,7 @@ and "length less than or equal to" in err.response["Error"]["Message"] ): LOGGER.error( - "%s:template is too large to provide directly to the API; " - "S3 must be used", + "%s:template is too large to provide directly to the API; S3 must be used", stack.name, ) return SkippedStatus("cfngin_bucket: existing bucket required")
@@ -257,17 +247,15 @@ def run( self, *, concurrency: int = 0, - dump: Union[bool, str] = False, # pylint: disable=unused-argument - force: bool = False, # pylint: disable=unused-argument - outline: bool = False, # pylint: disable=unused-argument - tail: bool = False, # pylint: disable=unused-argument - upload_disabled: bool = False, # pylint: disable=unused-argument + dump: bool | str = False, # noqa: ARG002 + force: bool = False, # noqa: ARG002 + outline: bool = False, # noqa: ARG002 + tail: bool = False, # noqa: ARG002 + upload_disabled: bool = False, # noqa: ARG002 **_kwargs: Any, ) -> None: """Kicks off the diffing of the stacks in the stack_definitions.""" - plan = self._generate_plan( - require_unlocked=False, include_persistent_graph=True - ) + plan = self._generate_plan(require_unlocked=False, include_persistent_graph=True) plan.outline(logging.DEBUG) if plan.keys(): LOGGER.info("diffing stacks: %s", ", ".join(plan.keys()))
@@ -279,9 +267,9 @@ def pre_run( self, *, - dump: Union[bool, str] = False, # pylint: disable=unused-argument - outline: bool = False, # pylint: disable=unused-argument - **__kwargs: Any, + dump: bool | str = False, # noqa: ARG002 + outline: bool = False, # noqa: ARG002 + **_kwargs: Any, ) -> None: """Any steps that need to be taken prior to running the action.
@@ -296,14 +284,11 @@ sys.exit(1) if bucket.not_found: LOGGER.warning( - 'cfngin_bucket "%s" does not exist and will be creating ' - "during the next deploy", + 'cfngin_bucket "%s" does not exist and will be creating during the next deploy', bucket.name, ) LOGGER.verbose("proceeding without a cfngin_bucket...") self.bucket_name = None - def post_run( - self, *, dump: Union[bool, str] = False, outline: bool = False, **__kwargs: Any - ) -> None: + def post_run(self, *, dump: bool | str = False, outline: bool = False, **__kwargs: Any) -> None: """Do nothing."""
diff --git a/runway/cfngin/actions/graph.py b/runway/cfngin/actions/graph.py
index aa44e5acc..2f74c020d 100644
--- a/runway/cfngin/actions/graph.py
+++ b/runway/cfngin/actions/graph.py
@@ -5,18 +5,20 @@ import json import logging import sys -from typing import TYPE_CHECKING, Any, Iterable, List, TextIO, Tuple, Union +from typing import TYPE_CHECKING, Any, TextIO from ..plan import merge_graphs from .base import BaseAction if TYPE_CHECKING: + from collections.abc import Iterable + from ..plan import Graph, Step LOGGER = logging.getLogger(__name__) -def each_step(graph: Graph) -> Iterable[Tuple[Step, List[Step]]]: +def each_step(graph: Graph) -> Iterable[tuple[Step, list[Step]]]: """Yield each step and it's direct dependencies. Args:
@@ -56,10 +58,7 @@ def json_format(out: TextIO, graph: Graph) -> None: graph: Graph to be output. """ - steps = { - step.name: {"deps": [dep.name for dep in deps]} - for step, deps in each_step(graph) - } + steps = {step.name: {"deps": [dep.name for dep in deps]} for step, deps in each_step(graph)} json.dump({"steps": steps}, out, indent=4) out.write("\n")
@@ -85,18 +84,16 @@ def _stack_action(self) -> Any: def run( self, *, - concurrency: int = 0, # pylint: disable=unused-argument - dump: Union[bool, str] = False, # pylint: disable=unused-argument - force: bool = False, # pylint: disable=unused-argument - outline: bool = False, # pylint: disable=unused-argument - tail: bool = False, # pylint: disable=unused-argument - upload_disabled: bool = False, # pylint: disable=unused-argument + concurrency: int = 0, # noqa: ARG002 + dump: bool | str = False, # noqa: ARG002 + force: bool = False, # noqa: ARG002 + outline: bool = False, # noqa: ARG002 + tail: bool = False, # noqa: ARG002 + upload_disabled: bool = False, # noqa: ARG002 **kwargs: Any, ) -> None: """Generate the underlying graph and prints it.""" - graph = self._generate_plan( - require_unlocked=False, include_persistent_graph=True - ).graph + graph = self._generate_plan(require_unlocked=False, include_persistent_graph=True).graph if self.context.persistent_graph: graph = merge_graphs(self.context.persistent_graph, graph) if kwargs.get("reduce"):
diff --git a/runway/cfngin/actions/info.py b/runway/cfngin/actions/info.py
index 2d0770911..81ca4eb93 100644
--- a/runway/cfngin/actions/info.py
+++ b/runway/cfngin/actions/info.py
@@ -40,6 +40,4 @@ def run(self, *_args: Any, **_kwargs: Any) -> None: LOGGER.info("%s:", stack.fqn) if "Outputs" in provider_stack: for output in provider_stack["Outputs"]: - LOGGER.info( - "\t%s: %s", output.get("OutputKey"), output.get("OutputValue") - ) + LOGGER.info("\t%s: %s", output.get("OutputKey"), output.get("OutputValue"))
diff --git a/runway/cfngin/actions/init.py b/runway/cfngin/actions/init.py
index 9fec7f05d..9d59de217 100644
--- a/runway/cfngin/actions/init.py
+++ b/runway/cfngin/actions/init.py
@@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Optional, Union, cast +from typing import TYPE_CHECKING, Any, cast from ...compat import cached_property from ...config.models.cfngin import CfnginStackDefinitionModel
@@ -31,9 +31,9 @@ class Action(BaseAction): def __init__( self, context: CfnginContext, - provider_builder: Optional[ProviderBuilder] = None, - cancel: Optional[threading.Event] = None, - ): + provider_builder: ProviderBuilder | None = None, + cancel: threading.Event | None = None, + ) -> None: """Instantiate class. This class creates a copy of the context object prior to initialization
@@ -46,9 +46,7 @@ def __init__( cancel: Cancel handler. """ - super().__init__( - context=context.copy(), provider_builder=provider_builder, cancel=cancel - ) + super().__init__(context=context.copy(), provider_builder=provider_builder, cancel=cancel) @property def _stack_action(self) -> Any:
@@ -56,7 +54,7 @@ """Run against a step.""" return None @cached_property - def cfngin_bucket(self) -> Optional[Bucket]: + def cfngin_bucket(self) -> Bucket | None: """CFNgin bucket. Raises:
@@ -86,11 +84,11 @@ def run( self, *, concurrency: int = 0, - dump: Union[bool, str] = False, # pylint: disable=unused-argument - force: bool = False, # pylint: disable=unused-argument - outline: bool = False, # pylint: disable=unused-argument + dump: bool | str = False, # noqa: ARG002 + force: bool = False, # noqa: ARG002 + outline: bool = False, # noqa: ARG002 tail: bool = False, - upload_disabled: bool = True, # pylint: disable=unused-argument + upload_disabled: bool = True, # noqa: ARG002 **_kwargs: Any, ) -> None: """Run the action.
@@ -125,9 +123,7 @@ LOGGER.notice("using default blueprint to create cfngin_bucket...") self.context.config.stacks = [self.default_cfngin_bucket_stack] # clear cached values that were populated by checking the previous condition - self.context._del_cached_property( # pylint: disable=protected-access - "stacks", "stacks_dict" - ) + self.context._del_cached_property("stacks", "stacks_dict") # noqa: SLF001 if self.provider_builder: self.provider_builder.region = self.context.bucket_region deploy.Action(
@@ -144,7 +140,7 @@ def pre_run( self, *, - dump: Union[bool, str] = False, + dump: bool | str = False, outline: bool = False, **__kwargs: Any, ) -> None:
@@ -153,7 +149,7 @@ def post_run( self, *, - dump: Union[bool, str] = False, + dump: bool | str = False, outline: bool = False, **__kwargs: Any, ) -> None:
diff --git a/runway/cfngin/awscli_yamlhelper.py b/runway/cfngin/awscli_yamlhelper.py
index 723cb7b79..cfe1b5db0 100644
--- a/runway/cfngin/awscli_yamlhelper.py
+++ b/runway/cfngin/awscli_yamlhelper.py
@@ -15,14 +15,15 @@ from __future__ import annotations import json -from typing import Any, Dict, MutableMapping, MutableSequence, cast +from collections.abc import MutableMapping, MutableSequence +from typing import Any, cast import yaml -def intrinsics_multi_constructor( # pylint: disable=unused-argument - loader: yaml.Loader, tag_prefix: str, node: yaml.Node -) -> Dict[str, Any]: +def intrinsics_multi_constructor( + loader: yaml.Loader, tag_prefix: str, node: yaml.Node # noqa: ARG001 +) -> dict[str, Any]: """YAML constructor to parse CloudFormation intrinsics.
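A hedged usage sketch of the PyYAML mechanics this constructor relies on -- simplified from the real implementation, which also special-cases GetAtt and handles nested nodes more carefully:

import yaml

def intrinsics_multi_constructor(loader, tag_prefix, node):
    # "!Ref foo" -> {"Ref": "foo"}; other short-form tags gain an "Fn::" prefix
    tag = node.tag[1:]
    prefix = "" if tag in ("Ref", "Condition") else "Fn::"
    if isinstance(node, yaml.ScalarNode):
        value = loader.construct_scalar(node)
    elif isinstance(node, yaml.SequenceNode):
        value = loader.construct_sequence(node)
    else:
        value = loader.construct_mapping(node)
    return {prefix + tag: value}

yaml.add_multi_constructor("!", intrinsics_multi_constructor, Loader=yaml.SafeLoader)
print(yaml.safe_load("Value: !Ref MyBucket"))  # {'Value': {'Ref': 'MyBucket'}}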
This will return a dictionary with key being the intrinsic name @@ -59,12 +60,12 @@ def intrinsics_multi_constructor( # pylint: disable=unused-argument return {cfntag: value} -def yaml_dump(dict_to_dump: Dict[str, Any]) -> str: +def yaml_dump(dict_to_dump: dict[str, Any]) -> str: """Dump the dictionary as a YAML document.""" return yaml.safe_dump(dict_to_dump, default_flow_style=False) -def yaml_parse(yamlstr: str) -> Dict[str, Any]: +def yaml_parse(yamlstr: str) -> dict[str, Any]: """Parse a yaml string.""" try: # PyYAML doesn't support json as well as it should, so if the input diff --git a/runway/cfngin/blueprints/base.py b/runway/cfngin/blueprints/base.py index f404a987c..efe5fbe27 100644 --- a/runway/cfngin/blueprints/base.py +++ b/runway/cfngin/blueprints/base.py @@ -6,18 +6,7 @@ import hashlib import logging import string -from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - Dict, - List, - Optional, - Tuple, - Type, - TypeVar, - Union, -) +from typing import TYPE_CHECKING, Any, ClassVar from troposphere import Output, Parameter, Ref, Template @@ -54,15 +43,11 @@ "constraint_description": "ConstraintDescription", } -_T = TypeVar("_T") - class CFNParameter: """Wrapper around a value to indicate a CloudFormation Parameter.""" - def __init__( - self, name: str, value: Union[bool, float, int, List[Any], str, Any] - ) -> None: + def __init__(self, name: str, value: bool | float | list[Any] | str | Any) -> None: """Instantiate class. Args: @@ -82,7 +67,7 @@ def __init__( else: raise TypeError( f"CFNParameter ({name}) value must be one of bool, float, int, str, " - f"List[str] but got: {type(value)}" + f"list[str] but got: {type(value)}" ) def __repr__(self) -> str: @@ -94,7 +79,7 @@ def ref(self) -> Ref: """Ref the value of a parameter.""" return Ref(self.name) - def to_parameter_value(self) -> Union[List[Any], str]: + def to_parameter_value(self) -> list[Any] | str: """Return the value to be submitted to CloudFormation.""" return self.value @@ -110,17 +95,17 @@ def build_parameter(name: str, properties: BlueprintVariableTypeDef) -> Paramete Returns: The created parameter object. - """ # noqa: E501 + """ param = Parameter(name, Type=properties.get("type")) for name_, attr in PARAMETER_PROPERTIES.items(): if name_ in properties: - setattr(param, attr, properties[name_]) # type: ignore + setattr(param, attr, properties[name_]) return param def validate_variable_type( var_name: str, - var_type: Union[Type[CFNType], TroposphereType[Any], type], + var_type: type[CFNType] | TroposphereType[Any] | type, value: Any, ) -> Any: """Ensure the value is the correct variable type. @@ -143,21 +128,18 @@ def validate_variable_type( try: value = var_type.create(value) except Exception as exc: - raise ValidatorError( - var_name, f"{var_type.resource_name}.create", value, exc - ) from exc + raise ValidatorError(var_name, f"{var_type.resource_name}.create", value, exc) from exc elif issubclass(var_type, CFNType): value = CFNParameter(name=var_name, value=value) - else: - if not isinstance(value, var_type): - raise TypeError( - f"Value for variable {var_name} must be of type {var_type}. Actual " - f"type: {type(value)}" - ) + elif not isinstance(value, var_type): + raise TypeError( + f"Value for variable {var_name} must be of type {var_type}. 
Actual " + f"type: {type(value)}" + ) return value -def validate_allowed_values(allowed_values: Optional[List[Any]], value: Any) -> bool: +def validate_allowed_values(allowed_values: list[Any] | None, value: Any) -> bool: """Support a variable defining which values it allows. Args: @@ -177,7 +159,7 @@ def validate_allowed_values(allowed_values: Optional[List[Any]], value: Any) -> def resolve_variable( var_name: str, var_def: BlueprintVariableTypeDef, - provided_variable: Optional[Variable], + provided_variable: Variable | None, blueprint_name: str, ) -> Any: """Resolve a provided variable value against the variable definition. @@ -203,10 +185,9 @@ def resolve_variable( original exception. """ - try: - var_type = var_def["type"] - except KeyError: - raise VariableTypeRequired(blueprint_name, var_name) from None + if "type" not in var_def: + raise VariableTypeRequired(blueprint_name, var_name) + var_type = var_def["type"] if provided_variable: if not provided_variable.resolved: @@ -216,10 +197,9 @@ def resolve_variable( else: # Variable value not provided, try using the default, if it exists # in the definition - try: - value = var_def["default"] - except KeyError: - raise MissingVariable(blueprint_name, var_name) from None + if "default" not in var_def: + raise MissingVariable(blueprint_name, var_name) + value = var_def["default"] # If no validator, return the value as is, otherwise apply validator validator = var_def.get("validator", lambda v: v) @@ -241,9 +221,7 @@ def resolve_variable( return value -def parse_user_data( - variables: Dict[str, Any], raw_user_data: str, blueprint_name: str -) -> str: +def parse_user_data(variables: dict[str, Any], raw_user_data: str, blueprint_name: str) -> str: """Parse the given user data and renders it as a template. It supports referencing template variables to create userdata @@ -271,7 +249,7 @@ def parse_user_data( is not given in the blueprint """ - variable_values: Dict[str, Any] = {} + variable_values: dict[str, Any] = {} for key, value in variables.items(): if isinstance(value, CFNParameter): @@ -311,11 +289,11 @@ class Blueprint(DelCachedPropMixin): """ - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = {} + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = {} context: CfnginContext - description: Optional[str] - mappings: Optional[Dict[str, Dict[str, Any]]] + description: str | None + mappings: dict[str, dict[str, Any]] | None name: str template: Template @@ -324,11 +302,11 @@ def __init__( name: str, context: CfnginContext, *, - description: Optional[str] = None, - mappings: Optional[Dict[str, Dict[str, Any]]] = None, - template: Optional[Template] = None, + description: str | None = None, + mappings: dict[str, dict[str, Any]] | None = None, + template: Template | None = None, **_: Any, - ): + ) -> None: """Instantiate class. Args: @@ -349,7 +327,7 @@ def __init__( """ self._rendered = None - self._resolved_variables: Optional[Dict[str, Any]] = None + self._resolved_variables: dict[str, Any] | None = None self._version = None self.context = context self.description = description @@ -363,12 +341,12 @@ def __init__( "deprecated PARAMETERS or " "LOCAL_PARAMETERS, rather than VARIABLES. " "Please update your blueprints. See " - "https://docs.onica.com/projects/runway/page/cfngin/blueprints.html#variables " + "https://runway.readthedocs.io/page/cfngin/blueprints.html#variables " "for additional information." 
) @cached_property - def cfn_parameters(self) -> Dict[str, Union[List[Any], str]]: + def cfn_parameters(self) -> dict[str, list[Any] | str]: """Return a dict of variables with type :class:`~runway.cfngin.blueprints.variables.types.CFNType`. .. versionadded:: 2.0.0 @@ -376,8 +354,8 @@ def cfn_parameters(self) -> Dict[str, Union[List[Any], str]]: Returns: Variables that need to be submitted as CloudFormation Parameters. - """ # noqa - output: Dict[str, Union[List[Any], str]] = {} + """ + output: dict[str, list[Any] | str] = {} for key, value in self.variables.items(): if hasattr(value, "to_parameter_value"): output[key] = value.to_parameter_value() @@ -388,7 +366,7 @@ def create_template(self) -> None: raise NotImplementedError @property - def defined_variables(self) -> Dict[str, BlueprintVariableTypeDef]: + def defined_variables(self) -> dict[str, BlueprintVariableTypeDef]: """Return a copy of :attr:`VARIABLES` to avoid accidental modification of the ClassVar. .. versionchanged:: 2.0.0 @@ -398,7 +376,7 @@ def defined_variables(self) -> Dict[str, BlueprintVariableTypeDef]: return copy.deepcopy(self.VARIABLES) @property - def output_definitions(self) -> Dict[str, Dict[str, Any]]: + def output_definitions(self) -> dict[str, dict[str, Any]]: """Get the output definitions. .. versionadded:: 2.0.0 @@ -411,7 +389,7 @@ def output_definitions(self) -> Dict[str, Dict[str, Any]]: return {k: output.to_dict() for k, output in self.template.outputs.items()} @cached_property - def parameter_definitions(self) -> Dict[str, BlueprintVariableTypeDef]: + def parameter_definitions(self) -> dict[str, BlueprintVariableTypeDef]: """Get the parameter definitions to submit to CloudFormation. Any variable definition whose type is an instance of @@ -425,7 +403,7 @@ def parameter_definitions(self) -> Dict[str, BlueprintVariableTypeDef]: containing key/values for various parameter properties. """ - output: Dict[str, BlueprintVariableTypeDef] = {} + output: dict[str, BlueprintVariableTypeDef] = {} for var_name, attrs in self.defined_variables.items(): var_type = attrs.get("type") if isinstance(var_type, type) and issubclass(var_type, CFNType): @@ -435,7 +413,7 @@ def parameter_definitions(self) -> Dict[str, BlueprintVariableTypeDef]: return output @cached_property - def parameter_values(self) -> Dict[str, Union[List[Any], str]]: + def parameter_values(self) -> dict[str, list[Any] | str]: """Return a dict of variables with type :class:`~runway.cfngin.blueprints.variables.types.CFNType`. .. versionadded:: 2.0.0 @@ -444,8 +422,8 @@ def parameter_values(self) -> Dict[str, Union[List[Any], str]]: Variables that need to be submitted as CloudFormation Parameters. Will be a dictionary of : . - """ # noqa - output: Dict[str, Any] = {} + """ + output: dict[str, Any] = {} for key, value in self.variables.items(): try: output[key] = value.to_parameter_value() @@ -461,7 +439,7 @@ def rendered(self) -> str: return self._rendered @cached_property - def required_parameter_definitions(self) -> Dict[str, BlueprintVariableTypeDef]: + def required_parameter_definitions(self) -> dict[str, BlueprintVariableTypeDef]: """Return all template parameters that do not have a default value. .. versionadded:: 2.0.0 @@ -483,7 +461,7 @@ def requires_change_set(self) -> bool: return self.template.transform is not None @property - def variables(self) -> Dict[str, Any]: + def variables(self) -> dict[str, Any]: """Return a Dict of variables available to the Template. 
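The duck typing the cfn_parameters property above relies on, as a standalone sketch -- CFNParameter is pared down here to the one relevant method, so this is illustrative rather than the runway implementation:

class CFNParameter:
    def __init__(self, name, value):
        self.name, self.value = name, value

    def to_parameter_value(self):
        return self.value

def cfn_parameters(variables: dict) -> dict:
    # only values that know how to render themselves as CloudFormation
    # Parameters are surfaced; plain Python values stay template-side
    return {
        key: value.to_parameter_value()
        for key, value in variables.items()
        if hasattr(value, "to_parameter_value")
    }

print(cfn_parameters({"Env": CFNParameter("Env", "prod"), "Count": 3}))  # {'Env': 'prod'}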
These variables will have been defined within :attr:`VARIABLES` or @@ -504,7 +482,7 @@ def variables(self) -> Dict[str, Any]: return self._resolved_variables @variables.setter - def variables(self, value: Dict[str, Any]) -> None: + def variables(self, value: dict[str, Any]) -> None: """Setter for :meth:`variables`. .. versionadded:: 2.0.0 @@ -533,7 +511,7 @@ def add_output(self, name: str, value: Any) -> None: """ self.template.add_output(Output(name, Value=value)) - def get_cfn_parameters(self) -> Dict[str, Union[List[Any], str]]: + def get_cfn_parameters(self) -> dict[str, list[Any] | str]: """Return a dictionary of variables with `type` :class:`CFNType`. .. deprecated:: 2.0.0 @@ -549,7 +527,7 @@ def get_cfn_parameters(self) -> Dict[str, Union[List[Any], str]]: ) return self.cfn_parameters - def get_output_definitions(self) -> Dict[str, Dict[str, Any]]: + def get_output_definitions(self) -> dict[str, dict[str, Any]]: """Get the output definitions. .. deprecated:: 2.0.0 @@ -566,7 +544,7 @@ def get_output_definitions(self) -> Dict[str, Dict[str, Any]]: ) return self.output_definitions - def get_parameter_definitions(self) -> Dict[str, BlueprintVariableTypeDef]: + def get_parameter_definitions(self) -> dict[str, BlueprintVariableTypeDef]: """Get the parameter definitions to submit to CloudFormation. Any variable definition whose `type` is an instance of @@ -587,7 +565,7 @@ def get_parameter_definitions(self) -> Dict[str, BlueprintVariableTypeDef]: ) return self.parameter_definitions - def get_parameter_values(self) -> Dict[str, Union[List[Any], str]]: + def get_parameter_values(self) -> dict[str, list[Any] | str]: """Return a dict of variables with type :class:`~runway.cfngin.blueprints.variables.types.CFNType`. .. deprecated:: 2.0.0 @@ -597,14 +575,14 @@ def get_parameter_values(self) -> Dict[str, Union[List[Any], str]]: Variables that need to be submitted as CloudFormation Parameters. Will be a dictionary of <parameter name>: <parameter value>. - """ # noqa + """ LOGGER.warning( "%s.get_parameter_values is deprecated and will be removed in a future release", self.__class__.__name__, ) return self.parameter_values - def get_required_parameter_definitions(self) -> Dict[str, BlueprintVariableTypeDef]: + def get_required_parameter_definitions(self) -> dict[str, BlueprintVariableTypeDef]: """Return all template parameters that do not have a default value. .. deprecated:: 2.0.0 @@ -622,7 +600,7 @@ def get_required_parameter_definitions(self) -> Dict[str, BlueprintVariableTypeD ) return self.required_parameter_definitions - def get_variables(self) -> Dict[str, Any]: + def get_variables(self) -> dict[str, Any]: """Return a dictionary of variables available to the template.
These variables will have been defined within `VARIABLES` or @@ -664,7 +642,7 @@ def read_user_data(self, user_data_path: str) -> str: raw_user_data = read_value_from_path(user_data_path) return parse_user_data(self.variables, raw_user_data, self.name) - def render_template(self) -> Tuple[str, str]: + def render_template(self) -> tuple[str, str]: """Render the Blueprint to a CloudFormation template.""" self.import_mappings() self.create_template() @@ -672,7 +650,7 @@ def render_template(self) -> Tuple[str, str]: self.set_template_description(self.description) self.setup_parameters() rendered = self.template.to_json(indent=self.context.template_indent) - version = hashlib.md5(rendered.encode()).hexdigest()[:8] + version = hashlib.md5(rendered.encode()).hexdigest()[:8] # noqa: S324 return version, rendered def reset_template(self) -> None: @@ -681,7 +659,7 @@ def reset_template(self) -> None: self._rendered = None self._version = None - def resolve_variables(self, provided_variables: List[Variable]) -> None: + def resolve_variables(self, provided_variables: list[Variable]) -> None: """Resolve the values of the blueprint variables. This will resolve the values of the `VARIABLES` with values from the @@ -694,9 +672,7 @@ def resolve_variables(self, provided_variables: List[Variable]) -> None: self._resolved_variables = {} variable_dict = {var.name: var for var in provided_variables} for var_name, var_def in self.defined_variables.items(): - value = resolve_variable( - var_name, var_def, variable_dict.get(var_name), self.name - ) + value = resolve_variable(var_name, var_def, variable_dict.get(var_name), self.name) self._resolved_variables[var_name] = value def set_template_description(self, description: str) -> None: @@ -720,14 +696,14 @@ def setup_parameters(self) -> None: built_param = build_parameter(name, attrs) template.add_parameter(built_param) - def to_json(self, variables: Optional[Dict[str, Any]] = None) -> str: + def to_json(self, variables: dict[str, Any] | None = None) -> str: """Render the blueprint and return the template in json form. Args: variables: Dictionary providing/overriding variable values. 
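        .. rubric:: Example

        An illustrative sketch (``MyBlueprint`` and the variable name here are hypothetical, not part of this module):

        .. code-block:: python

            rendered = MyBlueprint("example", context).to_json({"SomeVariable": "value"})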
""" - variables_to_resolve: List[Variable] = [] + variables_to_resolve: list[Variable] = [] if variables: for key, value in variables.items(): variables_to_resolve.append(Variable(key, value, "cfngin")) @@ -736,7 +712,7 @@ def to_json(self, variables: Optional[Dict[str, Any]] = None) -> str: # The provided value for a CFN parameter has no effect in this # context (generating the CFN template), so any string can be # provided for its value - just needs to be something - variables_to_resolve.append(Variable(k, "unused_value", "cfngin")) + variables_to_resolve.append(Variable(k, "unused_value", "cfngin")) # noqa: PERF401 self.resolve_variables(variables_to_resolve) return self.render_template()[1] diff --git a/runway/cfngin/blueprints/cfngin_bucket.py b/runway/cfngin/blueprints/cfngin_bucket.py index 2e528d302..e34979e93 100644 --- a/runway/cfngin/blueprints/cfngin_bucket.py +++ b/runway/cfngin/blueprints/cfngin_bucket.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, ClassVar, Dict, Union +from typing import TYPE_CHECKING, ClassVar from troposphere import Equals, If, Not, NoValue, Or, Tag, Tags, s3 @@ -21,7 +21,7 @@ class CfnginBucket(Blueprint): """CFNgin Bucket Blueprint.""" DESCRIPTION: ClassVar[str] = f"{__name__}.CFNginBucket (v{__version__})" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "AccessControl": { "allowed_values": [ "AuthenticatedRead", @@ -66,13 +66,11 @@ def bucket(self) -> s3.Bucket: self.add_output("BucketArn", bucket.get_att("Arn")) self.add_output("BucketDomainName", bucket.get_att("DomainName")) self.add_output("BucketName", bucket.ref()) - self.add_output( - "BucketRegionalDomainName", bucket.get_att("RegionalDomainName") - ) + self.add_output("BucketRegionalDomainName", bucket.get_att("RegionalDomainName")) return bucket @cached_property - def bucket_encryption(self) -> Union[AWSHelperFn, s3.BucketEncryption]: + def bucket_encryption(self) -> AWSHelperFn | s3.BucketEncryption: """CFNgin bucket encryption. This cached property can be overridden in a subclass to customize the diff --git a/runway/cfngin/blueprints/raw.py b/runway/cfngin/blueprints/raw.py index 8a5e141a2..a7637a43f 100644 --- a/runway/cfngin/blueprints/raw.py +++ b/runway/cfngin/blueprints/raw.py @@ -5,10 +5,9 @@ import hashlib import json import logging -import os import sys from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any from jinja2 import Environment, FileSystemLoader @@ -24,7 +23,7 @@ LOGGER = logging.getLogger(__name__) -def get_template_path(file_path: Path) -> Optional[Path]: +def get_template_path(file_path: Path) -> Path | None: """Find raw template in working directory or in sys.path. template_path from config may refer to templates co-located with the CFNgin @@ -32,7 +31,7 @@ def get_template_path(file_path: Path) -> Optional[Path]: loading to find the path to the template. Args: - filename: Template path. + file_path: Template path. Returns: Path to file, or None if no file found @@ -47,7 +46,7 @@ def get_template_path(file_path: Path) -> Optional[Path]: return None -def resolve_variable(provided_variable: Optional[Variable], blueprint_name: str) -> Any: +def resolve_variable(provided_variable: Variable | None, blueprint_name: str) -> Any: """Resolve a provided variable value against the variable definition. 
This acts as a subset of resolve_variable logic in the base module, leaving @@ -73,7 +72,7 @@ def resolve_variable(provided_variable: Optional[Variable], blueprint_name: str) return value -class RawTemplateBlueprint(Blueprint): # pylint: disable=abstract-method +class RawTemplateBlueprint(Blueprint): """Blueprint class for blueprints auto-generated from raw templates. Attributes: @@ -89,13 +88,13 @@ class RawTemplateBlueprint(Blueprint): # pylint: disable=abstract-method raw_template_path: Path - def __init__( # pylint: disable=super-init-not-called + def __init__( self, name: str, context: CfnginContext, *, - description: Optional[str] = None, - mappings: Optional[Dict[str, Any]] = None, + description: str | None = None, + mappings: dict[str, Any] | None = None, raw_template_path: Path, **_: Any, ) -> None: @@ -116,7 +115,7 @@ def __init__( # pylint: disable=super-init-not-called self.raw_template_path = raw_template_path @property - def output_definitions(self) -> Dict[str, Dict[str, Any]]: + def output_definitions(self) -> dict[str, dict[str, Any]]: """Get the output definitions. .. versionadded:: 2.0.0 @@ -129,7 +128,7 @@ def output_definitions(self) -> Dict[str, Dict[str, Any]]: return self.to_dict().get("Outputs", {}) @cached_property - def parameter_definitions(self) -> Dict[str, Any]: + def parameter_definitions(self) -> dict[str, Any]: """Get the parameter definitions to submit to CloudFormation. .. versionadded:: 2.0.0 @@ -142,7 +141,7 @@ def parameter_definitions(self) -> Dict[str, Any]: return self.to_dict().get("Parameters", {}) @cached_property - def parameter_values(self) -> Dict[str, Union[List[Any], str]]: + def parameter_values(self) -> dict[str, list[Any] | str]: """Return a dict of variables with type :class:`~runway.cfngin.blueprints.variables.types.CFNType`. .. versionadded:: 2.0.0 @@ -151,25 +150,21 @@ def parameter_values(self) -> Dict[str, Union[List[Any], str]]: Variables that need to be submitted as CloudFormation Parameters. Will be a dictionary of ``<parameter name>: <parameter value>``.
- """ # noqa + """ return self._resolved_variables or {} @property def rendered(self) -> str: - """Return (generating first if needed) rendered template.""" + """Return (generating first if needed) rendered Template.""" if not self._rendered: template_path = get_template_path(self.raw_template_path) if template_path: - if len(os.path.splitext(template_path)) == 2 and ( - os.path.splitext(template_path)[1] == ".j2" - ): + if template_path.suffix == ".j2": self._rendered = ( - Environment( - loader=FileSystemLoader( - searchpath=os.path.dirname(template_path) - ) + Environment( # noqa: S701 + loader=FileSystemLoader(searchpath=template_path.parent) ) - .get_template(os.path.basename(template_path)) + .get_template(template_path.name) .render( context=self.context, mappings=self.mappings, @@ -178,10 +173,10 @@ def rendered(self) -> str: ) ) else: - with open(template_path, "r", encoding="utf-8") as template: + with template_path.open(encoding="utf-8") as template: self._rendered = template.read() else: - raise InvalidConfig(f"Could not find template {self.raw_template_path}") + raise InvalidConfig(f"Could not find Template {self.raw_template_path}") # clear cached properties that rely on this property self._del_cached_property("parameter_definitions") @@ -196,10 +191,10 @@ def requires_change_set(self) -> bool: def version(self) -> str: """Return (generating first if needed) version hash.""" if not self._version: - self._version = hashlib.md5(self.rendered.encode()).hexdigest()[:8] + self._version = hashlib.md5(self.rendered.encode()).hexdigest()[:8] # noqa: S324 return self._version - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: """Return the template as a python dictionary. Returns: @@ -208,7 +203,7 @@ def to_dict(self) -> Dict[str, Any]: """ return parse_cloudformation_template(self.rendered) - def to_json(self, variables: Optional[Dict[str, Any]] = None) -> str: + def to_json(self, variables: dict[str, Any] | None = None) -> str: # noqa: ARG002 """Return the template in JSON. Args: @@ -218,11 +213,11 @@ def to_json(self, variables: Optional[Dict[str, Any]] = None) -> str: # load -> dumps will produce json from json or yaml templates return json.dumps(self.to_dict(), sort_keys=True, indent=4) - def render_template(self) -> Tuple[str, str]: + def render_template(self) -> tuple[str, str]: """Load template and generate its md5 hash.""" return (self.version, self.rendered) - def resolve_variables(self, provided_variables: List[Variable]) -> None: + def resolve_variables(self, provided_variables: list[Variable]) -> None: """Resolve the values of the blueprint variables. 
This will resolve the values of the template parameters with values @@ -237,7 +232,7 @@ def resolve_variables(self, provided_variables: List[Variable]) -> None: # Pass 1 to set resolved_variables to provided variables self._resolved_variables = {} variable_dict = {var.name: var for var in provided_variables} - for var_name, _var_def in variable_dict.items(): + for var_name in variable_dict: value = resolve_variable(variable_dict.get(var_name), self.name) if value is not None: self._resolved_variables[var_name] = value @@ -248,7 +243,7 @@ def resolve_variables(self, provided_variables: List[Variable]) -> None: defined_variables = self.parameter_definitions.copy() self._resolved_variables = {} variable_dict = {var.name: var for var in provided_variables} - for var_name, _var_def in defined_variables.items(): + for var_name in defined_variables: value = resolve_variable(variable_dict.get(var_name), self.name) if value is not None: self._resolved_variables[var_name] = value diff --git a/runway/cfngin/blueprints/testutil.py b/runway/cfngin/blueprints/testutil.py index b2d943bbd..625cbbaad 100644 --- a/runway/cfngin/blueprints/testutil.py +++ b/runway/cfngin/blueprints/testutil.py @@ -8,7 +8,7 @@ import unittest from glob import glob from pathlib import Path -from typing import TYPE_CHECKING, Any, Iterator, List, Optional, Type, cast +from typing import TYPE_CHECKING, Any, cast from ...config import CfnginConfig from ...context import CfnginContext @@ -16,15 +16,15 @@ from ...variables import Variable if TYPE_CHECKING: + from collections.abc import Iterator + from ...config.models.cfngin import CfnginStackDefinitionModel from .base import Blueprint def diff(first: str, second: str) -> str: """Human readable differ.""" - return "\n".join( - list(difflib.Differ().compare(first.splitlines(), second.splitlines())) - ) + return "\n".join(list(difflib.Differ().compare(first.splitlines(), second.splitlines()))) class BlueprintTestCase(unittest.TestCase): @@ -32,9 +32,7 @@ class BlueprintTestCase(unittest.TestCase): OUTPUT_PATH: str = "tests/fixtures/blueprints" - def assertRenderedBlueprint( # noqa: N802 pylint: disable=invalid-name - self, blueprint: Blueprint - ) -> None: + def assertRenderedBlueprint(self, blueprint: Blueprint) -> None: # noqa: N802 """Test that the rendered blueprint json matches the expected result. 
Result files are to be stored in the repo as @@ -46,18 +44,16 @@ def assertRenderedBlueprint( # noqa: N802 pylint: disable=invalid-name rendered_dict = blueprint.template.to_dict() rendered_text = json.dumps(rendered_dict, indent=4, sort_keys=True) - with open( + with open( # noqa: PTH123 expected_output + "-result", "w", encoding="utf-8" ) as expected_output_file: expected_output_file.write(rendered_text) - with open(expected_output, encoding="utf-8") as expected_output_file: + with open(expected_output, encoding="utf-8") as expected_output_file: # noqa: PTH123 expected_dict = json.loads(expected_output_file.read()) expected_text = json.dumps(expected_dict, indent=4, sort_keys=True) - self.assertEqual( - rendered_dict, expected_dict, diff(rendered_text, expected_text) - ) + assert rendered_dict == expected_dict, diff(rendered_text, expected_text) # noqa: S101 class YamlDirTestGenerator: @@ -107,17 +103,17 @@ class YamlDirTestGenerator: def __init__(self) -> None: """Instantiate class.""" self.classdir = os.path.relpath(self.__class__.__module__.replace(".", "/")) - if not os.path.isdir(self.classdir): - self.classdir = os.path.dirname(self.classdir) + if not os.path.isdir(self.classdir): # noqa: PTH112 + self.classdir = os.path.dirname(self.classdir) # noqa: PTH120 # These properties can be overridden from the test generator subclass. @property - def base_class(self) -> Type[BlueprintTestCase]: + def base_class(self) -> type[BlueprintTestCase]: """Return the baseclass.""" return BlueprintTestCase @property - def yaml_dirs(self) -> List[str]: + def yaml_dirs(self) -> list[str]: """Yaml directories.""" return ["."] @@ -126,22 +122,23 @@ def yaml_filename(self) -> str: """Yaml filename.""" return "test_*.yaml" - # pylint incorrectly detects this def test_generator( self, ) -> Iterator[BlueprintTestCase]: """Test generator.""" # Search for tests in given paths - configs: List[str] = [] + configs: list[str] = [] for directory in self.yaml_dirs: - configs.extend(glob(f"{self.classdir}/{directory}/{self.yaml_filename}")) + configs.extend( + glob(f"{self.classdir}/{directory}/{self.yaml_filename}") # noqa: PTH207 + ) - class ConfigTest(self.base_class): # type: ignore + class ConfigTest(self.base_class): """Config test.""" context: CfnginContext - def __init__( # pylint: disable=super-init-not-called + def __init__( self, config: CfnginConfig, stack: CfnginStackDefinitionModel, @@ -152,23 +149,19 @@ def __init__( # pylint: disable=super-init-not-called self.stack = stack self.description = f"{stack.name} ({filepath})" - def __call__(self) -> None: # pylint: disable=arguments-differ + def __call__(self) -> None: # pyright: ignore[reportIncompatibleMethodOverride] """Run when the class instance is called directly.""" # Use the context property of the baseclass, if present. # If not, default to a basic context. 
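                # A test subclass may define a ``context`` attribute itself; the
                # AttributeError fallback below only builds the default
                # CfnginContext (with parameters={"environment": "test"}) when
                # no such attribute was defined.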
try: ctx = self.context except AttributeError: - ctx = CfnginContext( - config=self.config, parameters={"environment": "test"} - ) + ctx = CfnginContext(config=self.config, parameters={"environment": "test"}) configvars = self.stack.variables or {} variables = [Variable(k, v, "cfngin") for k, v in configvars.items()] - blueprint_class = load_object_from_string( - cast(str, self.stack.class_path) - ) + blueprint_class = load_object_from_string(cast(str, self.stack.class_path)) blueprint = blueprint_class(self.stack.name, ctx) blueprint.resolve_variables(variables or []) blueprint.setup_parameters() @@ -176,14 +169,14 @@ def __call__(self) -> None: # pylint: disable=arguments-differ self.assertRenderedBlueprint(blueprint) def assertEqual( # noqa: N802 - self, first: Any, second: Any, msg: Optional[str] = None + self, first: Any, second: Any, msg: str | None = None ) -> None: """Test that first and second are equal. If the values do not compare equal, the test will fail. """ - assert first == second, msg + assert first == second, msg # noqa: S101 for config_file in configs: config_path = Path(config_file) diff --git a/runway/cfngin/blueprints/type_defs.py b/runway/cfngin/blueprints/type_defs.py index 9bfa9d678..30d5a1469 100644 --- a/runway/cfngin/blueprints/type_defs.py +++ b/runway/cfngin/blueprints/type_defs.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any, Callable, List +from typing import Any, Callable from typing_extensions import TypedDict @@ -17,7 +17,7 @@ class _OptionalBlueprintVariableTypeDef(TypedDict, total=False): """Type definition for runway.cfngin.blueprints.base.Blueprint.VARIABLES items.""" allowed_pattern: str - allowed_values: List[Any] + allowed_values: list[Any] constraint_description: str default: Any description: str @@ -74,4 +74,4 @@ class BlueprintVariableTypeDef( If there is an issue validating the value, an exception (``ValueError``, ``TypeError``, etc) should be raised by the function. - """ # noqa + """ diff --git a/runway/cfngin/blueprints/variables/types.py b/runway/cfngin/blueprints/variables/types.py index ce1ea3f11..6d1e833be 100644 --- a/runway/cfngin/blueprints/variables/types.py +++ b/runway/cfngin/blueprints/variables/types.py @@ -2,19 +2,7 @@ from __future__ import annotations -from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - Dict, - Generic, - List, - Optional, - Type, - TypeVar, - Union, - overload, -) +from typing import TYPE_CHECKING, Any, ClassVar, Generic, TypeVar, overload from troposphere import BaseAWSObject @@ -22,7 +10,6 @@ from typing_extensions import Literal TroposphereT = TypeVar("TroposphereT", bound=BaseAWSObject) -# https://github.com/PyCQA/pylint/issues/6003 class TroposphereType(Generic[TroposphereT]): @@ -46,7 +33,7 @@ class TroposphereType(Generic[TroposphereT]): def __init__( self, - defined_type: Type[TroposphereT], + defined_type: type[TroposphereT], *, many: bool = False, optional: bool = False, @@ -78,7 +65,7 @@ def __init__( self._validate = validate @staticmethod - def _validate_type(defined_type: Type[TroposphereT]) -> None: + def _validate_type(defined_type: type[TroposphereT]) -> None: if not hasattr(defined_type, "from_dict"): raise ValueError("Type must have `from_dict` attribute") @@ -88,17 +75,17 @@ def resource_name(self) -> str: return str(getattr(self._type, "resource_name", None) or self._type.__name__) @overload - def create(self, value: Dict[str, Any]) -> TroposphereT: ... + def create(self, value: dict[str, Any]) -> TroposphereT: ... 
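    # These overloads mirror the ``many``/``optional`` flags: a dict of
    # parameters resolves to a single object, a list of dicts (valid when
    # ``many=True``) resolves to a list of objects, and ``None`` (valid when
    # ``optional=True``) passes through unchanged.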
@overload - def create(self, value: List[Dict[str, Any]]) -> List[TroposphereT]: ... + def create(self, value: list[dict[str, Any]]) -> list[TroposphereT]: ... @overload def create(self, value: None) -> None: ... def create( - self, value: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]] - ) -> Optional[Union[TroposphereT, List[TroposphereT]]]: + self, value: dict[str, Any] | list[dict[str, Any]] | None + ) -> TroposphereT | list[TroposphereT] | None: """Create the troposphere type from the value. Args: @@ -119,33 +106,27 @@ def create( # Our type is a resource, so ensure we have a dict of title to # parameters if not isinstance(value, dict): - raise ValueError( - "Resources must be specified as a dict of title to parameters" - ) + raise ValueError("Resources must be specified as a dict of title to parameters") if not self._many and len(value) > 1: raise ValueError( - "Only one resource can be provided for this " - "TroposphereType variable" + "Only one resource can be provided for this TroposphereType variable" ) result = [self._type.from_dict(title, v) for title, v in value.items()] + elif self._many and isinstance(value, list): + result = [self._type.from_dict(None, v) for v in value] + elif not isinstance(value, dict): + raise ValueError( + "TroposphereType for a single non-resource" + "type must be specified as a dict of " + "parameters" + ) else: - # Our type is for properties, not a resource, so don't use - # titles - if self._many and isinstance(value, list): - result = [self._type.from_dict(None, v) for v in value] - elif not isinstance(value, dict): - raise ValueError( - "TroposphereType for a single non-resource" - "type must be specified as a dict of " - "parameters" - ) - else: - result = [self._type.from_dict(None, value)] + result = [self._type.from_dict(None, value)] if self._validate: for v in result: - v._validate_props() + v._validate_props() # noqa: SLF001 return result[0] if not self._many else result @@ -166,7 +147,7 @@ class CFNType: """ - parameter_type: ClassVar[str] + parameter_type: ClassVar # General CFN types @@ -235,17 +216,13 @@ class EC2ImageId(CFNType): class EC2InstanceId(CFNType): """An Amazon EC2 instance ID, such as i-1e731a32.""" - parameter_type: ClassVar[Literal["AWS::EC2::Instance::Id"]] = ( - "AWS::EC2::Instance::Id" - ) + parameter_type: ClassVar[Literal["AWS::EC2::Instance::Id"]] = "AWS::EC2::Instance::Id" class EC2KeyPairKeyName(CFNType): """An Amazon EC2 key pair name.""" - parameter_type: ClassVar[Literal["AWS::EC2::KeyPair::KeyName"]] = ( - "AWS::EC2::KeyPair::KeyName" - ) + parameter_type: ClassVar[Literal["AWS::EC2::KeyPair::KeyName"]] = "AWS::EC2::KeyPair::KeyName" class EC2SecurityGroupGroupName(CFNType): @@ -259,9 +236,7 @@ class EC2SecurityGroupGroupName(CFNType): class EC2SecurityGroupId(CFNType): """A security group ID, such as sg-a123fd85.""" - parameter_type: ClassVar[Literal["AWS::EC2::SecurityGroup::Id"]] = ( - "AWS::EC2::SecurityGroup::Id" - ) + parameter_type: ClassVar[Literal["AWS::EC2::SecurityGroup::Id"]] = "AWS::EC2::SecurityGroup::Id" class EC2SubnetId(CFNType): @@ -306,9 +281,7 @@ class EC2ImageIdList(CFNType): """ - parameter_type: ClassVar[Literal["List<AWS::EC2::Image::Id>"]] = ( - "List<AWS::EC2::Image::Id>" - ) + parameter_type: ClassVar[Literal["List<AWS::EC2::Image::Id>"]] = "List<AWS::EC2::Image::Id>" class EC2InstanceIdList(CFNType): @@ -338,25 +311,19 @@ class EC2SecurityGroupIdList(CFNType): class EC2SubnetIdList(CFNType): """An array of subnet IDs, such as subnet-123a351e, subnet-456b351e.""" - parameter_type: ClassVar[Literal["List<AWS::EC2::Subnet::Id>"]] = ( - "List<AWS::EC2::Subnet::Id>" - ) + parameter_type:
ClassVar[Literal["List<AWS::EC2::Subnet::Id>"]] = "List<AWS::EC2::Subnet::Id>" class EC2VolumeIdList(CFNType): """An array of Amazon EBS volume IDs, such as vol-3cdd3f56, vol-4cdd3f56.""" - parameter_type: ClassVar[Literal["List<AWS::EC2::Volume::Id>"]] = ( - "List<AWS::EC2::Volume::Id>" - ) + parameter_type: ClassVar[Literal["List<AWS::EC2::Volume::Id>"]] = "List<AWS::EC2::Volume::Id>" class EC2VPCIdList(CFNType): """An array of VPC IDs, such as vpc-a123baa3, vpc-b456baa3.""" - parameter_type: ClassVar[Literal["List<AWS::EC2::VPC::Id>"]] = ( - "List<AWS::EC2::VPC::Id>" - ) + parameter_type: ClassVar[Literal["List<AWS::EC2::VPC::Id>"]] = "List<AWS::EC2::VPC::Id>" class Route53HostedZoneIdList(CFNType): @@ -377,9 +344,7 @@ class SSMParameterName(CFNType): """ - parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Name"]] = ( - "AWS::SSM::Parameter::Name" - ) + parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Name"]] = "AWS::SSM::Parameter::Name" class SSMParameterValueString(CFNType): @@ -413,9 +378,9 @@ class SSMParameterValueCommaDelimitedList(CFNType): """ - parameter_type: ClassVar[ - Literal["AWS::SSM::Parameter::Value<CommaDelimitedList>"] - ] = "AWS::SSM::Parameter::Value<CommaDelimitedList>" + parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<CommaDelimitedList>"]] = ( + "AWS::SSM::Parameter::Value<CommaDelimitedList>" + ) class SSMParameterValueEC2AvailabilityZoneName(CFNType): @@ -429,25 +394,25 @@ class SSMParameterValueEC2AvailabilityZoneName(CFNType): class SSMParameterValueEC2ImageId(CFNType): """A Systems Manager parameter whose value is an AWS-specific parameter type.""" - parameter_type: ClassVar[ - Literal["AWS::SSM::Parameter::Value<AWS::EC2::Image::Id>"] - ] = "AWS::SSM::Parameter::Value<AWS::EC2::Image::Id>" + parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<AWS::EC2::Image::Id>"]] = ( + "AWS::SSM::Parameter::Value<AWS::EC2::Image::Id>" + ) class SSMParameterValueEC2InstanceId(CFNType): """A Systems Manager parameter whose value is an AWS-specific parameter type.""" - parameter_type: ClassVar[ - Literal["AWS::SSM::Parameter::Value<AWS::EC2::Instance::Id>"] - ] = "AWS::SSM::Parameter::Value<AWS::EC2::Instance::Id>" + parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<AWS::EC2::Instance::Id>"]] = ( + "AWS::SSM::Parameter::Value<AWS::EC2::Instance::Id>" + ) class SSMParameterValueEC2KeyPairKeyName(CFNType): """A Systems Manager parameter whose value is an AWS-specific parameter type.""" - parameter_type: ClassVar[ - Literal["AWS::SSM::Parameter::Value<AWS::EC2::KeyPair::KeyName>"] - ] = "AWS::SSM::Parameter::Value<AWS::EC2::KeyPair::KeyName>" + parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<AWS::EC2::KeyPair::KeyName>"]] = ( + "AWS::SSM::Parameter::Value<AWS::EC2::KeyPair::KeyName>" + ) class SSMParameterValueEC2SecurityGroupGroupName(CFNType): @@ -461,33 +426,33 @@ class SSMParameterValueEC2SecurityGroupGroupName(CFNType): class SSMParameterValueEC2SecurityGroupId(CFNType): """A Systems Manager parameter whose value is an AWS-specific parameter type.""" - parameter_type: ClassVar[ - Literal["AWS::SSM::Parameter::Value<AWS::EC2::SecurityGroup::Id>"] - ] = "AWS::SSM::Parameter::Value<AWS::EC2::SecurityGroup::Id>" + parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<AWS::EC2::SecurityGroup::Id>"]] = ( + "AWS::SSM::Parameter::Value<AWS::EC2::SecurityGroup::Id>" + ) class SSMParameterValueEC2SubnetId(CFNType): """A Systems Manager parameter whose value is an AWS-specific parameter type.""" - parameter_type: ClassVar[ - Literal["AWS::SSM::Parameter::Value<AWS::EC2::Subnet::Id>"] - ] = "AWS::SSM::Parameter::Value<AWS::EC2::Subnet::Id>" + parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<AWS::EC2::Subnet::Id>"]] = ( + "AWS::SSM::Parameter::Value<AWS::EC2::Subnet::Id>" + ) class SSMParameterValueEC2VolumeId(CFNType): """A Systems Manager parameter whose value is an AWS-specific parameter type.""" - parameter_type: ClassVar[ - Literal["AWS::SSM::Parameter::Value<AWS::EC2::Volume::Id>"] - ] = "AWS::SSM::Parameter::Value<AWS::EC2::Volume::Id>" + parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value<AWS::EC2::Volume::Id>"]] = ( + "AWS::SSM::Parameter::Value<AWS::EC2::Volume::Id>" + ) class SSMParameterValueEC2VPCId(CFNType): """A Systems Manager parameter whose value is an AWS-specific parameter type.""" - parameter_type: ClassVar[ -
Literal["AWS::SSM::Parameter::Value"] - ] = "AWS::SSM::Parameter::Value" + parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value"]] = ( + "AWS::SSM::Parameter::Value" + ) class SSMParameterValueRoute53HostedZoneId(CFNType): @@ -509,9 +474,9 @@ class SSMParameterValueEC2AvailabilityZoneNameList(CFNType): class SSMParameterValueEC2ImageIdList(CFNType): """A Systems Manager parameter whose value is an AWS-specific parameter type.""" - parameter_type: ClassVar[ - Literal["AWS::SSM::Parameter::Value>"] - ] = "AWS::SSM::Parameter::Value>" + parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value>"]] = ( + "AWS::SSM::Parameter::Value>" + ) class SSMParameterValueEC2InstanceIdList(CFNType): @@ -541,25 +506,25 @@ class SSMParameterValueEC2SecurityGroupIdList(CFNType): class SSMParameterValueEC2SubnetIdList(CFNType): """A Systems Manager parameter whose value is an AWS-specific parameter type.""" - parameter_type: ClassVar[ - Literal["AWS::SSM::Parameter::Value>"] - ] = "AWS::SSM::Parameter::Value>" + parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value>"]] = ( + "AWS::SSM::Parameter::Value>" + ) class SSMParameterValueEC2VolumeIdList(CFNType): """A Systems Manager parameter whose value is an AWS-specific parameter type.""" - parameter_type: ClassVar[ - Literal["AWS::SSM::Parameter::Value>"] - ] = "AWS::SSM::Parameter::Value>" + parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value>"]] = ( + "AWS::SSM::Parameter::Value>" + ) class SSMParameterValueEC2VPCIdList(CFNType): """A Systems Manager parameter whose value is an AWS-specific parameter type.""" - parameter_type: ClassVar[ - Literal["AWS::SSM::Parameter::Value>"] - ] = "AWS::SSM::Parameter::Value>" + parameter_type: ClassVar[Literal["AWS::SSM::Parameter::Value>"]] = ( + "AWS::SSM::Parameter::Value>" + ) class SSMParameterValueRoute53HostedZoneIdList(CFNType): diff --git a/runway/cfngin/cfngin.py b/runway/cfngin/cfngin.py index 0dd876783..fbd9e8597 100644 --- a/runway/cfngin/cfngin.py +++ b/runway/cfngin/cfngin.py @@ -3,9 +3,8 @@ from __future__ import annotations import logging -import os from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, cast +from typing import TYPE_CHECKING, Any, cast from .._logging import PrefixAdaptor from ..compat import cached_property @@ -52,8 +51,8 @@ class and any environment files that are found. def __init__( self, ctx: RunwayContext, - parameters: Optional[Dict[str, Any]] = None, - sys_path: Optional[Path] = None, + parameters: dict[str, Any] | None = None, + sys_path: Path | None = None, ) -> None: """Instantiate class. @@ -84,21 +83,21 @@ def __init__( @cached_property def env_file(self) -> MutableMap: """Contents of a CFNgin environment file.""" - result: Dict[str, Any] = {} + result: dict[str, Any] = {} supported_names = [ f"{self.__ctx.env.name}.env", f"{self.__ctx.env.name}-{self.region}.env", ] for _, file_name in enumerate(supported_names): - file_path = os.path.join(self.sys_path, file_name) - if os.path.isfile(file_path): + file_path = self.sys_path / file_name + if file_path.is_file(): LOGGER.info("found environment file: %s", file_path) self._env_file_name = file_path - with open(file_path, "r", encoding="utf-8") as file_: + with file_path.open(encoding="utf-8") as file_: result.update(parse_environment(file_.read())) return MutableMap(**result) - def deploy(self, force: bool = False, sys_path: Optional[Path] = None) -> None: + def deploy(self, force: bool = False, sys_path: Path | None = None) -> None: """Run the CFNgin deploy action. 
Args: @@ -113,24 +112,20 @@ def deploy(self, force: bool = False, sys_path: Optional[Path] = None) -> None: sys_path = sys_path or self.sys_path config_file_paths = self.find_config_files(sys_path=sys_path) - with SafeHaven( - environ=self.__ctx.env.vars, sys_modules_exclude=["awacs", "troposphere"] - ): + with SafeHaven(environ=self.__ctx.env.vars, sys_modules_exclude=["awacs", "troposphere"]): for config_path in config_file_paths: - logger = PrefixAdaptor(os.path.basename(config_path), LOGGER) + logger = PrefixAdaptor(config_path.name, LOGGER) logger.notice("deploy (in progress)") with SafeHaven(sys_modules_exclude=["awacs", "troposphere"]): ctx = self.load(config_path) action = deploy.Action( context=ctx, - provider_builder=self._get_provider_builder( - ctx.config.service_role - ), + provider_builder=self._get_provider_builder(ctx.config.service_role), ) action.execute(concurrency=self.concurrency, tail=self.tail) logger.success("deploy (complete)") - def destroy(self, force: bool = False, sys_path: Optional[Path] = None) -> None: + def destroy(self, force: bool = False, sys_path: Path | None = None) -> None: """Run the CFNgin destroy action. Args: @@ -155,35 +150,27 @@ def destroy(self, force: bool = False, sys_path: Optional[Path] = None) -> None: ctx = self.load(config_path) action = destroy.Action( context=ctx, - provider_builder=self._get_provider_builder( - ctx.config.service_role - ), - ) - action.execute( - concurrency=self.concurrency, force=True, tail=self.tail + provider_builder=self._get_provider_builder(ctx.config.service_role), ) + action.execute(concurrency=self.concurrency, force=True, tail=self.tail) logger.success("destroy (complete)") - def init(self, force: bool = False, sys_path: Optional[Path] = None) -> None: + def init(self, force: bool = False, sys_path: Path | None = None) -> None: """Initialize environment.""" if self.should_skip(force): return sys_path = sys_path or self.sys_path config_file_paths = self.find_config_files(sys_path=sys_path) - with SafeHaven( - environ=self.__ctx.env.vars, sys_modules_exclude=["awacs", "troposphere"] - ): + with SafeHaven(environ=self.__ctx.env.vars, sys_modules_exclude=["awacs", "troposphere"]): for config_path in config_file_paths: - logger = PrefixAdaptor(os.path.basename(config_path), LOGGER) + logger = PrefixAdaptor(config_path.name, LOGGER) logger.notice("init (in progress)") with SafeHaven(sys_modules_exclude=["awacs", "troposphere"]): ctx = self.load(config_path) action = init.Action( context=ctx, - provider_builder=self._get_provider_builder( - ctx.config.service_role - ), + provider_builder=self._get_provider_builder(ctx.config.service_role), ) action.execute(concurrency=self.concurrency, tail=self.tail) logger.success("init (complete)") @@ -210,7 +197,7 @@ def load(self, config_path: Path) -> CfnginContext: config.load() return self._get_context(config, config_path) - def plan(self, force: bool = False, sys_path: Optional[Path] = None): + def plan(self, force: bool = False, sys_path: Path | None = None) -> None: """Run the CFNgin plan action. 
Args: @@ -232,9 +219,7 @@ def plan(self, force: bool = False, sys_path: Optional[Path] = None): ctx = self.load(config_path) action = diff.Action( context=ctx, - provider_builder=self._get_provider_builder( - ctx.config.service_role - ), + provider_builder=self._get_provider_builder(ctx.config.service_role), ) action.execute() logger.success("plan (complete)") @@ -284,9 +269,7 @@ def _get_context(self, config: CfnginConfig, config_path: Path) -> CfnginContext work_dir=self.__ctx.work_dir, ) - def _get_provider_builder( - self, service_role: Optional[str] = None - ) -> ProviderBuilder: + def _get_provider_builder(self, service_role: str | None = None) -> ProviderBuilder: """Initialize provider builder. Args: @@ -330,8 +313,8 @@ def _inject_common_parameters(self) -> None: @classmethod def find_config_files( - cls, exclude: Optional[List[str]] = None, sys_path: Optional[Path] = None - ) -> List[Path]: + cls, exclude: list[str] | None = None, sys_path: Path | None = None + ) -> list[Path]: """Find CFNgin config files. Args: diff --git a/runway/cfngin/dag/__init__.py b/runway/cfngin/dag/__init__.py index 1966f812f..9ccfd7c7e 100644 --- a/runway/cfngin/dag/__init__.py +++ b/runway/cfngin/dag/__init__.py @@ -4,25 +4,16 @@ import collections import collections.abc +import contextlib import logging +from collections import OrderedDict from copy import copy, deepcopy from threading import Thread -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Iterable, - List, - OrderedDict, - Set, - Tuple, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, Callable, cast if TYPE_CHECKING: import threading + from collections.abc import Iterable LOGGER = logging.getLogger(__name__) @@ -34,7 +25,7 @@ class DAGValidationError(Exception): class DAG: """Directed acyclic graph implementation.""" - graph: OrderedDict[str, Set[str]] + graph: OrderedDict[str, set[str]] def __init__(self) -> None: """Instantiate a new DAG with no nodes or edges.""" @@ -54,7 +45,7 @@ def add_node(self, node_name: str) -> None: graph = self.graph if node_name in graph: raise KeyError(f"node {node_name} already exists") - graph[node_name] = cast(Set[str], set()) + graph[node_name] = cast(set[str], set()) def add_node_if_not_exists(self, node_name: str) -> None: """Add a node if it does not exist yet, ignoring duplicates. @@ -63,10 +54,8 @@ def add_node_if_not_exists(self, node_name: str) -> None: node_name: The name of the node to add. """ - try: + with contextlib.suppress(KeyError): self.add_node(node_name) - except KeyError: - pass def delete_node(self, node_name: str) -> None: """Delete this node and all edges referencing it. @@ -83,7 +72,7 @@ def delete_node(self, node_name: str) -> None: raise KeyError(f"node {node_name} does not exist") graph.pop(node_name) - for _node, edges in graph.items(): + for edges in graph.values(): if node_name in edges: edges.remove(node_name) @@ -97,10 +86,8 @@ def delete_node_if_exists(self, node_name: str) -> None: node_name: The name of the node to delete. """ - try: + with contextlib.suppress(KeyError): self.delete_node(node_name) - except KeyError: - pass def add_edge(self, ind_node: str, dep_node: str) -> None: """Add an edge (dependency) between the specified nodes. 
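A minimal sketch of the DAG API touched by these hunks, using only methods visible in this diff (the node names are made up for illustration):

    from runway.cfngin.dag import DAG

    dag = DAG()
    for name in ("vpc", "bastion", "app"):
        dag.add_node(name)
    dag.add_edge("vpc", "bastion")  # "bastion" depends on "vpc"
    dag.add_edge("vpc", "app")
    dag.ind_nodes()         # -> ["vpc"]; the only node with no dependencies
    dag.topological_sort()  # e.g. ["vpc", "app", "bastion"] (dependencies first)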
@@ -150,7 +137,7 @@ def transpose(self) -> DAG: """Build a new graph with the edges reversed.""" graph = self.graph transposed = DAG() - for node, _edges in graph.items(): + for node in graph: transposed.add_node(node) for node, edges in graph.items(): # for each edge A -> B, transpose it so that B -> A @@ -185,12 +172,12 @@ def transitive_reduction(self) -> None: See https://en.wikipedia.org/wiki/Transitive_reduction """ - combinations: List[List[str]] = [] + combinations: list[list[str]] = [] for node, edges in self.graph.items(): combinations += [[node, edge] for edge in edges] while True: - new_combinations: List[List[str]] = [] + new_combinations: list[list[str]] = [] for comb1 in combinations: for comb2 in combinations: if comb1[-1] != comb2[0]: @@ -221,25 +208,24 @@ def rename_edges(self, old_node_name: str, new_node_name: str) -> None: graph[new_node_name] = copy(edges) del graph[old_node_name] - else: - if old_node_name in edges: - edges.remove(old_node_name) - edges.add(new_node_name) + elif old_node_name in edges: + edges.remove(old_node_name) + edges.add(new_node_name) - def predecessors(self, node: str) -> List[str]: + def predecessors(self, node: str) -> list[str]: """Return a list of all immediate predecessors of the given node. Args: node (str): The node whose predecessors you want to find. Returns: - List[str]: A list of nodes that are immediate predecessors to node. + list[str]: A list of nodes that are immediate predecessors to node. """ graph = self.graph return [key for key in graph if node in graph[key]] - def downstream(self, node: str) -> List[str]: + def downstream(self, node: str) -> list[str]: """Return a list of all nodes this node has edges towards. Args: @@ -254,7 +240,7 @@ def downstream(self, node: str) -> List[str]: raise KeyError(f"node {node} is not in graph") return list(graph[node]) - def all_downstreams(self, node: str) -> List[str]: + def all_downstreams(self, node: str) -> list[str]: """Return a list of all nodes downstream in topological order. Args: @@ -265,7 +251,7 @@ def all_downstreams(self, node: str) -> List[str]: """ nodes = [node] - nodes_seen: Set[str] = set() + nodes_seen: set[str] = set() nodes_iter = nodes for node__ in nodes_iter: downstreams = self.downstream(node__) @@ -275,7 +261,7 @@ def all_downstreams(self, node: str) -> List[str]: nodes.append(downstream_node) return [node_ for node_ in self.topological_sort() if node_ in nodes_seen] - def filter(self, nodes: List[str]) -> DAG: + def filter(self, nodes: list[str]) -> DAG: """Return a new DAG with only the given nodes and their dependencies. Args: @@ -297,12 +283,12 @@ def filter(self, nodes: List[str]) -> DAG: return filtered_dag - def all_leaves(self) -> List[str]: + def all_leaves(self) -> list[str]: """Return a list of all leaves (nodes with no downstreams).""" graph = self.graph return [key for key in graph if not graph[key]] - def from_dict(self, graph_dict: Dict[str, Union[Iterable[str], Any]]) -> None: + def from_dict(self, graph_dict: dict[str, Iterable[str] | Any]) -> None: """Reset the graph and build it from the passed dictionary. 
The dictionary takes the form of {node_name: [directed edges]} @@ -327,7 +313,7 @@ def reset_graph(self) -> None: """Restore the graph to an empty state.""" self.graph = collections.OrderedDict() - def ind_nodes(self) -> List[str]: + def ind_nodes(self) -> list[str]: """Return a list of all nodes in the graph with no dependencies.""" graph = self.graph @@ -335,7 +321,7 @@ def ind_nodes(self) -> List[str]: return [node_ for node_ in graph if node_ not in dependent_nodes] - def validate(self) -> Tuple[bool, str]: + def validate(self) -> tuple[bool, str]: """Return (Boolean, message) of whether DAG is valid.""" if not self.ind_nodes(): return (False, "no independent nodes detected") @@ -345,7 +331,7 @@ def validate(self) -> Tuple[bool, str]: return False, str(err) return True, "valid" - def topological_sort(self) -> List[str]: + def topological_sort(self) -> list[str]: """Return a topological ordering of the DAG. Raises: @@ -359,12 +345,12 @@ def topological_sort(self) -> List[str]: for val in graph[node]: in_degree[val] += 1 - queue: "collections.deque[str]" = collections.deque() + queue: collections.deque[str] = collections.deque() for node, value in in_degree.items(): if value == 0: queue.appendleft(node) - sorted_graph: List[str] = [] + sorted_graph: list[str] = [] while queue: node = queue.pop() sorted_graph.append(node) @@ -404,7 +390,7 @@ def release(self) -> Any: class ThreadedWalker: """Walk a DAG as quickly as the graph topology allows, using threads.""" - def __init__(self, semaphore: Union[threading.Semaphore, UnlimitedSemaphore]): + def __init__(self, semaphore: threading.Semaphore | UnlimitedSemaphore) -> None: """Instantiate class. Args: @@ -431,11 +417,11 @@ def walk(self, dag: DAG, walk_func: Callable[[str], Any]) -> None: nodes.reverse() # This maps a node name to a thread of execution. - threads: Dict[str, Any] = {} + threads: dict[str, Any] = {} # Blocks until all of the given nodes have completed execution (whether # successfully, or errored). - def wait_for(nodes: List[str]): + def wait_for(nodes: list[str]) -> None: """Wait for nodes.""" for node in nodes: thread = threads[node] @@ -447,11 +433,9 @@ def wait_for(nodes: List[str]): # nodes dependencies have executed. for node in nodes: - def _fn(node_: str, deps: List[str]) -> Any: + def _fn(node_: str, deps: list[str]) -> Any: if deps: - LOGGER.debug( - "%s waiting for %s to complete", node_, ", ".join(deps) - ) + LOGGER.debug("%s waiting for %s to complete", node_, ", ".join(deps)) # Wait for all dependencies to complete. wait_for(deps) diff --git a/runway/cfngin/environment.py b/runway/cfngin/environment.py index c9a7c3c73..0cf405a5e 100644 --- a/runway/cfngin/environment.py +++ b/runway/cfngin/environment.py @@ -1,18 +1,18 @@ """CFNgin environment file parsing.""" -from typing import Any, Dict +from typing import Any -def parse_environment(raw_environment: str) -> Dict[str, Any]: +def parse_environment(raw_environment: str) -> dict[str, Any]: """Parse environment file contents. Args: raw_environment: Environment file read into a string.
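    .. rubric:: Example

    An illustrative environment file (the key names shown are hypothetical)::

        namespace: example-dev
        region: us-east-1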
""" - environment: Dict[str, Any] = {} - for line in raw_environment.split("\n"): - line = line.strip() + environment: dict[str, Any] = {} + for raw_line in raw_environment.split("\n"): + line = raw_line.strip() if not line: continue diff --git a/runway/cfngin/exceptions.py b/runway/cfngin/exceptions.py index e1ebee2be..3edd4d06b 100644 --- a/runway/cfngin/exceptions.py +++ b/runway/cfngin/exceptions.py @@ -3,7 +3,7 @@ from __future__ import annotations from pathlib import Path -from typing import TYPE_CHECKING, Any, List, Optional, Union +from typing import TYPE_CHECKING, Any from ..exceptions import RunwayError @@ -74,12 +74,10 @@ def __init__(self, *, bucket_name: str) -> None: class CfnginBucketRequired(CfnginError): """CFNgin bucket is required to use a feature but it not provided/disabled.""" - config_path: Optional[Path] + config_path: Path | None message: str - def __init__( - self, *, config_path: Optional[AnyPath] = None, reason: Optional[str] = None - ) -> None: + def __init__(self, *, config_path: AnyPath | None = None, reason: str | None = None) -> None: """Instantiate class. Args: @@ -106,9 +104,7 @@ class CfnginOnlyLookupError(CfnginError): def __init__(self, lookup_name: str) -> None: """Instantiate class.""" self.lookup_name = lookup_name - self.message = ( - f"attempted to use CFNgin only lookup {lookup_name} outside of CFNgin" - ) + self.message = f"attempted to use CFNgin only lookup {lookup_name} outside of CFNgin" super().__init__() @@ -165,6 +161,8 @@ def __init__(self, kls: Any, error: Exception, *args: Any, **kwargs: Any) -> Non Args: kls: The class that was improperly configured. error: The exception that was raised when trying to use cls. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ self.message = f'Class "{kls}" is improperly configured: {error}' @@ -174,10 +172,10 @@ def __init__(self, kls: Any, error: Exception, *args: Any, **kwargs: Any) -> Non class InvalidConfig(CfnginError): """Provided config file is invalid.""" - errors: Union[str, List[Union[Exception, str]]] + errors: str | list[Exception | str] message: str - def __init__(self, errors: Union[str, List[Union[Exception, str]]]) -> None: + def __init__(self, errors: str | list[Exception | str]) -> None: """Instantiate class. Args: @@ -227,6 +225,8 @@ def __init__( blueprint_name: Name of the blueprint with invalid userdata placeholder. exception_message: Message from the exception that was raised while parsing the userdata. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ self.message = ( @@ -246,6 +246,8 @@ def __init__(self, key: str, *args: Any, **kwargs: Any) -> None: Args: key: The key that was used but doesn't exist in the environment. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ self.key = key @@ -258,17 +260,17 @@ class MissingParameterException(CfnginError): message: str - def __init__(self, parameters: List[str], *args: Any, **kwargs: Any) -> None: + def __init__(self, parameters: list[str], *args: Any, **kwargs: Any) -> None: """Instantiate class. Args: parameters: A list of the parameters that are missing. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. 
""" self.parameters = parameters - self.message = ( - f"Missing required cloudformation parameters: {', '.join(parameters)}" - ) + self.message = f"Missing required cloudformation parameters: {', '.join(parameters)}" super().__init__(*args, **kwargs) @@ -277,19 +279,17 @@ class MissingVariable(CfnginError): message: str - def __init__( - self, blueprint_name: str, variable_name: str, *args: Any, **kwargs: Any - ) -> None: + def __init__(self, blueprint_name: str, variable_name: str, *args: Any, **kwargs: Any) -> None: """Instantiate class. Args: blueprint_name: Name of the blueprint. variable_name: Name of the variable missing a value. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ - self.message = ( - f'Variable "{variable_name}" in blueprint "{blueprint_name}" is missing' - ) + self.message = f'Variable "{variable_name}" in blueprint "{blueprint_name}" is missing' super().__init__(*args, **kwargs) @@ -339,7 +339,7 @@ class PersistentGraphCannotUnlock(CfnginError): message: str - def __init__(self, reason: Union[Exception, str]) -> None: + def __init__(self, reason: Exception | str) -> None: """Instantiate class.""" self.message = f"Could not unlock persistent graph; {reason}" super().__init__() @@ -354,17 +354,12 @@ class PersistentGraphLocked(CfnginError): message: str - def __init__( - self, *, message: Optional[str] = None, reason: Optional[str] = None - ) -> None: + def __init__(self, *, message: str | None = None, reason: str | None = None) -> None: """Instantiate class.""" if message: self.message = message else: - reason = ( - reason - or "This action requires the graph to be unlocked to be executed." - ) + reason = reason or "This action requires the graph to be unlocked to be executed." self.message = f"Persistent graph is locked. {reason}" super().__init__() @@ -379,7 +374,7 @@ class PersistentGraphLockCodeMismatch(CfnginError): message: str - def __init__(self, provided_code: str, s3_code: Optional[str]) -> None: + def __init__(self, provided_code: str, s3_code: str | None) -> None: """Instantiate class.""" self.message = ( f"The provided lock code '{provided_code}' does not match the S3 " @@ -397,16 +392,12 @@ class PersistentGraphUnlocked(CfnginError): message: str - def __init__( - self, message: Optional[str] = None, reason: Optional[str] = None - ) -> None: + def __init__(self, message: str | None = None, reason: str | None = None) -> None: """Instantiate class.""" if message: self.message = message else: - reason = ( - reason or "This action requires the graph to be locked to be executed." - ) + reason = reason or "This action requires the graph to be locked to be executed." self.message = f"Persistent graph is unlocked. {reason}" super().__init__() @@ -416,11 +407,13 @@ class PlanFailed(CfnginError): message: str - def __init__(self, failed_steps: List[Step], *args: Any, **kwargs: Any) -> None: + def __init__(self, failed_steps: list[Step], *args: Any, **kwargs: Any) -> None: """Instantiate class. Args: failed_steps: The steps that failed. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ self.failed_steps = failed_steps @@ -447,6 +440,8 @@ def __init__(self, stack_name: str, *args: Any, **kwargs: Any) -> None: Args: stack_name: Name of the stack that does not exist. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ self.message = ( @@ -470,6 +465,8 @@ def __init__( stack_name: Name of the stack. stack_status: The stack's status. 
reason: The reason for the current status. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ self.stack_name = stack_name @@ -491,7 +488,7 @@ class StackFailed(CfnginError): message: str - def __init__(self, stack_name: str, status_reason: Optional[str] = None) -> None: + def __init__(self, stack_name: str, status_reason: str | None = None) -> None: """Instantiate class. Args: @@ -513,9 +510,7 @@ class UnableToExecuteChangeSet(CfnginError): message: str - def __init__( - self, stack_name: str, change_set_id: str, execution_status: str - ) -> None: + def __init__(self, stack_name: str, change_set_id: str, execution_status: str) -> None: """Instantiate class. Args: @@ -575,20 +570,19 @@ class UnresolvedBlueprintVariable(CfnginError): message: str - def __init__( - self, blueprint_name: str, variable: Variable, *args: Any, **kwargs: Any - ) -> None: + def __init__(self, blueprint_name: str, variable: Variable, *args: Any, **kwargs: Any) -> None: """Instantiate class. Args: blueprint_name: Name of the blueprint that tried to use the unresolved variables. variable: The unresolved variable. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ self.message = ( - f'Variable "{variable.name}" in blueprint "{blueprint_name}" ' - "hasn't been resolved" + f'Variable "{variable.name}" in blueprint "{blueprint_name}" hasn\'t been resolved' ) super().__init__(*args, **kwargs) @@ -604,6 +598,8 @@ def __init__(self, blueprint_name: str, *args: Any, **kwargs: Any) -> None: Args: blueprint_name: Name of the blueprint that tried to use the unresolved variables. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ self.message = f"Blueprint: \"{blueprint_name}\" hasn't resolved its variables" super().__init__(*args, **kwargs) @@ -620,7 +616,7 @@ def __init__( variable: str, validator: str, value: str, - exception: Optional[Exception] = None, + exception: Exception | None = None, ) -> None: """Instantiate class. @@ -641,12 +637,10 @@ def __init__( ) if self.exception: - self.message += ( - f": {self.exception.__class__.__name__}: {str(self.exception)}" - ) + self.message += f": {self.exception.__class__.__name__}: {self.exception!s}" super().__init__() - def __str__(self): + def __str__(self) -> str: """Return the exception's message when converting to a string.""" return self.message @@ -656,18 +650,17 @@ class VariableTypeRequired(CfnginError): message: str - def __init__( - self, blueprint_name: str, variable_name: str, *args: Any, **kwargs: Any - ) -> None: + def __init__(self, blueprint_name: str, variable_name: str, *args: Any, **kwargs: Any) -> None: """Instantiate class. Args: blueprint_name: Name of the blueprint. variable_name: Name of the variable missing a type. + *args: Variable length argument list. + **kwargs:
""" self.message = ( - f'Variable "{variable_name}" in blueprint "{blueprint_name}" ' - "does not have a type" + f'Variable "{variable_name}" in blueprint "{blueprint_name}" does not have a type' ) super().__init__(*args, **kwargs) diff --git a/runway/cfngin/hooks/acm.py b/runway/cfngin/hooks/acm.py index fb1cc22cc..347775cbe 100644 --- a/runway/cfngin/hooks/acm.py +++ b/runway/cfngin/hooks/acm.py @@ -4,12 +4,11 @@ import logging import time -from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Type +from typing import TYPE_CHECKING, Any, ClassVar from botocore.exceptions import ClientError from troposphere import Ref from troposphere.certificatemanager import Certificate as CertificateResource -from typing_extensions import Literal from ...utils import MutableMap from ..blueprints.variables.types import CFNString @@ -23,11 +22,10 @@ from mypy_boto3_acm.type_defs import ResourceRecordTypeDef from mypy_boto3_route53.client import Route53Client from mypy_boto3_route53.type_defs import ChangeTypeDef + from typing_extensions import Literal from ...context import CfnginContext - from ..blueprints.base import Blueprint from ..providers.aws.default import Provider - from ..stack import Stack from ..status import Status LOGGER = logging.getLogger(__name__) @@ -36,10 +34,10 @@ class HookArgs(HookArgsBaseModel): """Hook arguments.""" - alt_names: List[str] = [] + alt_names: list[str] = [] domain: str hosted_zone_id: str - stack_name: Optional[str] = None + stack_name: str | None = None ttl: int = 300 @@ -47,7 +45,7 @@ class Certificate(Hook): r"""Hook for managing a **AWS::CertificateManager::Certificate**. Keyword Args: - alt_names (Optional[List[str]]): Additional FQDNs to be included in the + alt_names (list[str]): Additional FQDNs to be included in the Subject Alternative Name extension of the ACM certificate. For example, you can add www.example.net to a certificate for which the domain field is www.example.com if users can reach your site by @@ -61,10 +59,10 @@ class Certificate(Hook): hosted_zone_id (str): The ID of the Route 53 Hosted Zone that contains the resource record sets that you want to change. This must exist in the same account that the certificate will be created in. - stack_name (Optional[str]): Provide a name for the stack used to + stack_name (str | None): Provide a name for the stack used to create the certificate. If not provided, the domain is used (replacing ``.`` with ``-``). - ttl (Optional[int]): The resource record cache time to live (TTL), + ttl (int): The resource record cache time to live (TTL), in seconds. (*default:* ``300``) .. rubric:: Example @@ -80,23 +78,20 @@ class Certificate(Hook): """ - ARGS_PARSER: ClassVar[Type[HookArgs]] = HookArgs + ARGS_PARSER: ClassVar[type[HookArgs]] = HookArgs acm_client: ACMClient args: HookArgs - blueprint: Blueprint r53_client: Route53Client - stack: Stack template_description: str - def __init__( - self, context: CfnginContext, provider: Provider, **kwargs: Any - ) -> None: + def __init__(self, context: CfnginContext, provider: Provider, **kwargs: Any) -> None: """Instantiate class. Args: context: Context instance. (passed in by CFNgin) provider: Provider instance. (passed in by CFNgin) + **kwargs: Arbitrary keyword arguments. 
""" super().__init__(context, provider, **kwargs) @@ -105,12 +100,10 @@ def __init__( self.stack_name = self.args.stack_name or self.args.domain.replace(".", "-") self.properties = MutableMap( - **{ - "DomainName": self.args.domain, - "SubjectAlternativeNames": self.args.alt_names, - "Tags": self.tags, - "ValidationMethod": "DNS", - } + DomainName=self.args.domain, + SubjectAlternativeNames=self.args.alt_names, + Tags=self.tags, + ValidationMethod="DNS", ) self.blueprint = self._create_blueprint() @@ -124,7 +117,7 @@ def __init__( } ) - def _create_blueprint(self) -> Blueprint: + def _create_blueprint(self) -> BlankBlueprint: """Create CFNgin Blueprint.""" var_description = ( "NO NOT CHANGE MANUALLY! Used to track the " @@ -155,25 +148,21 @@ class _BlankBlueprint(BlankBlueprint): def domain_changed(self) -> bool: """Check to ensure domain has not changed for existing stack.""" + if not self.stack: # cov: ignore + raise NotImplementedError("stack not present on hook") try: stack_info = self.provider.get_stack(self.stack.fqn) if self.provider.is_stack_recreatable(stack_info): - LOGGER.debug( - "stack is in a recreatable state; domain change does not matter" - ) + LOGGER.debug("stack is in a recreatable state; domain change does not matter") return False if self.provider.is_stack_in_progress( stack_info ) or self.provider.is_stack_rolling_back(stack_info): LOGGER.debug("stack is in progress; can't check for domain change") return False - if ( - self.args.domain - != self.provider.get_outputs(self.stack.fqn)["DomainName"] - ): + if self.args.domain != self.provider.get_outputs(self.stack.fqn)["DomainName"]: LOGGER.error( - '"domain" can\'t be changed for existing ' - 'certificate in stack "%s"', + '"domain" can\'t be changed for existing certificate in stack "%s"', self.stack.fqn, ) return True @@ -196,20 +185,21 @@ def get_certificate(self, interval: int = 5) -> str: Certificate ARN. """ + if not self.stack: # cov: ignore + raise NotImplementedError("stack not present on hook") response = self.provider.cloudformation.describe_stack_resources( StackName=self.stack.fqn, LogicalResourceId="Certificate" )["StackResources"] - if response: - # can be returned without having a PhysicalResourceId - if "PhysicalResourceId" in response[0]: - return response[0]["PhysicalResourceId"] + # can be returned without having a PhysicalResourceId + if response and "PhysicalResourceId" in response[0]: + return response[0]["PhysicalResourceId"] LOGGER.debug("waiting for certificate to be created...") time.sleep(interval) return self.get_certificate(interval=interval) def get_validation_record( self, - cert_arn: Optional[str] = None, + cert_arn: str | None = None, *, interval: int = 5, status: str = "PENDING_VALIDATION", @@ -228,25 +218,18 @@ def get_validation_record( """ if not cert_arn: cert_arn = self.get_certificate() - cert = self.acm_client.describe_certificate(CertificateArn=cert_arn).get( - "Certificate", {} - ) + cert = self.acm_client.describe_certificate(CertificateArn=cert_arn).get("Certificate", {}) - try: - domain_validation = [ - opt - for opt in cert["DomainValidationOptions"] - if opt["ValidationStatus"] == status - ] - except KeyError: + if "DomainValidationOptions" not in cert: LOGGER.debug( - "waiting for DomainValidationOptions to become " - "available for the certificate..." + "waiting for DomainValidationOptions to become available for the certificate..." 
            )
            time.sleep(interval)
-            return self.get_validation_record(
-                cert_arn=cert_arn, interval=interval, status=status
-            )
+            return self.get_validation_record(cert_arn=cert_arn, interval=interval, status=status)
+
+        domain_validation = [
+            opt for opt in cert["DomainValidationOptions"] if opt.get("ValidationStatus") == status
+        ]

        if not domain_validation:
            raise ValueError(
@@ -257,18 +240,14 @@ def get_validation_record(
                f"Found {len(domain_validation)} validation options of status "
                f'"{status}" for "{self.args.domain}"; only one option is supported'
            )
-        try:
-            # the validation option can exist before the record set is ready
+        if "ResourceRecord" in domain_validation[0]:
            return domain_validation[0]["ResourceRecord"]
-        except KeyError:
-            LOGGER.debug(
-                "waiting for DomainValidationOptions.ResourceRecord "
-                "to become available for the certificate..."
-            )
-            time.sleep(interval)
-            return self.get_validation_record(
-                cert_arn=cert_arn, interval=interval, status=status
-            )
+        LOGGER.debug(
+            "waiting for DomainValidationOptions.ResourceRecord to become available "
+            "for the certificate..."
+        )
+        time.sleep(interval)
+        return self.get_validation_record(cert_arn=cert_arn, interval=interval, status=status)

    def put_record_set(self, record_set: ResourceRecordTypeDef) -> None:
        """Create/update a record set on a Route 53 Hosted Zone.

        Args:
            record_set: Record set to be added to Route 53.

        """
-        LOGGER.info(
-            "adding validation record to hosted zone: %s", self.args.hosted_zone_id
-        )
+        LOGGER.info("adding validation record to hosted zone: %s", self.args.hosted_zone_id)
        self.__change_record_set("CREATE", [record_set])

-    def remove_validation_records(
-        self, records: Optional[List[ResourceRecordTypeDef]] = None
-    ) -> None:
+    def remove_validation_records(self, records: list[ResourceRecordTypeDef] | None = None) -> None:
        """Remove all record set entries used to validate an ACM Certificate.

        Args:
@@ -324,7 +299,7 @@ def update_record_set(self, record_set: ResourceRecordTypeDef) -> None:
    def __change_record_set(
        self,
        action: Literal["CREATE", "DELETE", "UPSERT"],
-        record_sets: List[ResourceRecordTypeDef],
+        record_sets: list[ResourceRecordTypeDef],
    ) -> None:
        """Wrap boto3.client('route53').change_resource_record_sets.
@@ -336,7 +311,7 @@
        if not record_sets:
            raise ValueError("Must provide one or more record sets")

-        changes: List[ChangeTypeDef] = [
+        changes: list[ChangeTypeDef] = [
            {
                "Action": action,
                "ResourceRecordSet": {
@@ -360,7 +335,7 @@
            ChangeBatch={"Comment": self.template_description, "Changes": changes},
        )

-    def deploy(self, status: Optional[Status] = None) -> Dict[str, str]:
+    def deploy(self, status: Status | None = None) -> dict[str, str]:
        """Deploy an ACM Certificate."""
        record = None
        try:
@@ -410,7 +385,7 @@
                LOGGER.error(err)
                self.destroy(
                    records=[record] if record else None,
-                    skip_r53=isinstance(  # type: ignore
+                    skip_r53=isinstance(
                        err,
                        (
                            self.r53_client.exceptions.InvalidChangeBatch,
@@ -425,7 +400,7 @@
    def destroy(
        self,
-        records: Optional[List[ResourceRecordTypeDef]] = None,
+        records: list[ResourceRecordTypeDef] | None = None,
        skip_r53: bool = False,
    ) -> bool:
        """Destroy an ACM certificate.
@@ -437,6 +412,8 @@
            skip_r53: Skip the removal of validation records.
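
        Returns:
            ``True`` once the stack and (unless ``skip_r53`` is set) the
            validation records have been removed.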
""" + if not self.stack: # cov: ignore + raise NotImplementedError("stack not present on hook") if not skip_r53: try: self.remove_validation_records(records) @@ -447,23 +424,19 @@ def destroy( ) as err: # these error are fine if they happen during destruction but # could require manual steps to finish cleanup. - LOGGER.warning( - "deletion of the validation records failed with error:\n%s", err - ) + LOGGER.warning("deletion of the validation records failed with error:\n%s", err) except ClientError as err: if err.response["Error"]["Message"] != ( f"Stack with id {self.stack.fqn} does not exist" ): raise - LOGGER.warning( - "deletion of the validation records failed with error:\n%s", err - ) + LOGGER.warning("deletion of the validation records failed with error:\n%s", err) else: LOGGER.info("deletion of validation records was skipped") self.destroy_stack(wait=True) return True - def post_deploy(self) -> Dict[str, str]: + def post_deploy(self) -> dict[str, str]: """Run during the **post_deploy** stage.""" return self.deploy() @@ -471,7 +444,7 @@ def post_destroy(self) -> bool: """Run during the **post_destroy** stage.""" return self.destroy() - def pre_deploy(self) -> Dict[str, str]: + def pre_deploy(self) -> dict[str, str]: """Run during the **pre_deploy** stage.""" return self.deploy() diff --git a/runway/cfngin/hooks/aws_lambda.py b/runway/cfngin/hooks/aws_lambda.py index f8ab897d8..00c515f8e 100644 --- a/runway/cfngin/hooks/aws_lambda.py +++ b/runway/cfngin/hooks/aws_lambda.py @@ -1,6 +1,5 @@ """AWS Lambda hook.""" -# pylint: disable=too-many-lines from __future__ import annotations import hashlib @@ -19,13 +18,6 @@ from typing import ( TYPE_CHECKING, Any, - Dict, - Iterable, - Iterator, - List, - Optional, - Tuple, - Union, cast, ) from zipfile import ZIP_DEFLATED, ZipFile @@ -35,7 +27,6 @@ import docker import docker.types import formic -from docker.models.images import Image from troposphere.awslambda import Code from typing_extensions import Literal, TypedDict @@ -43,9 +34,13 @@ from ..utils import ensure_s3_bucket if TYPE_CHECKING: + from collections.abc import Iterable, Iterator + + from docker.models.images import Image from mypy_boto3_s3.client import S3Client from mypy_boto3_s3.literals import ObjectCannedACLType from mypy_boto3_s3.type_defs import HeadObjectOutputTypeDef + from typing_extensions import TypeAlias from ...context import CfnginContext from ..providers.aws.default import Provider @@ -59,21 +54,16 @@ # list from python tags of https://hub.docker.com/r/lambci/lambda/tags SUPPORTED_RUNTIMES = ["python3.7", "python3.8"] -DockerizePipArgTypeDef = Optional[ - Union[ - bool, - Literal[ - "false", "False", "no", "No", "non-linux", "true", "True", "yes", "Yes" - ], - ] -] +DockerizePipArgTypeDef: TypeAlias = ( + 'bool | Literal["false", "False", "no", "No", "non-linux", "true", "True", "yes", "Yes"] | None' +) def copydir( source: str, destination: str, - includes: List[str], - excludes: Optional[List[str]] = None, + includes: list[str], + excludes: list[str] | None = None, follow_symlinks: bool = False, ) -> None: """Extend the functionality of shutil. 
@@ -93,24 +83,24 @@ def copydir( def _mkdir(dir_name: str) -> None: """Recursively create directories.""" - parent = os.path.dirname(dir_name) - if not os.path.isdir(parent): + parent = os.path.dirname(dir_name) # noqa: PTH120 + if not os.path.isdir(parent): # noqa: PTH112 _mkdir(parent) LOGGER.debug("creating directory: %s", dir_name) - os.mkdir(dir_name) + os.mkdir(dir_name) # noqa: PTH102 for file_name in files: - src = os.path.join(source, file_name) - dest = os.path.join(destination, file_name) + src = os.path.join(source, file_name) # noqa: PTH118 + dest = os.path.join(destination, file_name) # noqa: PTH118 try: LOGGER.debug('copying file "%s" to "%s"', src, dest) copyfile(src, dest) except OSError: - _mkdir(os.path.dirname(dest)) + _mkdir(os.path.dirname(dest)) # noqa: PTH120 copyfile(src, dest) -def find_requirements(root: str) -> Optional[Dict[str, bool]]: +def find_requirements(root: str) -> dict[str, bool] | None: """Identify Python requirement files. Args: @@ -122,7 +112,7 @@ def find_requirements(root: str) -> Optional[Dict[str, bool]]: """ findings = { - file_name: os.path.isfile(os.path.join(root, file_name)) + file_name: os.path.isfile(os.path.join(root, file_name)) # noqa: PTH118, PTH113 for file_name in ["requirements.txt", "Pipfile", "Pipfile.lock"] } @@ -151,12 +141,12 @@ def should_use_docker(dockerize_pip: DockerizePipArgTypeDef = None) -> bool: return False -def str2bool(v: str): +def str2bool(v: str) -> bool: """Return boolean value of string.""" return v.lower() in ("yes", "true", "t", "1", "on", "y") -def _zip_files(files: Iterable[str], root: str) -> Tuple[bytes, str]: +def _zip_files(files: Iterable[str], root: str) -> tuple[bytes, str]: """Generate a ZIP file in-memory from a list of files. Files will be stored in the archive with relative names, and have their @@ -175,7 +165,7 @@ def _zip_files(files: Iterable[str], root: str) -> Tuple[bytes, str]: files = list(files) # create copy of list also converts generator to list with ZipFile(zip_data, "w", ZIP_DEFLATED) as zip_file: for file_name in files: - zip_file.write(os.path.join(root, file_name), file_name) + zip_file.write(os.path.join(root, file_name), file_name) # noqa: PTH118 # Fix file permissions to avoid any issues - only care whether a file # is executable or not, choosing between modes 755 and 644 accordingly. @@ -183,12 +173,8 @@ def _zip_files(files: Iterable[str], root: str) -> Tuple[bytes, str]: perms = (zip_entry.external_attr & ZIP_PERMS_MASK) >> 16 new_perms = 0o755 if perms & stat.S_IXUSR != 0 else 0o644 if new_perms != perms: - LOGGER.debug( - "fixing perms: %s: %o => %o", zip_entry.filename, perms, new_perms - ) - new_attr = (zip_entry.external_attr & ~ZIP_PERMS_MASK) | ( - new_perms << 16 - ) + LOGGER.debug("fixing perms: %s: %o => %o", zip_entry.filename, perms, new_perms) + new_attr = (zip_entry.external_attr & ~ZIP_PERMS_MASK) | (new_perms << 16) zip_entry.external_attr = new_attr contents = zip_data.getvalue() @@ -207,25 +193,24 @@ def _calculate_hash(files: Iterable[str], root: str) -> str: root: base directory to analyze files in. 
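
    Returns:
        The hex digest of an MD5 hash covering both the file names and their
        contents.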
""" - file_hash = hashlib.md5() + file_hash = hashlib.md5() # noqa: S324 for file_name in sorted(files): - file_path = os.path.join(root, file_name) + file_path = os.path.join(root, file_name) # noqa: PTH118 file_hash.update((file_name + "\0").encode()) - with open(file_path, "rb") as file_: - # pylint: disable=cell-var-from-loop + with open(file_path, "rb") as file_: # noqa: PTH123 for chunk in iter(lambda: file_.read(4096), ""): if not chunk: break file_hash.update(chunk) - file_hash.update("\0".encode()) + file_hash.update(b"\0") return file_hash.hexdigest() def _find_files( root: str, - includes: Union[List[str], str], - excludes: Optional[List[str]] = None, + includes: list[str] | str, + excludes: list[str] | None = None, follow_symlinks: bool = False, ) -> Iterator[str]: """List files inside a directory based on include and exclude rules. @@ -249,7 +234,7 @@ def _find_files( http://www.aviser.asia/formic/doc/index.html """ - root = os.path.abspath(root) + root = os.path.abspath(root) # noqa: PTH100 file_set = formic.FileSet( directory=root, include=includes, exclude=excludes, symlinks=follow_symlinks ) @@ -257,8 +242,8 @@ def _find_files( def _zip_from_file_patterns( - root: str, includes: List[str], excludes: List[str], follow_symlinks: bool -) -> Tuple[bytes, str]: + root: str, includes: list[str], excludes: list[str], follow_symlinks: bool +) -> tuple[bytes, str]: """Generate a ZIP file in-memory from file search patterns. Args: @@ -296,9 +281,9 @@ def _zip_from_file_patterns( def handle_requirements( package_root: str, dest_path: str, - requirements: Dict[str, bool], + requirements: dict[str, bool], pipenv_timeout: int = 300, - python_path: Optional[str] = None, + python_path: str | None = None, use_pipenv: bool = False, ) -> str: """Use the correct requirements file. @@ -330,7 +315,7 @@ def handle_requirements( ) if requirements["requirements.txt"]: LOGGER.info("using requirements.txt for dependencies") - return os.path.join(dest_path, "requirements.txt") + return os.path.join(dest_path, "requirements.txt") # noqa: PTH118 if requirements["Pipfile"] or requirements["Pipfile.lock"]: LOGGER.info("using pipenv for dependencies") return _handle_use_pipenv( @@ -348,7 +333,7 @@ def handle_requirements( def _handle_use_pipenv( package_root: str, dest_path: str, - python_path: Optional[str] = None, + python_path: str | None = None, timeout: int = 300, ) -> str: """Create requirements file from Pipfile. 
@@ -368,35 +353,31 @@ def _handle_use_pipenv(
        LOGGER.error("pipenv can only be used with python installed from PyPi")
        sys.exit(1)
    LOGGER.info("creating requirements.txt from Pipfile...")
-    req_path = os.path.join(dest_path, "requirements.txt")
+    req_path = os.path.join(dest_path, "requirements.txt")  # noqa: PTH118
    cmd = ["pipenv", "lock", "--requirements", "--keep-outdated"]
    if python_path:
        cmd.insert(0, python_path)
        cmd.insert(1, "-m")
-    with open(req_path, "w", encoding="utf-8") as requirements:
-        with subprocess.Popen(
+    with (
+        open(req_path, "w", encoding="utf-8") as requirements,  # noqa: PTH123
+        subprocess.Popen(
            cmd, cwd=package_root, stdout=requirements, stderr=subprocess.PIPE
-        ) as pipenv_process:
-            if int(sys.version[0]) > 2:
-                _stdout, stderr = pipenv_process.communicate(timeout=timeout)
-            else:
-                _stdout, stderr = pipenv_process.communicate()
-            if pipenv_process.returncode == 0:
-                return req_path
-            if int(sys.version[0]) > 2:
-                stderr = stderr.decode("UTF-8")
-            LOGGER.error(
-                '"%s" failed with the following output:\n%s', " ".join(cmd), stderr
-            )
-            raise PipenvError
+        ) as pipenv_process,
+    ):
+        _stdout, stderr = pipenv_process.communicate(timeout=timeout)
+        if pipenv_process.returncode == 0:
+            return req_path
+        stderr = stderr.decode("UTF-8")
+        LOGGER.error('"%s" failed with the following output:\n%s', " ".join(cmd), stderr)
+        raise PipenvError

-def dockerized_pip(
+def dockerized_pip(  # noqa: C901, PLR0912
    work_dir: str,
-    client: Optional[docker.DockerClient] = None,
-    runtime: Optional[str] = None,
-    docker_file: Optional[str] = None,
-    docker_image: Optional[str] = None,
+    client: docker.DockerClient | None = None,
+    runtime: str | None = None,
+    docker_file: str | None = None,
+    docker_image: str | None = None,
    python_dontwritebytecode: bool = False,
    **_: Any,
) -> None:
@@ -414,31 +395,30 @@ def dockerized_pip(
        python_dontwritebytecode: Don't write bytecode.

    """
-    # TODO use kwargs to pass args to docker for advanced config
+    # TODO (craig): use kwargs to pass args to docker for advanced config
    if bool(docker_file) + bool(docker_image) + bool(runtime) != 1:
        # exactly one of these is needed. converting to bool will give us a
        # 'False' (0) for 'None' and 'True' (1) for anything else.
        raise InvalidDockerizePipConfiguration(
-            "exactly only one of [docker_file, docker_file, runtime] must be "
-            "provided"
+            "exactly one of [docker_file, docker_image, runtime] must be provided"
        )

    if not client:
        client = docker.from_env()

    if docker_file:
-        if not os.path.isfile(docker_file):
+        if not os.path.isfile(docker_file):  # noqa: PTH113
            raise ValueError(f'could not find docker_file "{docker_file}"')
        LOGGER.info('building docker image from "%s"', docker_file)
        response = cast(
-            Union[Image, Tuple[Image, Iterator[Dict[str, str]]]],
+            "Image | tuple[Image, Iterator[dict[str, str]]]",
            client.images.build(
-                path=os.path.dirname(docker_file),
-                dockerfile=os.path.basename(docker_file),
+                path=os.path.dirname(docker_file),  # noqa: PTH120
+                dockerfile=os.path.basename(docker_file),  # noqa: PTH119
                forcerm=True,
            ),
        )
-        # the response can be either a tuple of (Image, Generator[Dict[str, str]])
+        # the response can be either a tuple of (Image, Generator[dict[str, str]])
        # or just Image depending on API version.
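        # when a tuple is returned, the first element is the built Image;
        # its id is used below as the image reference for the pip container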
        if isinstance(response, tuple):
            docker_image = response[0].id
@@ -450,26 +430,20 @@
        LOGGER.info('docker image "%s" created', docker_image)
    if runtime:
        if runtime not in SUPPORTED_RUNTIMES:
-            raise ValueError(
-                f'invalid runtime "{runtime}" must be one of {SUPPORTED_RUNTIMES}'
-            )
+            raise ValueError(f'invalid runtime "{runtime}" must be one of {SUPPORTED_RUNTIMES}')
        docker_image = f"lambci/lambda:build-{runtime}"
-        LOGGER.debug(
-            'selected docker image "%s" based on provided runtime', docker_image
-        )
+        LOGGER.debug('selected docker image "%s" based on provided runtime', docker_image)

    if sys.platform.lower() == "win32":
        LOGGER.debug("formatted docker mount path for Windows")
        work_dir = work_dir.replace("\\", "/")
-    work_dir_mount = docker.types.Mount(
-        target="/var/task", source=work_dir, type="bind"
-    )
+    work_dir_mount = docker.types.Mount(target="/var/task", source=work_dir, type="bind")
    pip_cmd = "python -m pip install -t /var/task -r /var/task/requirements.txt"

    LOGGER.info('using docker image "%s" to build deployment package...', docker_image)

-    docker_run_args: Dict[str, Any] = {}
+    docker_run_args: dict[str, Any] = {}
    if python_dontwritebytecode:
        docker_run_args["environment"] = "1"
@@ -512,9 +486,7 @@ def _pip_has_no_color_option(python_path: str) -> bool:
            [
                python_path,
                "-c",
-                "from __future__ import print_function;"
-                "import pip;"
-                "print(pip.__version__)",
+                "from __future__ import print_function;import pip;print(pip.__version__)",
            ]
        )
        if isinstance(pip_version_string, bytes):  # type: ignore
@@ -526,24 +498,24 @@ def _pip_has_no_color_option(python_path: str) -> bool:
    return False

-# TODO refactor logic to breakup logic into smaller chunks
-def _zip_package(  # pylint: disable=too-many-locals,too-many-statements
+# TODO (kyle): refactor to break up logic into smaller chunks
+def _zip_package(  # noqa: PLR0915, PLR0912, C901, D417
    package_root: str,
    *,
    dockerize_pip: DockerizePipArgTypeDef = False,
-    excludes: Optional[List[str]] = None,
+    excludes: list[str] | None = None,
    follow_symlinks: bool = False,
-    includes: List[str],
+    includes: list[str],
    pipenv_timeout: int = 300,
    python_dontwritebytecode: bool = False,
    python_exclude_bin_dir: bool = False,
    python_exclude_setuptools_dirs: bool = False,
-    python_path: Optional[str] = None,
-    requirements_files: Dict[str, bool],
+    python_path: str | None = None,
+    requirements_files: dict[str, bool],
    use_pipenv: bool = False,
    work_dir: Path,
    **kwargs: Any,
-) -> Tuple[bytes, str]:
+) -> tuple[bytes, str]:
    """Create zip file in memory with package dependencies.
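    Copies the package source to a temporary directory, resolves the correct
    requirements file, installs dependencies with pip (directly or via Docker),
    then zips the result and returns its contents along with a content hash.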
Args: @@ -578,9 +550,8 @@ def _zip_package( # pylint: disable=too-many-locals,too-many-statements excludes = excludes or [] excludes.append(".venv/") - # pylint: disable=consider-using-with tmpdir = tempfile.TemporaryDirectory(prefix="cfngin", dir=work_dir) - tmp_req = os.path.join(tmpdir.name, "requirements.txt") + tmp_req = os.path.join(tmpdir.name, "requirements.txt") # noqa: PTH118 copydir(package_root, tmpdir.name, includes, excludes, follow_symlinks) tmp_req = handle_requirements( package_root=package_root, @@ -607,7 +578,7 @@ def _zip_package( # pylint: disable=too-many-locals,too-many-statements "--no-color", ] - subprocess_args: Dict[str, Any] = {} + subprocess_args: dict[str, Any] = {} if python_dontwritebytecode: subprocess_args["env"] = dict(os.environ, PYTHONDONTWRITEBYTECODE="1") @@ -646,14 +617,16 @@ def _zip_package( # pylint: disable=too-many-locals,too-many-statements if tmp_script.is_file(): tmp_script.unlink() - if python_exclude_bin_dir and os.path.isdir(os.path.join(tmpdir.name, "bin")): + if python_exclude_bin_dir and os.path.isdir( # noqa: PTH112 + os.path.join(tmpdir.name, "bin") # noqa: PTH118 + ): LOGGER.debug("Removing python /bin directory from Lambda files") - shutil.rmtree(os.path.join(tmpdir.name, "bin")) + shutil.rmtree(os.path.join(tmpdir.name, "bin")) # noqa: PTH118 if python_exclude_setuptools_dirs: for i in os.listdir(tmpdir.name): - if i.endswith(".egg-info") or i.endswith(".dist-info"): + if i.endswith((".egg-info", ".dist-info")): LOGGER.debug("Removing directory %s from Lambda files", i) - shutil.rmtree(os.path.join(tmpdir.name, i)) + shutil.rmtree(os.path.join(tmpdir.name, i)) # noqa: PTH118 req_files = _find_files(tmpdir.name, includes="**", follow_symlinks=False) contents, content_hash = _zip_files(req_files, tmpdir.name) @@ -673,9 +646,7 @@ def _zip_package( # pylint: disable=too-many-locals,too-many-statements return contents, content_hash -def _head_object( - s3_conn: S3Client, bucket: str, key: str -) -> Optional[HeadObjectOutputTypeDef]: +def _head_object(s3_conn: S3Client, bucket: str, key: str) -> HeadObjectOutputTypeDef | None: """Retrieve information about an object in S3 if it exists. Args: @@ -705,7 +676,7 @@ def _upload_code( bucket: str, prefix: str, name: str, - contents: Union[bytes, str], + contents: bytes | str, content_hash: str, payload_acl: ObjectCannedACLType, ) -> Code: @@ -753,10 +724,10 @@ def _upload_code( def _check_pattern_list( - patterns: Optional[Union[List[str], str]], + patterns: list[str] | str | None, key: str, - default: Optional[List[str]] = None, -) -> Optional[List[str]]: + default: list[str] | None = None, +) -> list[str] | None: """Validate file search patterns from user configuration. 
Acceptable input is a string (which will be converted to a singleton list), @@ -785,9 +756,7 @@ def _check_pattern_list( if isinstance(patterns, list) and all(isinstance(p, str) for p in patterns): # type: ignore return patterns - raise ValueError( - f"Invalid file patterns in key '{key}': must be a string or " "list of strings" - ) + raise ValueError(f"Invalid file patterns in key '{key}': must be a string or list of strings") class _UploadFunctionOptionsTypeDef(TypedDict): @@ -802,8 +771,8 @@ class _UploadFunctionOptionsTypeDef(TypedDict): """ - exclude: Optional[List[str]] - include: Optional[List[str]] + exclude: list[str] | None + include: list[str] | None path: str @@ -845,11 +814,9 @@ def _upload_function( """ try: - root = os.path.expanduser(options["path"]) + root = os.path.expanduser(options["path"]) # noqa: PTH111 except KeyError as exc: - raise ValueError( - f"missing required property '{exc.args[0]}' in function '{name}'" - ) from exc + raise ValueError(f"missing required property '{exc.args[0]}' in function '{name}'") from exc includes = _check_pattern_list(options.get("include"), "include", default=["**"]) excludes = _check_pattern_list(options.get("exclude"), "exclude", default=[]) @@ -858,13 +825,13 @@ def _upload_function( # os.path.join will ignore other parameters if the right-most one is an # absolute path, which is exactly what we want. - if not os.path.isabs(root): - root = os.path.abspath(os.path.join(sys_path, root)) + if not os.path.isabs(root): # noqa: PTH117 + root = os.path.abspath(os.path.join(sys_path, root)) # noqa: PTH118, PTH100 requirements_files = find_requirements(root) if requirements_files: zip_contents, content_hash = _zip_package( root, - includes=cast(List[str], includes), + includes=cast("list[str]", includes), excludes=excludes, follow_symlinks=follow_symlinks, requirements_files=requirements_files, @@ -873,18 +840,16 @@ def _upload_function( ) else: zip_contents, content_hash = _zip_from_file_patterns( - root, cast(List[str], includes), cast(List[str], excludes), follow_symlinks + root, cast("list[str]", includes), cast("list[str]", excludes), follow_symlinks ) - return _upload_code( - s3_conn, bucket, prefix, name, zip_contents, content_hash, payload_acl - ) + return _upload_code(s3_conn, bucket, prefix, name, zip_contents, content_hash, payload_acl) def select_bucket_region( - custom_bucket: Optional[str], - hook_region: Optional[str], - cfngin_bucket_region: Optional[str], + custom_bucket: str | None, + hook_region: str | None, + cfngin_bucket_region: str | None, provider_region: str, ) -> str: """Return the appropriate region to use when uploading functions. @@ -908,7 +873,9 @@ def select_bucket_region( return region or provider_region -def upload_lambda_functions(context: CfnginContext, provider: Provider, **kwargs: Any): +def upload_lambda_functions( # noqa: D417 + context: CfnginContext, provider: Provider, **kwargs: Any +) -> dict[str, Any]: """Build Lambda payloads from user configuration and upload them to S3. Constructs ZIP archives containing files matching specified patterns for @@ -932,41 +899,41 @@ def upload_lambda_functions(context: CfnginContext, provider: Provider, **kwargs context: Context instance. (passed in by CFNgin) Keyword Args: - bucket (Optional[str]): Custom bucket to upload functions to. + bucket (str | None): Custom bucket to upload functions to. Omitting it will cause the default CFNgin bucket to be used. 
-        bucket_region (Optional[str]): The region in which the bucket should
+        bucket_region (str | None): The region in which the bucket should
            exist. If not given, the region will either be that of the
            global ``cfngin_bucket_region`` setting, or else the region in
            use by the provider.
-        prefix (Optional[str]): S3 key prefix to prepend to the uploaded
+        prefix (str | None): S3 key prefix to prepend to the uploaded
            zip name.
-        follow_symlinks (Optional[bool]): Will determine if symlinks should
+        follow_symlinks (bool | None): Will determine if symlinks should
            be followed and included with the zip artifact.
            (*default:* ``False``)
-        payload_acl (Optional[str]): The canned S3 object ACL to be applied
+        payload_acl (str | None): The canned S3 object ACL to be applied
            to the uploaded payload. (*default: private*)
-        functions (Dict[str, Any]): Configurations of desired payloads to
+        functions (dict[str, Any]): Configurations of desired payloads to
            build. Keys correspond to function names, used to derive key
            names for the payload. Each value should itself be a dictionary,
            with the following data:

-            **docker_file (Optional[str])**
+            **docker_file (str | None)**
                Path to a local DockerFile that will be built and used for
                ``dockerize_pip``. Must provide exactly one of ``docker_file``,
                ``docker_image``, or ``runtime``.

-            **docker_image (Optional[str])**
+            **docker_image (str | None)**
                Custom Docker image to use with ``dockerize_pip``. Must
                provide exactly one of ``docker_file``, ``docker_image``, or
                ``runtime``.

-            **dockerize_pip (Optional[Union[str, bool]])**
+            **dockerize_pip (bool | str | None)**
                Whether to use Docker when preparing package dependencies with
                pip. Can be set to True/False or the special string 'non-linux'
                which will only run on non-Linux systems. To use this option
                Docker must be installed.

-            **exclude (Optional[Union[str, List[str]]])**
+            **exclude (str | list[str] | None)**
                Pattern or list of patterns of files to exclude from the
                payload. If provided, any files that match will be ignored,
                regardless of whether they match an inclusion pattern.
@@ -975,7 +942,7 @@ def upload_lambda_functions(context: CfnginContext, provider: Provider, **kwargs
                such as ``.git``, ``.svn``, ``__pycache__``, ``*.pyc``,
                ``.gitignore``, etc.

-            **include (Optional[Union[str, List[str]]])**
+            **include (str | list[str] | None)**
                Pattern or list of patterns of files to include in the
                payload. If provided, only files that match these
                patterns will be included in the payload.
@@ -999,25 +966,25 @@ def upload_lambda_functions(context: CfnginContext, provider: Provider, **kwargs
                directly under this directory will be added to the root of
                the ZIP file.

-            **pipenv_lock_timeout (Optional[int])**
+            **pipenv_lock_timeout (int | None)**
                Time in seconds to wait while creating lock file with pipenv.

-            **pipenv_timeout (Optional[int])**
+            **pipenv_timeout (int | None)**
                Time in seconds to wait while running pipenv.

-            **python_path (Optional[str])**
+            **python_path (str | None)**
                Absolute path to a python interpreter to use for
                ``pip``/``pipenv`` actions. If not provided, the current
                python interpreter will be used for ``pip`` and ``pipenv``
                will be used from the current ``$PATH``.

-            **runtime (Optional[str])**
+            **runtime (str | None)**
                Runtime of the AWS Lambda Function being uploaded. Used with
                ``dockerize_pip`` to automatically select the appropriate
                Docker image to use. Must provide exactly one of
                ``docker_file``, ``docker_image``, or ``runtime``.
- **use_pipenv (Optional[bool])** + **use_pipenv (bool)** Explicitly use Pipfile/Pipfile.lock to prepare package dependencies even if a requirements.txt file is found. @@ -1074,8 +1041,8 @@ def create_template(self): "see documentation for replacement", __name__, ) - # TODO add better handling for misconfiguration (e.g. forgetting function names) - # TODO support defining dockerize_pip options at the top level of args + # TODO (craig): add better handling for misconfiguration (e.g. forgetting function names) + # TODO (craig): support defining dockerize_pip options at the top level of args custom_bucket = cast(str, kwargs.get("bucket", "")) if not custom_bucket: if not context.bucket_name: @@ -1114,11 +1081,11 @@ def create_template(self): prefix = kwargs.get("prefix", "") - results: Dict[str, Any] = {} + results: dict[str, Any] = {} for name, options in kwargs["functions"].items(): sys_path = ( - os.path.dirname(context.config_path) - if os.path.isfile(context.config_path) + os.path.dirname(context.config_path) # noqa: PTH120 + if os.path.isfile(context.config_path) # noqa: PTH113 else context.config_path ) results[name] = _upload_function( diff --git a/runway/cfngin/hooks/awslambda/_python_hooks.py b/runway/cfngin/hooks/awslambda/_python_hooks.py index a6eb1e9ee..c1888af2a 100644 --- a/runway/cfngin/hooks/awslambda/_python_hooks.py +++ b/runway/cfngin/hooks/awslambda/_python_hooks.py @@ -1,7 +1,5 @@ """Hook for creating an AWS Lambda Function using Python runtime.""" -# pylint errors are python3.7 only -# pylint: disable=inherit-non-class,no-value-for-parameter from __future__ import annotations import logging @@ -31,7 +29,7 @@ class PythonFunction(AwsLambdaHook[PythonProject]): def __init__(self, context: CfnginContext, **kwargs: Any) -> None: """Instantiate class.""" super().__init__(context) - self.args = PythonHookArgs.parse_obj(kwargs) + self.args = PythonHookArgs.model_validate(kwargs) @cached_property def deployment_package(self) -> DeploymentPackage[PythonProject]: @@ -58,7 +56,7 @@ def pre_deploy(self) -> Any: """Run during the **pre_deploy** stage.""" try: self.deployment_package.upload() - return self.build_response("deploy").dict(by_alias=True) + return self.build_response("deploy").model_dump(by_alias=True) except BaseException: self.cleanup_on_error() raise diff --git a/runway/cfngin/hooks/awslambda/base_classes.py b/runway/cfngin/hooks/awslambda/base_classes.py index f4846ab6d..9cd2501cc 100644 --- a/runway/cfngin/hooks/awslambda/base_classes.py +++ b/runway/cfngin/hooks/awslambda/base_classes.py @@ -3,23 +3,16 @@ from __future__ import annotations import logging -from pathlib import Path from typing import ( TYPE_CHECKING, Any, ClassVar, Generic, - List, - Optional, - Set, - Tuple, TypeVar, cast, overload, ) -from typing_extensions import Literal - from ....compat import cached_property from ..protocols import CfnginHookProtocol from .exceptions import RuntimeMismatchError @@ -28,6 +21,10 @@ from .source_code import SourceCode if TYPE_CHECKING: + from pathlib import Path + + from typing_extensions import Literal + from ...._logging import RunwayLogger from ....context import CfnginContext from ....utils import BaseModel @@ -54,9 +51,7 @@ class Project(Generic[_AwsLambdaHookArgsTypeVar_co]): ctx: CfnginContext """CFNgin context object.""" - def __init__( - self, args: _AwsLambdaHookArgsTypeVar_co, context: CfnginContext - ) -> None: + def __init__(self, args: _AwsLambdaHookArgsTypeVar_co, context: CfnginContext) -> None: """Instantiate class. 
Args: @@ -78,7 +73,7 @@ def build_directory(self) -> Path: return result @cached_property - def cache_dir(self) -> Optional[Path]: + def cache_dir(self) -> Path | None: """Directory where a dependency manager's cache data will be stored. Returns: @@ -98,12 +93,12 @@ def cache_dir(self) -> Optional[Path]: return cache_dir @cached_property - def compatible_architectures(self) -> Optional[List[str]]: + def compatible_architectures(self) -> list[str] | None: """List of compatible instruction set architectures.""" return getattr(self.args, "compatible_architectures", None) @cached_property - def compatible_runtimes(self) -> Optional[List[str]]: + def compatible_runtimes(self) -> list[str] | None: """List of compatible runtimes. Value should be valid Lambda Function runtimes @@ -114,7 +109,7 @@ def compatible_runtimes(self) -> Optional[List[str]]: compatible runtimes. """ - runtimes = getattr(self.args, "compatible_runtimes", cast(List[str], [])) + runtimes = getattr(self.args, "compatible_runtimes", cast("list[str]", [])) if runtimes and self.runtime not in runtimes: raise ValueError( f"runtime ({self.runtime}) not in compatible runtimes ({', '.join(runtimes)})" @@ -129,7 +124,7 @@ def dependency_directory(self) -> Path: return result @cached_property - def license(self) -> Optional[str]: + def license(self) -> str | None: """Software license for the project. Can be any of the following: @@ -142,8 +137,8 @@ def license(self) -> Optional[str]: """ return getattr(self.args, "license", None) - @cached_property # pylint error is python3.7 only - def metadata_files(self) -> Tuple[Path, ...]: + @cached_property + def metadata_files(self) -> tuple[Path, ...]: """Project metadata files (e.g. ``project.json``, ``pyproject.toml``).""" return () @@ -163,9 +158,9 @@ def runtime(self) -> str: raise ValueError("runtime could not be determined from the build system") @cached_property - def _runtime_from_docker(self) -> Optional[str]: + def _runtime_from_docker(self) -> str | None: """runtime from Docker if class can use Docker.""" - docker: Optional[DockerDependencyInstaller] = getattr(self, "docker", None) + docker: DockerDependencyInstaller | None = getattr(self, "docker", None) if not docker: return None return docker.runtime @@ -226,11 +221,7 @@ def project_root(self) -> Path: top_lvl_dir = ( self.ctx.config_path.parent if self.ctx.config_path.is_file() - else ( - self.ctx.config_path - if self.ctx.config_path.is_dir() - else self.args.source_code - ) + else (self.ctx.config_path if self.ctx.config_path.is_dir() else self.args.source_code) ) if top_lvl_dir == self.args.source_code: return top_lvl_dir @@ -238,8 +229,7 @@ def project_root(self) -> Path: parents = list(self.args.source_code.parents) if top_lvl_dir not in parents: LOGGER.info( - "ignoring project directory; " - "source code located outside of project directory" + "ignoring project directory; source code located outside of project directory" ) return self.args.source_code @@ -269,8 +259,8 @@ def project_type(self) -> str: """ raise NotImplementedError - @cached_property # pylint error is python3.7 only - def supported_metadata_files(self) -> Set[str]: + @cached_property + def supported_metadata_files(self) -> set[str]: """Names of all supported metadata files. 
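
        Subclasses extend this set with language-specific file names
        (e.g. ``pyproject.toml`` for Python projects).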
Returns: @@ -320,13 +310,9 @@ class AwsLambdaHook(CfnginHookProtocol, Generic[_ProjectTypeVar]): BUILD_LAYER: ClassVar[bool] = False """Flag to denote if the hook creates a Lambda Function or Layer deployment package.""" - args: AwsLambdaHookArgs - """Parsed hook arguments.""" - ctx: CfnginContext """CFNgin context object.""" - # pylint: disable=super-init-not-called def __init__(self, context: CfnginContext, **_kwargs: Any) -> None: """Instantiate class. @@ -350,19 +336,15 @@ def project(self) -> _ProjectTypeVar: raise NotImplementedError @overload - def build_response( - self, stage: Literal["deploy"] - ) -> AwsLambdaHookDeployResponse: ... + def build_response(self, stage: Literal["deploy"]) -> AwsLambdaHookDeployResponse: ... @overload - def build_response(self, stage: Literal["destroy"]) -> Optional[BaseModel]: ... + def build_response(self, stage: Literal["destroy"]) -> BaseModel | None: ... @overload def build_response(self, stage: Literal["plan"]) -> AwsLambdaHookDeployResponse: ... - def build_response( - self, stage: Literal["deploy", "destroy", "plan"] - ) -> Optional[BaseModel]: + def build_response(self, stage: Literal["deploy", "destroy", "plan"]) -> BaseModel | None: """Build response object that will be returned by this hook. Args: @@ -390,7 +372,7 @@ def _build_response_deploy(self) -> AwsLambdaHookDeployResponse: runtime=self.deployment_package.runtime, ) - def _build_response_destroy(self) -> Optional[BaseModel]: + def _build_response_destroy(self) -> BaseModel | None: """Build response for destroy stage.""" return None @@ -467,7 +449,7 @@ def plan(self) -> AwsLambdaHookDeployResponseTypedDict: """Run during the **plan** stage.""" return cast( "AwsLambdaHookDeployResponseTypedDict", - self.build_response("plan").dict(by_alias=True), + self.build_response("plan").model_dump(by_alias=True), ) def post_deploy(self) -> Any: diff --git a/runway/cfngin/hooks/awslambda/deployment_package.py b/runway/cfngin/hooks/awslambda/deployment_package.py index 6724c6ea3..335488134 100644 --- a/runway/cfngin/hooks/awslambda/deployment_package.py +++ b/runway/cfngin/hooks/awslambda/deployment_package.py @@ -7,26 +7,10 @@ import logging import mimetypes import stat -import sys import zipfile -from contextlib import suppress -from typing import ( - TYPE_CHECKING, - ClassVar, - Dict, - Generic, - Iterator, - List, - Optional, - TypeVar, - Union, - cast, - overload, -) +from typing import TYPE_CHECKING, ClassVar, Final, Generic, TypeVar, cast, overload from urllib.parse import urlencode -from typing_extensions import Final, Literal - from ....compat import cached_property from ....core.providers.aws.s3 import Bucket from ....core.providers.aws.s3.exceptions import ( @@ -42,10 +26,12 @@ from .models.args import AwsLambdaHookArgs if TYPE_CHECKING: + from collections.abc import Iterator from pathlib import Path import igittigitt from mypy_boto3_s3.type_defs import HeadObjectOutputTypeDef, PutObjectOutputTypeDef + from typing_extensions import Literal from ...._logging import RunwayLogger @@ -64,7 +50,7 @@ class DeploymentPackage(DelCachedPropMixin, Generic[_ProjectTypeVar]): """ - META_TAGS: ClassVar[Dict[str, str]] = { + META_TAGS: ClassVar[dict[str, str]] = { "code_sha256": "runway.cfngin:awslambda.code_sha256", "compatible_architectures": "runway.cfngin:awslambda.compatible_architectures", "compatible_runtimes": "runway.cfngin:awslambda.compatible_runtimes", @@ -78,9 +64,7 @@ class DeploymentPackage(DelCachedPropMixin, Generic[_ProjectTypeVar]): SIZE_EOCD: Final[Literal[22]] = 22 """Size of a 
zip file's End of Central Directory Record (empty zip).""" - ZIPFILE_PERMISSION_MASK: ClassVar[int] = ( - stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO - ) << 16 + ZIPFILE_PERMISSION_MASK: ClassVar[int] = (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) << 16 """Mask to retrieve unix file permissions from the external attributes property of a ``zipfile.ZipInfo``. """ @@ -91,7 +75,7 @@ class DeploymentPackage(DelCachedPropMixin, Generic[_ProjectTypeVar]): usage_type: Literal["function", "layer"] """How the deployment package can be used by AWS Lambda.""" - _put_object_response: Optional[PutObjectOutputTypeDef] = None + _put_object_response: PutObjectOutputTypeDef | None = None def __init__( self, @@ -154,26 +138,24 @@ def code_sha256(self) -> str: return base64.b64encode(file_hash.digest).decode() @cached_property - def compatible_architectures(self) -> Optional[List[str]]: + def compatible_architectures(self) -> list[str] | None: """List of compatible instruction set architectures.""" return self.project.compatible_architectures @cached_property - def compatible_runtimes(self) -> Optional[List[str]]: + def compatible_runtimes(self) -> list[str] | None: """List of compatible runtimes.""" return self.project.compatible_runtimes @cached_property def exists(self) -> bool: """Whether the deployment package exists.""" - if self.archive_file.exists(): - return True - return False + return bool(self.archive_file.exists()) @cached_property def gitignore_filter( self, - ) -> Optional[igittigitt.IgnoreParser]: + ) -> igittigitt.IgnoreParser | None: """Filter to use when zipping dependencies. This should be overridden by subclasses if a filter should be used. @@ -182,7 +164,7 @@ def gitignore_filter( return None @cached_property - def license(self) -> Optional[str]: + def license(self) -> str | None: """Software license for the project.""" return self.project.license @@ -197,7 +179,7 @@ def md5_checksum(self) -> str: FileNotFoundError: Property accessed before archive file has been built. """ - file_hash = FileHash(hashlib.md5()) + file_hash = FileHash(hashlib.md5()) # noqa: S324 file_hash.add_file(self.archive_file) return base64.b64encode(file_hash.digest).decode() @@ -206,16 +188,14 @@ def object_key(self) -> str: """Key to use when upload object to AWS S3.""" prefix = f"awslambda/{self.usage_type}s" if self.project.args.object_prefix: - prefix = ( - f"{prefix}/{self.project.args.object_prefix.lstrip('/').rstrip('/')}" - ) + prefix = f"{prefix}/{self.project.args.object_prefix.lstrip('/').rstrip('/')}" return ( # this can't contain runtime - causes a cyclic dependency f"{prefix}/{self.project.source_code.root_directory.name}." f"{self.project.source_code.md5_hash}.zip" ) @cached_property - def object_version_id(self) -> Optional[str]: + def object_version_id(self) -> str | None: """S3 object version ID. Returns: @@ -223,10 +203,7 @@ def object_version_id(self) -> Optional[str]: if versioning is enabled on the bucket. """ - if ( - not self._put_object_response - or "VersionId" not in self._put_object_response - ): + if not self._put_object_response or "VersionId" not in self._put_object_response: return None return self._put_object_response["VersionId"] @@ -244,9 +221,7 @@ def build(self) -> Path: # we need to use runtime BEFORE the build process starts to allow runtime # errors to be raised early. 
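        # build steps: write dependencies into the archive, then the source
        # code, then normalize file permissions (755/644) within the archive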
LOGGER.info("building %s (%s)...", self.archive_file.name, self.runtime) - with zipfile.ZipFile( - self.archive_file, "w", zipfile.ZIP_DEFLATED - ) as archive_file: + with zipfile.ZipFile(self.archive_file, "w", zipfile.ZIP_DEFLATED) as archive_file: self._build_zip_dependencies(archive_file) self._build_zip_source_code(archive_file) self._build_fix_file_permissions(archive_file) @@ -273,9 +248,7 @@ def _build_fix_file_permissions(self, archive_file: zipfile.ZipFile) -> None: """ for file_info in archive_file.filelist: - current_perms = ( - file_info.external_attr & self.ZIPFILE_PERMISSION_MASK - ) >> 16 + current_perms = (file_info.external_attr & self.ZIPFILE_PERMISSION_MASK) >> 16 required_perm = 0o755 if current_perms & stat.S_IXUSR != 0 else 0o644 if current_perms != required_perm: LOGGER.debug( @@ -304,9 +277,9 @@ def _build_zip_dependencies( archive_file.write( dep, ( - self.insert_layer_dir( - dep, self.project.dependency_directory - ).relative_to(self.project.dependency_directory) + self.insert_layer_dir(dep, self.project.dependency_directory).relative_to( + self.project.dependency_directory + ) if self.usage_type == "layer" else dep.relative_to(self.project.dependency_directory) ), @@ -336,14 +309,12 @@ def _build_zip_source_code(self, archive_file: zipfile.ZipFile) -> None: def build_tag_set(self, *, url_encoded: Literal[True] = ...) -> str: ... @overload - def build_tag_set(self, *, url_encoded: Literal[False] = ...) -> Dict[str, str]: ... + def build_tag_set(self, *, url_encoded: Literal[False] = ...) -> dict[str, str]: ... @overload - def build_tag_set( - self, *, url_encoded: bool = ... - ) -> Union[Dict[str, str], str]: ... + def build_tag_set(self, *, url_encoded: bool = ...) -> dict[str, str] | str: ... - def build_tag_set(self, *, url_encoded: bool = True) -> Union[Dict[str, str], str]: + def build_tag_set(self, *, url_encoded: bool = True) -> dict[str, str] | str: """Build tag set to be applied to the S3 object. Args: @@ -382,21 +353,13 @@ def build_tag_set(self, *, url_encoded: bool = True) -> Union[Dict[str, str], st def delete(self) -> None: """Delete deployment package.""" - if sys.version_info < (3, 8): # cov: ignore - with suppress(FileNotFoundError): # acts the same as `missing_ok=true` - self.archive_file.unlink() # python3.7 does not support `missing_ok` - else: # cov: ignore - self.archive_file.unlink(missing_ok=True) + self.archive_file.unlink(missing_ok=True) LOGGER.verbose("deleted local deployment package %s", self.archive_file) # clear cached properties so they can recalculate - self._del_cached_property( - "code_sha256", "exists", "md5_checksum", "object_version_id" - ) + self._del_cached_property("code_sha256", "exists", "md5_checksum", "object_version_id") @staticmethod - def insert_layer_dir( - file_path: Path, relative_to: Path # pylint: disable=unused-argument - ) -> Path: + def insert_layer_dir(file_path: Path, relative_to: Path) -> Path: # noqa: ARG004 """Insert directory into local file path for layer archive. 
If required, this should be overridden by a subclass for language @@ -448,7 +411,11 @@ def upload(self, *, build: bool = True) -> None: ContentMD5=self.md5_checksum, Key=self.object_key, Tagging=self.build_tag_set(), - **{"ContentType": content_type} if content_type else {}, + **( + {"ContentType": content_type} # pyright: ignore[reportArgumentType] + if content_type + else {} + ), ) # clear cached properties so they can recalculate self._del_cached_property("object_version_id") @@ -518,16 +485,14 @@ def code_sha256(self) -> str: return self.object_tags[self.META_TAGS["code_sha256"]] @cached_property - def compatible_architectures(self) -> Optional[List[str]]: + def compatible_architectures(self) -> list[str] | None: """List of compatible instruction set architectures.""" if self.META_TAGS["compatible_architectures"] in self.object_tags: - return self.object_tags[self.META_TAGS["compatible_architectures"]].split( - "+" - ) + return self.object_tags[self.META_TAGS["compatible_architectures"]].split("+") return None @cached_property - def compatible_runtimes(self) -> Optional[List[str]]: + def compatible_runtimes(self) -> list[str] | None: """List of compatible runtimes.""" if self.META_TAGS["compatible_runtimes"] in self.object_tags: return self.object_tags[self.META_TAGS["compatible_runtimes"]].split("+") @@ -536,21 +501,15 @@ def compatible_runtimes(self) -> Optional[List[str]]: @cached_property def exists(self) -> bool: """Whether the S3 object exists.""" - if self.head and not self.head.get("DeleteMarker", False): - return True - return False + return bool(self.head and not self.head.get("DeleteMarker", False)) @cached_property - def head(self) -> Optional[HeadObjectOutputTypeDef]: + def head(self) -> HeadObjectOutputTypeDef | None: """Response from HeadObject API call.""" try: - return self.bucket.client.head_object( - Bucket=self.bucket.name, Key=self.object_key - ) + return self.bucket.client.head_object(Bucket=self.bucket.name, Key=self.object_key) except self.bucket.client.exceptions.ClientError as exc: - status_code = exc.response.get("ResponseMetadata", {}).get( - "HTTPStatusCode", 0 - ) + status_code = exc.response.get("ResponseMetadata", {}).get("HTTPStatusCode", 0) if status_code == 404: LOGGER.verbose( "%s not found", @@ -566,7 +525,7 @@ def head(self) -> Optional[HeadObjectOutputTypeDef]: raise @cached_property - def license(self) -> Optional[str]: + def license(self) -> str | None: """Software license for the project.""" if self.META_TAGS["license"] in self.object_tags: return self.object_tags[self.META_TAGS["license"]] @@ -591,7 +550,7 @@ def md5_checksum(self) -> str: return self.object_tags[self.META_TAGS["md5_checksum"]] @cached_property - def object_tags(self) -> Dict[str, str]: + def object_tags(self) -> dict[str, str]: """S3 object tags.""" response = self.bucket.client.get_object_tagging( Bucket=self.bucket.name, Key=self.object_key @@ -602,7 +561,7 @@ def object_tags(self) -> Dict[str, str]: return {t["Key"]: t["Value"] for t in response["TagSet"]} @cached_property - def object_version_id(self) -> Optional[str]: + def object_version_id(self) -> str | None: """S3 object version ID. 
Returns: @@ -649,9 +608,7 @@ def build(self) -> Path: def delete(self) -> None: """Delete deployment package.""" if self.exists: - self.bucket.client.delete_object( - Bucket=self.bucket.name, Key=self.object_key - ) + self.bucket.client.delete_object(Bucket=self.bucket.name, Key=self.object_key) LOGGER.verbose( "deleted deployment package S3 object %s", self.bucket.format_bucket_path_uri(key=self.object_key), @@ -682,8 +639,7 @@ def update_tags(self) -> None: ) LOGGER.info("updated S3 object's tags") - # pylint: disable=unused-argument - def upload(self, *, build: bool = True) -> None: + def upload(self, *, build: bool = True) -> None: # noqa: ARG002 """Upload deployment package. The object should already exist. This method only exists as a "placeholder" diff --git a/runway/cfngin/hooks/awslambda/docker.py b/runway/cfngin/hooks/awslambda/docker.py index 2612e76b7..98ad810fd 100644 --- a/runway/cfngin/hooks/awslambda/docker.py +++ b/runway/cfngin/hooks/awslambda/docker.py @@ -5,23 +5,10 @@ import logging import os import platform -from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - Dict, - Iterator, - List, - Optional, - Type, - TypeVar, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, ClassVar, cast from docker import DockerClient from docker.errors import DockerException, ImageNotFound -from docker.models.images import Image from docker.types import Mount from ...._logging import PrefixAdaptor @@ -30,8 +17,12 @@ from .constants import AWS_SAM_BUILD_IMAGE_PREFIX, DEFAULT_IMAGE_NAME, DEFAULT_IMAGE_TAG if TYPE_CHECKING: + from collections.abc import Iterator from pathlib import Path + from docker.models.images import Image + from typing_extensions import Self + from ...._logging import RunwayLogger from ....context import CfnginContext, RunwayContext from .base_classes import Project @@ -40,9 +31,6 @@ LOGGER = cast("RunwayLogger", logging.getLogger(__name__)) -_T = TypeVar("_T") - - class DockerDependencyInstaller: """Docker dependency installer.""" @@ -61,7 +49,7 @@ class DockerDependencyInstaller: client: DockerClient """Docker client.""" - ctx: Union[CfnginContext, RunwayContext] + ctx: CfnginContext | RunwayContext """Context object.""" options: DockerOptions @@ -71,8 +59,8 @@ def __init__( self, project: Project[AwsLambdaHookArgs], *, - client: Optional[DockerClient] = None, - context: Optional[Union[CfnginContext, RunwayContext]] = None, + client: DockerClient | None = None, + context: CfnginContext | RunwayContext | None = None, ) -> None: """Instantiate class. @@ -96,7 +84,7 @@ def __init__( self.project = project @cached_property - def bind_mounts(self) -> List[Mount]: + def bind_mounts(self) -> list[Mount]: """Bind mounts that will be used by the container.""" mounts = [ Mount( @@ -121,7 +109,7 @@ def bind_mounts(self) -> List[Mount]: return mounts @cached_property - def environment_variables(self) -> Dict[str, str]: + def environment_variables(self) -> dict[str, str]: """Environment variables to pass to the Docker container. This is a subset of the environment variables stored in the context @@ -131,7 +119,7 @@ def environment_variables(self) -> Dict[str, str]: return {k: v for k, v in self.ctx.env.vars.items() if k.startswith("DOCKER")} @cached_property - def image(self) -> Union[Image, str]: + def image(self) -> Image | str: """Docker image that will be used. 
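        Resolved in order of precedence: an image built from ``docker.file``,
        then ``docker.image``, then a default build image for the ``runtime``.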
Raises: @@ -149,13 +137,13 @@ def image(self) -> Union[Image, str]: ) raise ValueError("docker.file, docker.image, or runtime is required") - @cached_property # pylint error is python3.7 only - def install_commands(self) -> List[str]: + @cached_property + def install_commands(self) -> list[str]: """Commands to run to install dependencies.""" return [] @cached_property - def post_install_commands(self) -> List[str]: + def post_install_commands(self) -> list[str]: """Commands to run after dependencies have been installed.""" cmds = [ *[ @@ -171,7 +159,7 @@ def post_install_commands(self) -> List[str]: ] if platform.system() != "Windows": # methods only exist on POSIX systems - gid, uid = os.getgid(), os.getuid() # pylint: disable=no-member + gid, uid = os.getgid(), os.getuid() cmds.append( f"chown -R {uid}:{gid} {self.DEPENDENCY_DIR}", ) @@ -180,7 +168,7 @@ def post_install_commands(self) -> List[str]: return cmds @cached_property - def pre_install_commands(self) -> List[str]: + def pre_install_commands(self) -> list[str]: """Commands to run before dependencies have been installed.""" cmds = [ f"chown -R 0:0 {self.DEPENDENCY_DIR}", @@ -189,8 +177,8 @@ def pre_install_commands(self) -> List[str]: cmds.append(f"chown -R 0:0 {self.CACHE_DIR}") return cmds - @cached_property # pylint error is python3.7 only - def runtime(self) -> Optional[str]: + @cached_property + def runtime(self) -> str | None: """AWS Lambda runtime determined from the Docker container.""" return None @@ -198,8 +186,8 @@ def build_image( self, docker_file: Path, *, - name: Optional[str] = None, - tag: Optional[str] = None, + name: str | None = None, + tag: str | None = None, ) -> Image: """Build Docker image from Dockerfile. @@ -233,7 +221,7 @@ def build_image( def log_docker_msg_bytes( self, stream: Iterator[bytes], *, level: int = logging.INFO - ) -> List[str]: + ) -> list[str]: """Log Docker output message from blocking generator that return bytes. Args: @@ -244,7 +232,7 @@ def log_docker_msg_bytes( List of log messages. """ - result: List[str] = [] + result: list[str] = [] for raw_msg in stream: msg = raw_msg.decode().strip() result.append(msg) @@ -252,8 +240,8 @@ def log_docker_msg_bytes( return result def log_docker_msg_dict( - self, stream: Iterator[Dict[str, Any]], *, level: int = logging.INFO - ) -> List[str]: + self, stream: Iterator[dict[str, Any]], *, level: int = logging.INFO + ) -> list[str]: """Log Docker output message from blocking generator that return dict. Args: @@ -264,7 +252,7 @@ def log_docker_msg_dict( list of log messages. """ - result: List[str] = [] + result: list[str] = [] for raw_msg in stream: for key in ["stream", "status"]: if key in raw_msg: @@ -283,7 +271,7 @@ def install(self) -> None: - :attr:`~runway.cfngin.hooks.awslambda.docker.DockerDependencyInstaller.install_commands` - :attr:`~runway.cfngin.hooks.awslambda.docker.DockerDependencyInstaller.post_install_commands` - """ # noqa + """ for cmd in self.pre_install_commands: self.run_command(cmd) for cmd in self.install_commands: @@ -315,7 +303,7 @@ def pull_image(self, name: str, *, force: bool = True) -> Image: LOGGER.info("image not found; pulling docker image %s...", name) return self.client.images.pull(repository=name) - def run_command(self, command: str, *, level: int = logging.INFO) -> List[str]: + def run_command(self, command: str, *, level: int = logging.INFO) -> list[str]: """Execute equivalent of ``docker container run``. 
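        The command is run in a container created from :attr:`image` using the
        bind mounts and environment variables defined on this class, with
        output streamed to the logger. A minimal illustration (the
        ``installer`` name here is hypothetical)::

            installer.run_command("python -m pip --version")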
 
         Args:
@@ -350,9 +338,7 @@ def run_command(self, command: str, *, level: int = logging.INFO) -> List[str]:
         raise DockerExecFailedError(response)
 
     @classmethod
-    def from_project(
-        cls: Type[_T], project: Project[AwsLambdaHookArgs]
-    ) -> Optional[_T]:
+    def from_project(cls: type[Self], project: Project[AwsLambdaHookArgs]) -> Self | None:
         """Instantiate class from a project.
 
         High-level method that wraps instantiation in error handling.
diff --git a/runway/cfngin/hooks/awslambda/models/args.py b/runway/cfngin/hooks/awslambda/models/args.py
index 5729a711f..c6fcd837c 100644
--- a/runway/cfngin/hooks/awslambda/models/args.py
+++ b/runway/cfngin/hooks/awslambda/models/args.py
@@ -1,24 +1,22 @@
 """Argument data models."""
 
-# pylint: disable=no-self-argument
 from __future__ import annotations
 
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, cast
+from typing import Annotated
 
-from pydantic import DirectoryPath, Extra, Field, FilePath, validator
+from pydantic import ConfigDict, DirectoryPath, Field, FilePath, ValidationInfo, field_validator
 
 from .....config.models.utils import resolve_path_field
 from .....utils import BaseModel
 from ...base import HookArgsBaseModel
 
-if TYPE_CHECKING:
-    from typing import Callable
-
 
 class DockerOptions(BaseModel):
     """Docker options."""
 
+    model_config = ConfigDict(extra="ignore")
+
     disabled: bool = False
     """Explicitly disable the use of docker (default ``False``).
 
@@ -33,7 +31,7 @@ class DockerOptions(BaseModel):
 
     """
 
-    extra_files: List[str] = []
+    extra_files: list[str] = []
     """List of absolute file paths within the Docker container to copy into the
     deployment package.
 
@@ -57,7 +55,7 @@ class DockerOptions(BaseModel):
 
     """
 
-    file: Optional[FilePath] = None
+    file: FilePath | None = None
     """Dockerfile to use to build an image for use in this process.
 
     This, ``image`` , or ``runtime`` must be provided.
 
@@ -72,7 +70,7 @@ class DockerOptions(BaseModel):
 
     """
 
-    image: Optional[str] = None
+    image: str | None = None
     """Docker image to use. If the image does not exist locally, it will be pulled.
 
     This, ``file`` (takes precedence), or ``runtime`` must be provided.
 
@@ -88,7 +86,7 @@ class DockerOptions(BaseModel):
 
     """
 
-    name: Optional[str] = None
+    name: str | None = None
     """When providing a Dockerfile, this will be the name applied to the resulting
     image. It is the equivalent to ``name`` in the ``name:tag`` syntax of the
     ``docker build [--tag, -t]`` command option.
 
@@ -121,15 +119,7 @@ class DockerOptions(BaseModel):
 
     """
 
-    class Config:
-        """Model configuration."""
-
-        extra = Extra.ignore
-
-    _resolve_path_fields = cast(
-        "classmethod[Callable[..., Any]]",
-        validator("file", allow_reuse=True)(resolve_path_field),
-    )
+    _resolve_path_fields = field_validator("file")(resolve_path_field)
 
 
 class AwsLambdaHookArgs(HookArgsBaseModel):
@@ -141,13 +131,13 @@
 
     """
 
-    cache_dir: Optional[Path] = None
+    cache_dir: Path | None = None
     """Explicitly define the directory location.
 
     Must be an absolute path or it will be relative to the CFNgin module directory.
 
     """
 
-    compatible_architectures: Optional[List[str]] = None
+    compatible_architectures: list[str] | None = None
     """A list of compatible instruction set architectures.
     (https://docs.aws.amazon.com/lambda/latest/dg/foundation-arch.html)
 
@@ -163,7 +153,7 @@
 
     """
 
-    compatible_runtimes: Optional[List[str]] = None
+    compatible_runtimes: list[str] | None = None
     """A list of compatible function runtimes.
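``from_project`` previously threaded a module-level ``TypeVar`` through its signature; ``typing_extensions.Self`` expresses "an instance of whichever subclass this is called on" directly. A trimmed-down sketch (the ``RuntimeError`` condition is a hypothetical stand-in)::

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from typing_extensions import Self


    class Installer:
        @classmethod
        def from_project(cls, project: object) -> Self | None:
            # a subclass calling this gets its own type back, not Installer
            try:
                return cls()
            except RuntimeError:  # hypothetical failure, e.g. a daemon being unreachable
                return None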
 
     When provided, the ``runtime`` being used to build the deployment package
     must be included in the list or an error will be raised.
@@ -183,7 +173,7 @@ class AwsLambdaHookArgs(HookArgsBaseModel):
     docker: DockerOptions = DockerOptions()
     """Docker options."""
 
-    extend_gitignore: List[str] = []
+    extend_gitignore: list[str] = []
     """gitignore rules that should be added to the rules already defined in a
     ``.gitignore`` file in the source code directory. This can be used with or
     without an existing file.
@@ -205,7 +195,7 @@ class AwsLambdaHookArgs(HookArgsBaseModel):
 
     """
 
-    license: Optional[str] = Field(default=None, max_length=256)
+    license: Annotated[str | None, Field(max_length=256)] = None
     """The layer's software license. Can be any of the following:
 
     - A SPDX license identifier (e.g. ``Apache-2.0``).
@@ -223,7 +213,7 @@ class AwsLambdaHookArgs(HookArgsBaseModel):
 
     """
 
-    object_prefix: Optional[str] = None
+    object_prefix: str | None = None
     """Prefix to add to the S3 Object key.
 
     The object will always be prefixed with ``awslambda/functions``.
@@ -232,7 +222,7 @@ class AwsLambdaHookArgs(HookArgsBaseModel):
 
     """
 
-    runtime: Optional[str] = None
+    runtime: Annotated[str | None, Field(validate_default=True)] = None
     """Runtime of the Lambda Function
     (https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html).
@@ -270,19 +260,15 @@ class AwsLambdaHookArgs(HookArgsBaseModel):
     use_cache: bool = True
     """Whether to use a cache directory with pip that will persist builds (default ``True``)."""
 
-    _resolve_path_fields = cast(
-        "classmethod[Callable[..., Any]]",
-        validator("cache_dir", "source_code", allow_reuse=True)(resolve_path_field),
-    )
+    _resolve_path_fields = field_validator("cache_dir", "source_code")(resolve_path_field)
 
-    @validator("runtime", always=True, allow_reuse=True)
-    def _validate_runtime_or_docker(
-        cls, v: Optional[str], values: Dict[str, Any]
-    ) -> Optional[str]:
+    @field_validator("runtime", mode="before")
+    @classmethod
+    def _validate_runtime_or_docker(cls, v: str | None, info: ValidationInfo) -> str | None:
         """Validate that either runtime is provided or Docker image is provided."""
         if v:  # if runtime was provided, we don't need to check anything else
             return v
-        docker: DockerOptions = values["docker"]
+        docker: DockerOptions = info.data["docker"]
         if docker.disabled:
             raise ValueError("runtime must be provided if docker.disabled is True")
         if not (docker.file or docker.image):
@@ -293,7 +279,9 @@ def _validate_runtime_or_docker(
 class PythonHookArgs(AwsLambdaHookArgs):
     """Hook arguments for a Python AWS Lambda deployment package."""
 
-    extend_pip_args: Optional[List[str]] = None
+    model_config = ConfigDict(extra="ignore")
+
+    extend_pip_args: list[str] | None = None
     """Additional arguments that should be passed to ``pip install``.
 
     .. important::
@@ -335,8 +323,3 @@ class PythonHookArgs(AwsLambdaHookArgs):
 
     use_poetry: bool = True
     """Whether poetry should be used if determined appropriate."""
-
-    class Config:
-        """Model configuration."""
-
-        extra = Extra.ignore
diff --git a/runway/cfngin/hooks/awslambda/models/responses.py b/runway/cfngin/hooks/awslambda/models/responses.py
index a2b6a4578..d8e47a1e9 100644
--- a/runway/cfngin/hooks/awslambda/models/responses.py
+++ b/runway/cfngin/hooks/awslambda/models/responses.py
@@ -1,8 +1,10 @@
 """Response data models."""
 
-from typing import List, Optional
+from __future__ import annotations
 
-from pydantic import Extra
+from typing import Annotated
+
+from pydantic import ConfigDict, Field
 
 from runway.utils import BaseModel
 
@@ -18,31 +20,35 @@ class AwsLambdaHookDeployResponse(BaseModel):
 
     """
 
-    bucket_name: str
+    model_config = ConfigDict(extra="forbid", populate_by_name=True)
+
+    bucket_name: Annotated[str, Field(alias="S3Bucket")]
     """Name of the S3 Bucket where the deployment package is located. (alias ``S3Bucket``)"""
 
-    code_sha256: str
+    code_sha256: Annotated[str, Field(alias="CodeSha256")]
     """SHA256 of the deployment package.
 
     This can be used by CloudFormation as the value of ``AWS::Lambda::Version.CodeSha256``.
 
     (alias ``CodeSha256``)
 
     """
 
-    compatible_architectures: Optional[List[str]] = None
+    compatible_architectures: Annotated[
+        list[str] | None, Field(alias="CompatibleArchitectures")
+    ] = None
     """A list of compatible instruction set architectures.
     (https://docs.aws.amazon.com/lambda/latest/dg/foundation-arch.html)
 
     (alias ``CompatibleArchitectures``)
 
     """
 
-    compatible_runtimes: Optional[List[str]] = None
+    compatible_runtimes: Annotated[list[str] | None, Field(alias="CompatibleRuntimes")] = None
     """A list of compatible function runtimes.
 
     Used for filtering with ``ListLayers`` and ``ListLayerVersions``.
 
     (alias ``CompatibleRuntimes``)
 
     """
 
-    license: Optional[str] = None
+    license: Annotated[str | None, Field(alias="License")] = None
     """The layer's software license (alias ``License``). Can be any of the following:
 
     - A SPDX license identifier (e.g. ``MIT``).
@@ -52,31 +58,15 @@ class AwsLambdaHookDeployResponse(BaseModel):
 
     """
 
-    object_key: str
+    object_key: Annotated[str, Field(alias="S3Key")]
     """Key (file path) of the deployment package S3 Object. (alias ``S3Key``)"""
 
-    object_version_id: Optional[str] = None
+    object_version_id: Annotated[str | None, Field(alias="S3ObjectVersion")] = None
    """The version ID of the deployment package S3 Object.
 
     This will only have a value if the S3 Bucket has versioning enabled.
 
     (alias ``S3ObjectVersion``)
 
     """
 
-    runtime: str
+    runtime: Annotated[str, Field(alias="Runtime")]
     """Runtime of the Lambda Function. (alias ``Runtime``)"""
-
-    class Config:
-        """Model configuration."""
-
-        allow_population_by_field_name = True
-        extra = Extra.forbid
-        fields = {
-            "bucket_name": {"alias": "S3Bucket"},
-            "code_sha256": {"alias": "CodeSha256"},
-            "compatible_architectures": {"alias": "CompatibleArchitectures"},
-            "compatible_runtimes": {"alias": "CompatibleRuntimes"},
-            "license": {"alias": "License"},
-            "object_key": {"alias": "S3Key"},
-            "object_version_id": {"alias": "S3ObjectVersion"},
-            "runtime": {"alias": "Runtime"},
-        }
diff --git a/runway/cfngin/hooks/awslambda/python_requirements/_deployment_package.py b/runway/cfngin/hooks/awslambda/python_requirements/_deployment_package.py
index 2e625bacd..90938d87b 100644
--- a/runway/cfngin/hooks/awslambda/python_requirements/_deployment_package.py
+++ b/runway/cfngin/hooks/awslambda/python_requirements/_deployment_package.py
@@ -2,8 +2,7 @@
 
 from __future__ import annotations
 
-from pathlib import Path
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING
 
 from igittigitt import IgnoreParser
 
@@ -11,6 +10,8 @@
 from ..deployment_package import DeploymentPackage
 
 if TYPE_CHECKING:
+    from pathlib import Path
+
     from . import PythonProject
 
 
@@ -20,7 +21,7 @@ class PythonDeploymentPackage(DeploymentPackage["PythonProject"]):
     project: PythonProject
 
     @cached_property
-    def gitignore_filter(self) -> Optional[IgnoreParser]:
+    def gitignore_filter(self) -> IgnoreParser | None:
         """Filter to use when zipping dependencies.
 
         This should be overridden by subclasses if a filter should be used.
@@ -28,15 +29,9 @@ def gitignore_filter(self) -> Optional[IgnoreParser]:
         """
         if self.project.args.slim:
             gitignore_filter = IgnoreParser()
-            gitignore_filter.add_rule(
-                "**/*.dist-info*", self.project.dependency_directory
-            )
-            gitignore_filter.add_rule(
-                "**/*.py[c|d|i|o]", self.project.dependency_directory
-            )
-            gitignore_filter.add_rule(
-                "**/__pycache__*", self.project.dependency_directory
-            )
+            gitignore_filter.add_rule("**/*.dist-info*", self.project.dependency_directory)
+            gitignore_filter.add_rule("**/*.py[c|d|i|o]", self.project.dependency_directory)
+            gitignore_filter.add_rule("**/__pycache__*", self.project.dependency_directory)
             if self.project.args.strip:
                 gitignore_filter.add_rule("**/*.so", self.project.dependency_directory)
             return gitignore_filter
diff --git a/runway/cfngin/hooks/awslambda/python_requirements/_docker.py b/runway/cfngin/hooks/awslambda/python_requirements/_docker.py
index ed968dd0c..1995f7475 100644
--- a/runway/cfngin/hooks/awslambda/python_requirements/_docker.py
+++ b/runway/cfngin/hooks/awslambda/python_requirements/_docker.py
@@ -4,7 +4,7 @@
 
 import logging
 import re
-from typing import TYPE_CHECKING, Dict, List, Optional, Union
+from typing import TYPE_CHECKING
 
 from docker.types.services import Mount
 
@@ -28,8 +28,8 @@ def __init__(
         self,
         project: PythonProject,
         *,
-        client: Optional[DockerClient] = None,
-        context: Optional[Union[CfnginContext, RunwayContext]] = None,
+        client: DockerClient | None = None,
+        context: CfnginContext | RunwayContext | None = None,
     ) -> None:
         """Instantiate class.
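The deleted ``Config.fields`` mapping is what pydantic v2 spells per field with ``Annotated[..., Field(alias=...)]``; ``populate_by_name=True`` keeps both the alias and the field name accepted on input. A cut-down sketch (assuming pydantic>=2, with a hypothetical two-field model)::

    from __future__ import annotations

    from typing import Annotated

    from pydantic import BaseModel, ConfigDict, Field


    class DeployResponse(BaseModel):
        model_config = ConfigDict(extra="forbid", populate_by_name=True)

        bucket_name: Annotated[str, Field(alias="S3Bucket")]
        object_key: Annotated[str, Field(alias="S3Key")]


    # both spellings validate; dumping by alias restores the CloudFormation-style keys
    r = DeployResponse.model_validate({"S3Bucket": "b", "S3Key": "k"})
    assert r == DeployResponse(bucket_name="b", object_key="k")
    assert r.model_dump(by_alias=True) == {"S3Bucket": "b", "S3Key": "k"}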
@@ -42,7 +42,7 @@ def __init__(
         super().__init__(project, client=client, context=context)
 
     @cached_property
-    def bind_mounts(self) -> List[Mount]:
+    def bind_mounts(self) -> list[Mount]:
         """Bind mounts that will be used by the container."""
         mounts = [*super().bind_mounts]
         if self.project.requirements_txt:
@@ -56,7 +56,7 @@ def bind_mounts(self) -> List[Mount]:
         return mounts
 
     @cached_property
-    def environment_variables(self) -> Dict[str, str]:
+    def environment_variables(self) -> dict[str, str]:
         """Environment variables to pass to the docker container.
 
         This is a subset of the environment variables stored in the context
@@ -64,13 +64,11 @@ def environment_variables(self) -> Dict[str, str]:
 
         """
         docker_env_vars = super().environment_variables
-        pip_env_vars = {
-            k: v for k, v in self.ctx.env.vars.items() if k.startswith("PIP")
-        }
+        pip_env_vars = {k: v for k, v in self.ctx.env.vars.items() if k.startswith("PIP")}
         return {**docker_env_vars, **pip_env_vars}
 
     @cached_property
-    def install_commands(self) -> List[str]:
+    def install_commands(self) -> list[str]:
         """Commands to run to install dependencies."""
         if self.project.requirements_txt:
             return [
@@ -88,7 +86,7 @@ def install_commands(self) -> List[str]:
         return []
 
     @cached_property
-    def python_version(self) -> Optional[Version]:
+    def python_version(self) -> Version | None:
         """Version of Python installed in the docker container."""
         match = re.search(
             r"Python (?P<version>\S*)",
@@ -99,7 +97,7 @@ def python_version(self) -> Optional[Version]:
         return Version(match.group("version"))
 
     @cached_property
-    def runtime(self) -> Optional[str]:
+    def runtime(self) -> str | None:
         """AWS Lambda runtime determined from the docker container's Python version."""
         if not self.python_version:
             return None
diff --git a/runway/cfngin/hooks/awslambda/python_requirements/_project.py b/runway/cfngin/hooks/awslambda/python_requirements/_project.py
index 25eaacd4d..a85c3fa4c 100644
--- a/runway/cfngin/hooks/awslambda/python_requirements/_project.py
+++ b/runway/cfngin/hooks/awslambda/python_requirements/_project.py
@@ -4,9 +4,7 @@
 
 import logging
 import shutil
-from typing import TYPE_CHECKING, ClassVar, Optional, Set, Tuple
-
-from typing_extensions import Literal
+from typing import TYPE_CHECKING, ClassVar
 
 from .....compat import cached_property
 from .....dependency_managers import (
@@ -23,6 +21,8 @@
 if TYPE_CHECKING:
     from pathlib import Path
 
+    from typing_extensions import Literal
+
 
 LOGGER = logging.getLogger(__name__.replace("._", "."))
 
@@ -33,29 +33,23 @@ class PythonProject(Project[PythonHookArgs]):
     """Name of the default cache directory."""
 
     @cached_property
-    def docker(self) -> Optional[PythonDockerDependencyInstaller]:
+    def docker(self) -> PythonDockerDependencyInstaller | None:
         """Docker interface that can be used to build the project."""
         return PythonDockerDependencyInstaller.from_project(self)
 
     @cached_property
-    def metadata_files(self) -> Tuple[Path, ...]:
+    def metadata_files(self) -> tuple[Path, ...]:
         """Project metadata files.
 
         Files are only included in return value if they exist.
 
         """
         if self.project_type == "poetry":
-            config_files = [
-                self.project_root / config_file for config_file in Poetry.CONFIG_FILES
-            ]
+            config_files = [self.project_root / config_file for config_file in Poetry.CONFIG_FILES]
         elif self.project_type == "pipenv":
-            config_files = [
-                self.project_root / config_file for config_file in Pipenv.CONFIG_FILES
-            ]
+            config_files = [self.project_root / config_file for config_file in Pipenv.CONFIG_FILES]
         else:
-            config_files = [
-                self.project_root / config_file for config_file in Pip.CONFIG_FILES
-            ]
+            config_files = [self.project_root / config_file for config_file in Pip.CONFIG_FILES]
         return tuple(path for path in config_files if path.exists())
 
     @cached_property
@@ -78,7 +72,7 @@ def pip(self) -> Pip:
         return Pip(self.ctx, self.project_root)
 
     @cached_property
-    def pipenv(self) -> Optional[Pipenv]:
+    def pipenv(self) -> Pipenv | None:
         """Pipenv dependency manager.
 
         Return:
@@ -96,7 +90,7 @@ def pipenv(self) -> Optional[Pipenv]:
         raise PipenvNotFoundError
 
     @cached_property
-    def poetry(self) -> Optional[Poetry]:
+    def poetry(self) -> Poetry | None:
         """Poetry dependency manager.
 
         Return:
@@ -119,19 +113,15 @@ def project_type(self) -> Literal["pip", "pipenv", "poetry"]:
         if Poetry.dir_is_project(self.project_root):
             if self.args.use_poetry:
                 return "poetry"
-            LOGGER.warning(
-                "poetry project detected but use of poetry is explicitly disabled"
-            )
+            LOGGER.warning("poetry project detected but use of poetry is explicitly disabled")
         if Pipenv.dir_is_project(self.project_root):
             if self.args.use_pipenv:
                 return "pipenv"
-            LOGGER.warning(
-                "pipenv project detected but use of pipenv is explicitly disabled"
-            )
+            LOGGER.warning("pipenv project detected but use of pipenv is explicitly disabled")
         return "pip"
 
     @cached_property
-    def requirements_txt(self) -> Optional[Path]:
+    def requirements_txt(self) -> Path | None:
         """Dependency file for the project."""
         if self.poetry:  # prioritize poetry
             return self.poetry.export(output=self.tmp_requirements_txt)
@@ -143,7 +133,7 @@ def requirements_txt(self) -> Optional[Path]:
         return None
 
     @cached_property
-    def supported_metadata_files(self) -> Set[str]:
+    def supported_metadata_files(self) -> set[str]:
         """Names of all supported metadata files.
 
         Returns:
@@ -190,8 +180,6 @@ def install_dependencies(self) -> None:
                 requirements=self.requirements_txt,
                 target=self.dependency_directory,
             )
-            LOGGER.debug(
-                "dependencies successfully installed to %s", self.dependency_directory
-            )
+            LOGGER.debug("dependencies successfully installed to %s", self.dependency_directory)
         else:
             LOGGER.info("skipped installing dependencies; none found")
diff --git a/runway/cfngin/hooks/awslambda/source_code.py b/runway/cfngin/hooks/awslambda/source_code.py
index e815c79ca..c351958e2 100644
--- a/runway/cfngin/hooks/awslambda/source_code.py
+++ b/runway/cfngin/hooks/awslambda/source_code.py
@@ -5,7 +5,7 @@
 import hashlib
 import logging
 from pathlib import Path
-from typing import TYPE_CHECKING, Iterator, List, Optional, Sequence, Union
+from typing import TYPE_CHECKING
 
 import igittigitt
 
@@ -13,6 +13,8 @@
 from runway.utils import FileHash
 
 if TYPE_CHECKING:
+    from collections.abc import Iterator, Sequence
+
     from _typeshed import StrPath
 
 LOGGER = logging.getLogger(__name__)
@@ -41,9 +43,9 @@ def __init__(
         self,
         root_directory: StrPath,
         *,
-        gitignore_filter: Optional[igittigitt.IgnoreParser] = None,
-        include_files_in_hash: Optional[Sequence[Path]] = None,
-        project_root: Optional[StrPath] = None,
+        gitignore_filter: igittigitt.IgnoreParser | None = None,
+        include_files_in_hash: Sequence[Path] | None = None,
+        project_root: StrPath | None = None,
     ) -> None:
         """Instantiate class.
@@ -88,7 +90,7 @@ def md5_hash(self) -> str:
         for include_file in self._include_files_in_hash:
             if include_file not in sorted_files:
                 sorted_files.append(include_file)
-        file_hash = FileHash(hashlib.md5())
+        file_hash = FileHash(hashlib.md5())  # noqa: S324
         file_hash.add_files(sorted(sorted_files), relative_to=self.project_root)
         return file_hash.hexdigest
 
@@ -101,7 +103,7 @@ def add_filter_rule(self, pattern: str) -> None:
         """
         self.gitignore_filter.add_rule(pattern=pattern, base_path=self.root_directory)
 
-    def sorted(self, *, reverse: bool = False) -> List[Path]:
+    def sorted(self, *, reverse: bool = False) -> list[Path]:
         """Sorted list of source code files.
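The ``# noqa: S324`` acknowledges ruff's insecure-hash rule: MD5 here fingerprints source files for change detection, not anything security sensitive. The same idea using only the stdlib (runway's ``FileHash`` helper is not reproduced here)::

    from __future__ import annotations

    import hashlib
    from pathlib import Path


    def fingerprint(files: list[Path], relative_to: Path) -> str:
        digest = hashlib.md5()  # non-cryptographic use, so S324 would be suppressed
        for path in sorted(files):  # sorted so the digest is order-stable
            digest.update(str(path.relative_to(relative_to)).encode())
            digest.update(path.read_bytes())
        return digest.hexdigest()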
 
         Args:
@@ -120,7 +122,7 @@ def __eq__(self, other: object) -> bool:
             return self.root_directory == other.root_directory
         return False
 
-    def __fspath__(self) -> Union[str, bytes]:
+    def __fspath__(self) -> str | bytes:
         """Return the file system path representation of the object."""
         return str(self.root_directory)
diff --git a/runway/cfngin/hooks/awslambda/type_defs.py b/runway/cfngin/hooks/awslambda/type_defs.py
index d3aacb37b..168a86396 100644
--- a/runway/cfngin/hooks/awslambda/type_defs.py
+++ b/runway/cfngin/hooks/awslambda/type_defs.py
@@ -2,16 +2,14 @@
 
 from __future__ import annotations
 
-from typing import Optional
-
 from typing_extensions import TypedDict
 
 
 class AwsLambdaHookDeployResponseTypedDict(TypedDict):
-    """Dict output of :class:`runway.cfngin.hooks.awslambda.models.response.AwsLambdaHookDeployResponse` using aliases."""  # noqa
+    """Dict output of :class:`runway.cfngin.hooks.awslambda.models.responses.AwsLambdaHookDeployResponse` using aliases."""  # noqa: E501
 
     CodeSha256: str
     Runtime: str
     S3Bucket: str
     S3Key: str
-    S3ObjectVersion: Optional[str]
+    S3ObjectVersion: str | None
diff --git a/runway/cfngin/hooks/base.py b/runway/cfngin/hooks/base.py
index 9c48d517d..668460ede 100644
--- a/runway/cfngin/hooks/base.py
+++ b/runway/cfngin/hooks/base.py
@@ -3,7 +3,7 @@
 from __future__ import annotations
 
 import logging
-from typing import TYPE_CHECKING, Any, ClassVar, Dict, Optional, Type, Union, cast
+from typing import TYPE_CHECKING, Any, ClassVar, cast
 
 from troposphere import Tags
 
@@ -28,7 +28,7 @@
 class HookArgsBaseModel(BaseModel):
     """Base model for hook args."""
 
-    tags: Dict[str, str] = {}
+    tags: dict[str, str] = {}
 
 
 class Hook(CfnginHookProtocol):
@@ -46,33 +46,31 @@ class Hook(CfnginHookProtocol):
 
     """
 
-    ARGS_PARSER: ClassVar[Type[HookArgsBaseModel]] = HookArgsBaseModel
+    ARGS_PARSER: ClassVar = HookArgsBaseModel
     """Class used to parse arguments passed to the hook."""
 
-    args: HookArgsBaseModel
-    blueprint: Optional[Blueprint] = None
+    blueprint: Blueprint | None = None
     context: CfnginContext
     provider: Provider
-    stack: Optional[Stack] = None
+    stack: Stack | None = None
     stack_name: str = "stack"
 
-    def __init__(  # pylint: disable=super-init-not-called
-        self, context: CfnginContext, provider: Provider, **kwargs: Any
-    ) -> None:
+    def __init__(self, context: CfnginContext, provider: Provider, **kwargs: Any) -> None:
         """Instantiate class.
 
         Args:
             context: Context instance. (passed in by CFNgin)
             provider: Provider instance. (passed in by CFNgin)
+            **kwargs: Arbitrary keyword arguments.
 
         """
         kwargs.setdefault("tags", {})
-        self.args = self.ARGS_PARSER.parse_obj(kwargs)
+        self.args = self.ARGS_PARSER.model_validate(kwargs)
         self.args.tags.update(context.tags)
         self.context = context
         self.provider = provider
-        # TODO BREAKING remove these from the primary base class
+        # TODO (kyle): BREAKING remove these from the primary base class
         self._deploy_action = HookDeployAction(self.context, self.provider)
         self._destroy_action = HookDestroyAction(self.context, self.provider)
 
@@ -83,14 +81,14 @@ def tags(self) -> Tags:
 
     def generate_stack(self, **kwargs: Any) -> Stack:
         """Create a CFNgin Stack object."""
-        definition = CfnginStackDefinitionModel.construct(
+        definition = CfnginStackDefinitionModel.model_construct(
             name=self.stack_name, tags=self.args.tags, **kwargs
         )
         stack = Stack(definition, self.context)
-        stack._blueprint = self.blueprint  # pylint: disable=protected-access
+        stack._blueprint = self.blueprint  # noqa: SLF001
         return stack
 
-    def get_template_description(self, suffix: Optional[str] = None) -> str:
+    def get_template_description(self, suffix: str | None = None) -> str:
         """Generate a template description.
 
         Args:
@@ -104,7 +102,7 @@ def get_template_description(self, suffix: Optional[str] = None) -> str:
             return template.format(self.__class__.__module__, suffix)
         return template.format(self.__class__.__module__)
 
-    def deploy_stack(self, stack: Optional[Stack] = None, wait: bool = False) -> Status:
+    def deploy_stack(self, stack: Stack | None = None, wait: bool = False) -> Status:
         """Deploy a stack.
 
         Args:
@@ -115,13 +113,9 @@ def deploy_stack(self, stack: Optional[Stack] = None, wait: bool = False) -> Sta
             Ending status of the stack.
 
         """
-        return self._run_stack_action(
-            action=self._deploy_action, stack=stack, wait=wait
-        )
+        return self._run_stack_action(action=self._deploy_action, stack=stack, wait=wait)
 
-    def destroy_stack(
-        self, stack: Optional[Stack] = None, wait: bool = False
-    ) -> Status:
+    def destroy_stack(self, stack: Stack | None = None, wait: bool = False) -> Status:
         """Destroy a stack.
 
         Args:
@@ -132,9 +126,7 @@ def destroy_stack(
             Ending status of the stack.
 
         """
-        return self._run_stack_action(
-            action=self._destroy_action, stack=stack, wait=wait
-        )
+        return self._run_stack_action(action=self._destroy_action, stack=stack, wait=wait)
 
     def post_deploy(self) -> Any:
         """Run during the **post_deploy** stage."""
@@ -175,8 +167,8 @@ def _log_stack(stack: Stack, status: Status) -> None:
 
     def _run_stack_action(
         self,
-        action: Union[HookDeployAction, HookDestroyAction],
-        stack: Optional[Stack] = None,
+        action: HookDeployAction | HookDestroyAction,
+        stack: Stack | None = None,
         wait: bool = False,
     ) -> Status:
         """Run a CFNgin hook modified for use in hooks.
@@ -197,18 +189,16 @@ def _run_stack_action(
         self._log_stack(stack, status)
 
         if wait and status != SKIPPED:
-            status = self._wait_for_stack(
-                action=action, stack=stack, last_status=status
-            )
+            status = self._wait_for_stack(action=action, stack=stack, last_status=status)
         return status
 
     def _wait_for_stack(
         self,
-        action: Union[HookDeployAction, HookDestroyAction],
-        last_status: Optional[Status] = None,
-        stack: Optional[Stack] = None,
-        till_reason: Optional[str] = None,
-    ):
+        action: HookDeployAction | HookDestroyAction,
+        last_status: Status | None = None,
+        stack: Stack | None = None,
+        till_reason: str | None = None,
+    ) -> Status:
         """Wait for a CloudFormation stack to complete.
 
         Args:
@@ -249,11 +239,11 @@ def _wait_for_stack(
         return status
 
 
-# TODO BREAKING find a better place for this - can cause cyclic imports
+# TODO (kyle): BREAKING find a better place for this - can cause cyclic imports
 class HookDeployAction(deploy.Action):
     """Deploy action that can be used from hooks."""
 
-    def __init__(self, context: CfnginContext, provider: Provider):
+    def __init__(self, context: CfnginContext, provider: Provider) -> None:
         """Instantiate class.
 
         Args:
diff --git a/runway/cfngin/hooks/cleanup_s3.py b/runway/cfngin/hooks/cleanup_s3.py
index f6fcfad66..dba656331 100644
--- a/runway/cfngin/hooks/cleanup_s3.py
+++ b/runway/cfngin/hooks/cleanup_s3.py
@@ -24,16 +24,14 @@ class PurgeBucketHookArgs(BaseModel):
 
 def purge_bucket(context: CfnginContext, *__args: Any, **kwargs: Any) -> bool:
     """Delete objects in bucket."""
-    args = PurgeBucketHookArgs.parse_obj(kwargs)
+    args = PurgeBucketHookArgs.model_validate(kwargs)
     session = context.get_session()
     s3_resource = session.resource("s3")
     try:
         s3_resource.meta.client.head_bucket(Bucket=args.bucket_name)
     except ClientError as exc:
         if exc.response["Error"]["Code"] == "404":
-            LOGGER.info(
-                'bucket "%s" does not exist; unable to complete purge', args.bucket_name
-            )
+            LOGGER.info('bucket "%s" does not exist; unable to complete purge', args.bucket_name)
             return True
         raise
diff --git a/runway/cfngin/hooks/cleanup_ssm.py b/runway/cfngin/hooks/cleanup_ssm.py
index 6b4f69cc2..b50fbc5b6 100644
--- a/runway/cfngin/hooks/cleanup_ssm.py
+++ b/runway/cfngin/hooks/cleanup_ssm.py
@@ -22,7 +22,7 @@ class DeleteParamHookArgs(BaseModel):
 
 def delete_param(context: CfnginContext, *__args: Any, **kwargs: Any) -> bool:
     """Delete SSM parameter."""
-    args = DeleteParamHookArgs.parse_obj(kwargs)
+    args = DeleteParamHookArgs.model_validate(kwargs)
     session = context.get_session()
     ssm_client = session.client("ssm")
diff --git a/runway/cfngin/hooks/command.py b/runway/cfngin/hooks/command.py
index 110bf5b32..d30ee1eb1 100644
--- a/runway/cfngin/hooks/command.py
+++ b/runway/cfngin/hooks/command.py
@@ -1,9 +1,11 @@
 """Command hook."""
 
+from __future__ import annotations
+
 import logging
 import os
 import subprocess
-from typing import Any, Dict, List, Optional, Union
+from typing import Any
 
 from typing_extensions import TypedDict
 
@@ -19,10 +21,10 @@ class RunCommandHookArgs(BaseModel):
     capture: bool = False
     """If enabled, capture the command's stdout and stderr, and return them in the hook result."""
 
-    command: Union[str, List[str]]
+    command: str | list[str]
     """Command(s) to run."""
 
-    env: Optional[Dict[str, str]] = None
+    env: dict[str, str] | None = None
     """Dictionary of environment variable overrides for the command context.
 
     Will be merged with the current environment.
@@ -43,7 +45,7 @@ class RunCommandHookArgs(BaseModel):
 
     """
 
-    stdin: Optional[str] = None
+    stdin: str | None = None
     """String to send to the stdin of the command. Implicitly disables ``interactive``."""
 
@@ -55,7 +57,7 @@ class RunCommandResponseTypeDef(TypedDict, total=False):
     stdout: str
 
 
-def run_command(*__args: Any, **kwargs: Any) -> RunCommandResponseTypeDef:
+def run_command(*_args: Any, **kwargs: Any) -> RunCommandResponseTypeDef:  # noqa: C901, PLR0912
     """Run a custom command as a hook.
 
     Arguments not parsed by the data model will be forwarded to the
@@ -90,10 +92,10 @@ def run_command(*__args: Any, **kwargs: Any) -> RunCommandResponseTypeDef:
                 shell: true
 
     """
-    args = RunCommandHookArgs.parse_obj(kwargs)
+    args = RunCommandHookArgs.model_validate(kwargs)
 
     # remove parsed args from kwargs
-    for field in RunCommandHookArgs.__fields__:
+    for field in RunCommandHookArgs.model_fields:
         kwargs.pop(field, None)
 
     # remove unneeded args from kwargs
@@ -106,7 +108,7 @@ def run_command(*__args: Any, **kwargs: Any) -> RunCommandResponseTypeDef:
             ValueError("Cannot enable `quiet` and `capture` options simultaneously"),
         )
 
-    with open(os.devnull, "wb") as devnull:
+    with open(os.devnull, "wb") as devnull:  # noqa: PTH123
         if args.quiet:
             out_err_type = devnull
         elif args.capture:
@@ -147,10 +149,8 @@ def run_command(*__args: Any, **kwargs: Any) -> RunCommandResponseTypeDef:
             if LOGGER.isEnabledFor(logging.INFO):  # cov: ignore
                 LOGGER.warning("command failed with returncode %d", status)
             else:
-                LOGGER.warning(
-                    "command failed with returncode %d: %s", status, args.command
-                )
+                LOGGER.warning("command failed with returncode %d: %s", status, args.command)
             return {}
-    except Exception:  # pylint: disable=broad-except # cov: ignore
+    except Exception:  # cov: ignore  # noqa: BLE001
         return {}
diff --git a/runway/cfngin/hooks/docker/_login.py b/runway/cfngin/hooks/docker/_login.py
index 9eebfbc8f..b037eccd7 100644
--- a/runway/cfngin/hooks/docker/_login.py
+++ b/runway/cfngin/hooks/docker/_login.py
@@ -1,19 +1,19 @@
 """Docker login hook."""
 
-# pylint: disable=no-self-argument
 from __future__ import annotations
 
 import logging
-from typing import TYPE_CHECKING, Any, Dict, Optional
+from typing import TYPE_CHECKING, Annotated, Any
 
-from pydantic import Field, validator
+from pydantic import ConfigDict, Field, field_validator, model_validator
 
+from ....context import CfnginContext
 from ....utils import BaseModel
 from .data_models import ElasticContainerRegistry
 from .hook_data import DockerHookData
 
 if TYPE_CHECKING:
-    from ....context import CfnginContext
+    from pydantic import ValidationInfo
 
 LOGGER = logging.getLogger(__name__.replace("._", "."))
 
@@ -21,42 +21,46 @@ class LoginArgs(BaseModel):
     """Args passed to the docker.login hook."""
 
-    _ctx: Optional[CfnginContext] = Field(default=None, alias="context", exclude=True)
+    model_config = ConfigDict(arbitrary_types_allowed=True)
 
-    dockercfg_path: Optional[str] = None
+    ctx: Annotated[CfnginContext | None, Field(alias="context", exclude=True)] = None
+
+    dockercfg_path: str | None = None
     """Path to a non-default Docker config file."""
 
-    ecr: Optional[ElasticContainerRegistry] = Field(default=None, exclude=True)
+    ecr: ElasticContainerRegistry | None = Field(default=None, exclude=True)
     """Information describing an ECR registry."""
 
-    email: Optional[str] = None
+    email: str | None = None
     """The email for the registry account."""
 
     password: str
     """The plaintext password for the registry account."""
 
-    registry: Optional[str] = None
+    registry: Annotated[str | None, Field(validate_default=True)] = None
     """URI of the registry to login to."""
 
     username: str = "AWS"
     """The registry username."""
 
-    @validator("ecr", pre=True, allow_reuse=True)
-    def _set_ecr(cls, v: Any, values: Dict[str, Any]) -> Any:
+    @model_validator(mode="before")
+    @classmethod
+    def _set_ecr(cls, values: dict[str, Any]) -> dict[str, Any]:
         """Set the value of ``ecr``."""
-        if v and isinstance(v, dict):
-            return ElasticContainerRegistry.parse_obj(
-                {"context": values.get("context"), **v}
+        if "ecr" in values and isinstance(values["ecr"], dict):
+            values["ecr"] = ElasticContainerRegistry.model_validate(
+                {"context": values.get("context"), **values["ecr"]}
             )
-        return v
+        return values
 
-    @validator("registry", pre=True, always=True, allow_reuse=True)
-    def _set_registry(cls, v: Any, values: Dict[str, Any]) -> Any:
+    @field_validator("registry", mode="before")
+    @classmethod
+    def _set_registry(cls, v: Any, info: ValidationInfo) -> Any:
         """Set the value of ``registry``."""
         if v:
             return v
 
-        ecr: Optional[ElasticContainerRegistry] = values.get("ecr")
+        ecr: ElasticContainerRegistry | None = info.data.get("ecr")
         if ecr:
             return ecr.fqn
@@ -71,8 +75,8 @@ def login(*, context: CfnginContext, **kwargs: Any) -> DockerHookData:
     kwargs are parsed by :class:`~runway.cfngin.hooks.docker.LoginArgs`.
 
     """
-    args = LoginArgs.parse_obj({"context": context, **kwargs})
+    args = LoginArgs.model_validate({"context": context, **kwargs})
     docker_hook_data = DockerHookData.from_cfngin_context(context)
-    docker_hook_data.client.login(**args.dict())
+    docker_hook_data.client.login(**args.model_dump())
     LOGGER.info("logged into %s", args.registry)
     return docker_hook_data.update_context(context)
diff --git a/runway/cfngin/hooks/docker/data_models.py b/runway/cfngin/hooks/docker/data_models.py
index d8b97f5fc..6773b6b08 100644
--- a/runway/cfngin/hooks/docker/data_models.py
+++ b/runway/cfngin/hooks/docker/data_models.py
@@ -5,13 +5,12 @@
 
 """
 
-# pylint: disable=no-self-argument
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, cast
+from typing import TYPE_CHECKING, Annotated, Any, ClassVar, cast
 
 from docker.models.images import Image
-from pydantic import Field, root_validator
+from pydantic import ConfigDict, Field, PrivateAttr, model_validator
 
 from ....core.providers.aws import AccountDetails
 from ....utils import BaseModel, MutableMap
@@ -19,10 +18,7 @@
 if TYPE_CHECKING:
     from ....context import CfnginContext
 
-ECR_REPO_FQN_TEMPLATE = (
-    "{aws_account_id}.dkr.ecr.{aws_region}.amazonaws.com/{repo_name}"
-)
+ECR_REPO_FQN_TEMPLATE = "{aws_account_id}.dkr.ecr.{aws_region}.amazonaws.com/{repo_name}"
 
 
 class ElasticContainerRegistry(BaseModel):
@@ -31,19 +27,18 @@ class ElasticContainerRegistry(BaseModel):
     PUBLIC_URI_TEMPLATE: ClassVar[str] = "public.ecr.aws/{registry_alias}/"
     URI_TEMPLATE: ClassVar[str] = "{aws_account_id}.dkr.ecr.{aws_region}.amazonaws.com/"
 
-    _ctx: Optional[CfnginContext] = Field(default=None, alias="context", exclude=True)
-    """CFNgin context."""
+    model_config = ConfigDict(arbitrary_types_allowed=True, populate_by_name=True)
 
-    account_id: Optional[str] = None
+    account_id: str | None = None
     """AWS account ID that owns the registry being logged into."""
 
-    alias: Optional[str] = None
+    alias: str | None = None
     """If it is a public repository, the alias of the repository."""
 
     public: bool = True
     """Whether the repository is public."""
 
-    region: Optional[str] = Field(default=None, alias="aws_region")
+    region: str | None = Field(default=None, alias="aws_region")
     """AWS region where the registry is located."""
 
     @property
@@ -51,42 +46,35 @@ def fqn(self) -> str:
         """Fully qualified ECR name."""
         if self.public:
             return self.PUBLIC_URI_TEMPLATE.format(registry_alias=self.alias)
-        return self.URI_TEMPLATE.format(
-            aws_account_id=self.account_id, aws_region=self.region
-        )
+        return self.URI_TEMPLATE.format(aws_account_id=self.account_id, aws_region=self.region)
 
-    @root_validator(allow_reuse=True, pre=True)
-    def _set_defaults(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+    @model_validator(mode="before")
+    @classmethod
+    def _set_defaults(cls, values: dict[str, Any]) -> dict[str, Any]:
         """Set default values based on other values."""
         values.setdefault("public", bool(values.get("alias")))
 
         if not values["public"]:
             account_id = values.get("account_id")
-            ctx: Optional[CfnginContext] = values.get("context")
-            region = values.get("aws_region")
-            if not ctx and not (account_id or region):
+            ctx: CfnginContext | None = values.get("context")
+            aws_region = values.get("aws_region")
+            if not ctx and not (account_id or aws_region):
                 raise ValueError("context is required to resolve values")
             if ctx:
                 if not account_id:
                     values["account_id"] = AccountDetails(ctx).id
-                if not region:
+                if not aws_region:
                     values["aws_region"] = ctx.env.aws_region or "us-east-1"
-
         return values
 
 
 class DockerImage(BaseModel):
     """Wrapper for :class:`docker.models.images.Image`."""
 
-    image: Image
-    _repo: Optional[str] = None
-
-    class Config:
-        """Model configuration."""
+    model_config = ConfigDict(arbitrary_types_allowed=True)
 
-        arbitrary_types_allowed = True
-        fields = {"_repo": {"exclude": True}}
-        underscore_attrs_are_private = True
+    _repo: str | None = PrivateAttr(default=None)
+    image: Image
 
     @property
     def id(self) -> str:
@@ -106,7 +94,7 @@ def short_id(self) -> str:
         return self.image.short_id
 
     @property
-    def tags(self) -> List[str]:
+    def tags(self) -> list[str]:
         """List of image tags."""
         self.image.reload()
         return [uri.split(":")[-1] for uri in self.image.tags]
@@ -116,11 +104,17 @@ def uri(self) -> MutableMap:
         """Return a mapping of tag to image URI."""
         return MutableMap(**{uri.split(":")[-1]: uri for uri in self.image.tags})
 
+    def __bool__(self) -> bool:
+        """Evaluate the boolean value of the object instance."""
+        return True
+
 
 class ElasticContainerRegistryRepository(BaseModel):
     """AWS Elastic Container Registry (ECR) Repository."""
 
-    name: str = Field(..., alias="repo_name")
+    model_config = ConfigDict(populate_by_name=True)
+
+    name: Annotated[str, Field(alias="repo_name")]
     """The name of the repository."""
 
     registry: ElasticContainerRegistry
diff --git a/runway/cfngin/hooks/docker/hook_data.py b/runway/cfngin/hooks/docker/hook_data.py
index ad58cce04..73e6c8d66 100644
--- a/runway/cfngin/hooks/docker/hook_data.py
+++ b/runway/cfngin/hooks/docker/hook_data.py
@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Optional, overload
+from typing import TYPE_CHECKING, overload
 
 from docker import DockerClient
 
@@ -17,7 +17,7 @@
 class DockerHookData(MutableMap):
     """Docker hook_data object."""
 
-    image: Optional["DockerImage"] = None
+    image: DockerImage | None = None
 
     @cached_property
     def client(self) -> DockerClient:
@@ -25,16 +25,12 @@ def client(self) -> DockerClient:
         return DockerClient.from_env()
 
     @overload
-    def update_context(self, context: CfnginContext = ...) -> DockerHookData:  # noqa
-        ...
+    def update_context(self, context: CfnginContext = ...) -> DockerHookData: ...
 
     @overload
-    def update_context(self, context: None = ...) -> None:  # noqa
-        ...
+    def update_context(self, context: None = ...) -> None: ...
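``_set_defaults`` above is the v2 replacement for ``root_validator(pre=True)``: a ``mode="before"`` model validator receives the raw input and must return it after filling in derived values. A trimmed-down sketch of the same shape (hypothetical model, assuming pydantic>=2)::

    from __future__ import annotations

    from typing import Any

    from pydantic import BaseModel, model_validator


    class Registry(BaseModel):
        alias: str | None = None
        public: bool = True

        @model_validator(mode="before")
        @classmethod
        def _set_defaults(cls, values: dict[str, Any]) -> dict[str, Any]:
            # runs before field validation, so one raw value can derive another
            values.setdefault("public", bool(values.get("alias")))
            return values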
-    def update_context(
-        self, context: Optional[CfnginContext] = None
-    ) -> Optional[DockerHookData]:
+    def update_context(self, context: CfnginContext | None = None) -> DockerHookData | None:
         """Update context object with new the current DockerHookData."""
         if not context:
             return None
diff --git a/runway/cfngin/hooks/docker/image/_build.py b/runway/cfngin/hooks/docker/image/_build.py
index 7cc31eeb1..90dbc34bb 100644
--- a/runway/cfngin/hooks/docker/image/_build.py
+++ b/runway/cfngin/hooks/docker/image/_build.py
@@ -4,26 +4,15 @@
 
 """
 
-# pylint: disable=no-self-argument
 from __future__ import annotations
 
 import logging
 from pathlib import Path
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Dict,
-    Iterator,
-    List,
-    Optional,
-    Tuple,
-    Union,
-    cast,
-)
+from typing import TYPE_CHECKING, Annotated, Any
 
-from docker.models.images import Image
-from pydantic import DirectoryPath, Field, validator
+from pydantic import ConfigDict, DirectoryPath, Field, field_validator
 
+from .....context import CfnginContext
 from .....utils import BaseModel
 from ..data_models import (
     DockerImage,
@@ -33,7 +22,7 @@
 from ..hook_data import DockerHookData
 
 if TYPE_CHECKING:
-    from .....context import CfnginContext
+    from pydantic import ValidationInfo
 
 LOGGER = logging.getLogger(__name__.replace("._", "."))
 
@@ -41,13 +30,13 @@ class DockerImageBuildApiOptions(BaseModel):
     """Options for controlling Docker."""
 
-    buildargs: Dict[str, Any] = {}
+    buildargs: dict[str, Any] = {}
     """Dict of build-time variables that will be passed to Docker."""
 
     custom_context: bool = False
     """Whether to use custom context when providing a file object."""
 
-    extra_hosts: Dict[str, str] = {}
+    extra_hosts: dict[str, str] = {}
     """Extra hosts to add to `/etc/hosts` in the build containers.
 
     Defined as a mapping of hostname to IP address.
@@ -56,16 +45,16 @@ class DockerImageBuildApiOptions(BaseModel):
     forcerm: bool = False
     """Always remove intermediate containers, even after unsuccessful builds."""
 
-    isolation: Optional[str] = None
+    isolation: str | None = None
     """Isolation technology used during build."""
 
-    network_mode: Optional[str] = None
+    network_mode: str | None = None
     """Network mode for the run commands during build."""
 
     nocache: bool = False
     """Whether to use cache."""
 
-    platform: Optional[str] = None
+    platform: str | None = None
     """Set platform if server is multi-platform capable.
 
     Uses format ``os[/arch[/variant]]``.
@@ -80,13 +69,13 @@ class DockerImageBuildApiOptions(BaseModel):
     squash: bool = False
     """Whether to squash the resulting image layers into a single layer."""
 
-    tag: Optional[str] = None
+    tag: str | None = None
     """Optional name and tag to apply to the base image when it is built."""
 
-    target: Optional[str] = None
+    target: str | None = None
     """Name of the build-stage to build in a multi-stage Dockerfile."""
 
-    timeout: Optional[int] = None
+    timeout: int | None = None
     """HTTP timeout."""
 
     use_config_proxy: bool = False
@@ -101,9 +90,11 @@ class DockerImageBuildApiOptions(BaseModel):
 class ImageBuildArgs(BaseModel):
     """Args passed to image.build."""
 
-    _ctx: Optional[CfnginContext] = Field(default=None, alias="context", export=False)
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+    ctx: Annotated[CfnginContext | None, Field(alias="context", exclude=True)] = None
 
-    ecr_repo: Optional[ElasticContainerRegistryRepository] = None  # depends on _ctx
+    ecr_repo: ElasticContainerRegistryRepository | None = None  # depends on ctx
     """AWS Elastic Container Registry repository information.
 
     Providing this will automatically construct the repo URI. If provided, do not
     provide ``repo``.
@@ -119,23 +110,26 @@ class ImageBuildArgs(BaseModel):
     dockerfile: str = "Dockerfile"  # depends on path for validation
     """Path within the build context to the Dockerfile."""
 
-    repo: Optional[str] = None  # depends on ecr_repo
+    repo: Annotated[str | None, Field(validate_default=True)] = None  # depends on ecr_repo
     """URI of a non Docker Hub repository where the image will be stored."""
 
-    docker: DockerImageBuildApiOptions = DockerImageBuildApiOptions()  # depends on repo
+    docker: Annotated[  # depends on repo
+        DockerImageBuildApiOptions, Field(validate_default=True)
+    ] = DockerImageBuildApiOptions()
     """Options for ``docker image build``."""
 
-    tags: List[str] = ["latest"]
+    tags: Annotated[list[str], Field(validate_default=True)] = ["latest"]
     """List of tags to apply to the image."""
 
-    @validator("docker", pre=True, always=True, allow_reuse=True)
+    @field_validator("docker", mode="before")
+    @classmethod
     def _set_docker(
         cls,
-        v: Union[Dict[str, Any], DockerImageBuildApiOptions, Any],
-        values: Dict[str, Any],
+        v: dict[str, Any] | DockerImageBuildApiOptions | Any,
+        info: ValidationInfo,
     ) -> Any:
         """Set the value of ``docker``."""
-        repo = values["repo"]
+        repo = info.data["repo"]
         if repo:
             if isinstance(v, dict):
                 v.setdefault("tag", repo)
@@ -143,46 +137,47 @@ def _set_docker(
                 v.tag = repo
         return v
 
-    @validator("ecr_repo", pre=True, allow_reuse=True)
-    def _set_ecr_repo(cls, v: Any, values: Dict[str, Any]) -> Any:
+    @field_validator("ecr_repo", mode="before")
+    @classmethod
+    def _set_ecr_repo(cls, v: Any, info: ValidationInfo) -> Any:
         """Set the value of ``ecr_repo``."""
         if v and isinstance(v, dict):
-            return ElasticContainerRegistryRepository.parse_obj(
+            return ElasticContainerRegistryRepository.model_validate(
                 {
                     "repo_name": v.get("repo_name"),
-                    "registry": ElasticContainerRegistry.parse_obj(
+                    "registry": ElasticContainerRegistry.model_validate(
                         {
                             "account_id": v.get("account_id"),
                             "alias": v.get("registry_alias"),
                             "aws_region": v.get("aws_region"),
-                            "context": values.get("context"),
+                            "context": info.data.get("context"),
                         }
                     ),
                 }
             )
         return v
 
-    @validator("repo", pre=True, always=True, allow_reuse=True)
-    def _set_repo(cls, v: Optional[str], values: Dict[str, Any]) -> Optional[str]:
+    @field_validator("repo", mode="before")
+    @classmethod
+    def _set_repo(cls, v: str | None, info: ValidationInfo) -> str | None:
         """Set the value of ``repo``."""
         if v:
             return v
 
-        ecr_repo: Optional[ElasticContainerRegistryRepository] = values.get("ecr_repo")
+        ecr_repo: ElasticContainerRegistryRepository | None = info.data.get("ecr_repo")
         if ecr_repo:
             return ecr_repo.fqn
 
         return None
 
-    @validator("dockerfile", pre=True, always=True, allow_reuse=True)
-    def _validate_dockerfile(cls, v: Any, values: Dict[str, Any]) -> Any:
+    @field_validator("dockerfile", mode="before")
+    @classmethod
+    def _validate_dockerfile(cls, v: Any, info: ValidationInfo) -> Any:
         """Validate ``dockerfile``."""
-        path: Path = values["path"]
+        path: Path = info.data["path"]
         dockerfile = path / v
         if not dockerfile.is_file():
-            raise ValueError(
-                f"Dockerfile does not exist at path provided: {dockerfile}"
-            )
+            raise ValueError(f"Dockerfile does not exist at path provided: {dockerfile}")
         return v
 
@@ -194,11 +189,10 @@ def build(*, context: CfnginContext, **kwargs: Any) -> DockerHookData:
     kwargs are parsed by :class:`~runway.cfngin.hooks.docker.image.ImageBuildArgs`.
 
     """
-    args = ImageBuildArgs.parse_obj({"context": context, **kwargs})
+    args = ImageBuildArgs.model_validate({"context": context, **kwargs})
     docker_hook_data = DockerHookData.from_cfngin_context(context)
-    image, logs = cast(
-        Tuple[Image, Iterator[Dict[str, str]]],
-        docker_hook_data.client.images.build(path=str(args.path), **args.docker.dict()),
+    image, logs = docker_hook_data.client.images.build(
+        path=str(args.path), **args.docker.model_dump()
     )
     for msg in logs:  # iterate through JSON log messages
         if "stream" in msg:  # log if they contain a message
diff --git a/runway/cfngin/hooks/docker/image/_push.py b/runway/cfngin/hooks/docker/image/_push.py
index 4f4921afb..f4da40288 100644
--- a/runway/cfngin/hooks/docker/image/_push.py
+++ b/runway/cfngin/hooks/docker/image/_push.py
@@ -4,14 +4,14 @@
 
 """
 
-# pylint: disable=no-self-argument
 from __future__ import annotations
 
 import logging
-from typing import TYPE_CHECKING, Any, Dict, List, Optional
+from typing import TYPE_CHECKING, Annotated, Any
 
-from pydantic import Field, validator
+from pydantic import ConfigDict, Field, field_validator
 
+from .....context import CfnginContext
 from .....utils import BaseModel
 from ..data_models import (
     DockerImage,
@@ -21,7 +21,7 @@
 from ..hook_data import DockerHookData
 
 if TYPE_CHECKING:
-    from .....context import CfnginContext
+    from pydantic import ValidationInfo
 
 LOGGER = logging.getLogger(__name__.replace("._", "."))
 
@@ -29,9 +29,11 @@ class ImagePushArgs(BaseModel):
     """Args passed to image.push."""
 
-    _ctx: Optional[CfnginContext] = Field(default=None, alias="context", export=False)
+    model_config = ConfigDict(arbitrary_types_allowed=True)
 
-    ecr_repo: Optional[ElasticContainerRegistryRepository] = None  # depends on _ctx
+    ctx: Annotated[CfnginContext | None, Field(alias="context", exclude=True)] = None
+
+    ecr_repo: ElasticContainerRegistryRepository | None = None  # depends on ctx
     """AWS Elastic Container Registry repository information.
 
     Providing this will automatically construct the repo URI. If provided, do not
     provide ``repo``.
@@ -41,57 +43,60 @@ class ImagePushArgs(BaseModel):
 
     """
 
-    image: Optional[DockerImage] = None
+    image: DockerImage | None = None
     """Image to push."""
 
-    repo: Optional[str] = None  # depends on ecr_repo & image
+    repo: Annotated[str | None, Field(validate_default=True)] = None  # depends on ecr_repo & image
     """URI of a non Docker Hub repository where the image will be stored."""
 
-    tags: List[str] = []  # depends on image
+    tags: Annotated[list[str], Field(validate_default=True)] = []  # depends on image
     """List of tags to push."""
 
-    @validator("ecr_repo", pre=True, allow_reuse=True)
-    def _set_ecr_repo(cls, v: Any, values: Dict[str, Any]) -> Any:
+    @field_validator("ecr_repo", mode="before")
+    @classmethod
+    def _set_ecr_repo(cls, v: Any, info: ValidationInfo) -> Any:
         """Set the value of ``ecr_repo``."""
         if v and isinstance(v, dict):
-            return ElasticContainerRegistryRepository.parse_obj(
+            return ElasticContainerRegistryRepository.model_validate(
                 {
                     "repo_name": v.get("repo_name"),
-                    "registry": ElasticContainerRegistry.parse_obj(
+                    "registry": ElasticContainerRegistry.model_validate(
                         {
                             "account_id": v.get("account_id"),
                             "alias": v.get("registry_alias"),
                             "aws_region": v.get("aws_region"),
-                            "context": values.get("context"),
+                            "context": info.data.get("context"),
                         }
                     ),
                 }
             )
         return v
 
-    @validator("repo", pre=True, always=True, allow_reuse=True)
-    def _set_repo(cls, v: Optional[str], values: Dict[str, Any]) -> Optional[str]:
+    @field_validator("repo", mode="before")
+    @classmethod
+    def _set_repo(cls, v: str | None, info: ValidationInfo) -> str | None:
         """Set the value of ``repo``."""
         if v:
             return v
 
-        image: Optional[DockerImage] = values.get("image")
+        image: DockerImage | None = info.data.get("image")
         if image:
             return image.repo
 
-        ecr_repo: Optional[ElasticContainerRegistryRepository] = values.get("ecr_repo")
+        ecr_repo: ElasticContainerRegistryRepository | None = info.data.get("ecr_repo")
         if ecr_repo:
             return ecr_repo.fqn
 
         return None
 
-    @validator("tags", pre=True, always=True, allow_reuse=True)
-    def _set_tags(cls, v: List[str], values: Dict[str, Any]) -> List[str]:
+    @field_validator("tags", mode="before")
+    @classmethod
+    def _set_tags(cls, v: list[str], info: ValidationInfo) -> list[str]:
         """Set the value of ``tags``."""
         if v:
             return v
 
-        image: Optional[DockerImage] = values.get("image")
+        image: DockerImage | None = info.data.get("image")
         if image:
             return image.tags
@@ -106,7 +111,7 @@ def push(*, context: CfnginContext, **kwargs: Any) -> DockerHookData:
     kwargs are parsed by :class:`~runway.cfngin.hooks.docker.image.ImagePushArgs`.
 
     """
-    args = ImagePushArgs.parse_obj({"context": context, **kwargs})
+    args = ImagePushArgs.model_validate({"context": context, **kwargs})
     docker_hook_data = DockerHookData.from_cfngin_context(context)
     LOGGER.info("pushing image %s...", args.repo)
     for tag in args.tags:
diff --git a/runway/cfngin/hooks/docker/image/_remove.py b/runway/cfngin/hooks/docker/image/_remove.py
index d2476c388..108df5cc7 100644
--- a/runway/cfngin/hooks/docker/image/_remove.py
+++ b/runway/cfngin/hooks/docker/image/_remove.py
@@ -4,15 +4,15 @@
 
 """
 
-# pylint: disable=no-self-argument
 from __future__ import annotations
 
 import logging
-from typing import TYPE_CHECKING, Any, Dict, List, Optional
+from typing import TYPE_CHECKING, Annotated, Any
 
 from docker.errors import ImageNotFound
-from pydantic import Field, validator
+from pydantic import ConfigDict, Field, field_validator
 
+from .....context import CfnginContext
 from .....utils import BaseModel
 from ..data_models import (
     DockerImage,
@@ -22,7 +22,7 @@
 from ..hook_data import DockerHookData
 
 if TYPE_CHECKING:
-    from .....context import CfnginContext
+    from pydantic import ValidationInfo
 
 LOGGER = logging.getLogger(__name__.replace("._", "."))
 
@@ -30,9 +30,11 @@ class ImageRemoveArgs(BaseModel):
     """Args passed to image.remove."""
 
-    _ctx: Optional[CfnginContext] = Field(default=None, alias="context", export=False)
+    model_config = ConfigDict(arbitrary_types_allowed=True)
 
-    ecr_repo: Optional[ElasticContainerRegistryRepository] = None  # depends on _ctx
+    ctx: Annotated[CfnginContext | None, Field(alias="context", exclude=True)] = None
+
+    ecr_repo: ElasticContainerRegistryRepository | None = None  # depends on ctx
     """AWS Elastic Container Registry repository information.
 
     Providing this will automatically construct the repo URI. If provided, do not
     provide ``repo``.
@@ -45,60 +47,63 @@ class ImageRemoveArgs(BaseModel):
     force: bool = False
     """Whether to force the removal of the image."""
 
-    image: Optional[DockerImage] = None
+    image: DockerImage | None = None
     """Image to push."""
 
     noprune: bool = False
     """Whether to delete untagged parents."""
 
-    repo: Optional[str] = None  # depends on ecr_repo & image
+    repo: Annotated[str | None, Field(validate_default=True)] = None  # depends on ecr_repo & image
     """URI of a non Docker Hub repository where the image will be stored."""
 
-    tags: List[str] = []  # depends on image
+    tags: Annotated[list[str], Field(validate_default=True)] = []
     """List of tags to remove."""
 
-    @validator("ecr_repo", pre=True, allow_reuse=True)
-    def _set_ecr_repo(cls, v: Any, values: Dict[str, Any]) -> Any:
+    @field_validator("ecr_repo", mode="before")
+    @classmethod
+    def _set_ecr_repo(cls, v: Any, info: ValidationInfo) -> Any:
         """Set the value of ``ecr_repo``."""
         if v and isinstance(v, dict):
-            return ElasticContainerRegistryRepository.parse_obj(
+            return ElasticContainerRegistryRepository.model_validate(
                 {
                     "repo_name": v.get("repo_name"),
-                    "registry": ElasticContainerRegistry.parse_obj(
+                    "registry": ElasticContainerRegistry.model_validate(
                         {
                             "account_id": v.get("account_id"),
                             "alias": v.get("registry_alias"),
                             "aws_region": v.get("aws_region"),
-                            "context": values.get("context"),
+                            "context": info.data.get("context"),
                         }
                     ),
                 }
             )
         return v
 
-    @validator("repo", pre=True, always=True, allow_reuse=True)
-    def _set_repo(cls, v: Optional[str], values: Dict[str, Any]) -> Optional[str]:
+    @field_validator("repo", mode="before")
+    @classmethod
+    def _set_repo(cls, v: str | None, info: ValidationInfo) -> str | None:
         """Set the value of ``repo``."""
         if v:
             return v
 
-        image: Optional[DockerImage] = values.get("image")
+        image: DockerImage | None = info.data.get("image")
         if image:
             return image.repo
 
-        ecr_repo: Optional[ElasticContainerRegistryRepository] = values.get("ecr_repo")
+        ecr_repo: ElasticContainerRegistryRepository | None = info.data.get("ecr_repo")
         if ecr_repo:
             return ecr_repo.fqn
 
         return None
 
-    @validator("tags", pre=True, always=True, allow_reuse=True)
-    def _set_tags(cls, v: List[str], values: Dict[str, Any]) -> List[str]:
+    @field_validator("tags", mode="before")
+    @classmethod
+    def _set_tags(cls, v: list[str], info: ValidationInfo) -> list[str]:
         """Set the value of ``tags``."""
         if v:
             return v
 
-        image: Optional[DockerImage] = values.get("image")
+        image: DockerImage | None = info.data.get("image")
         if image:
             return image.tags
@@ -113,7 +118,7 @@ def remove(*, context: CfnginContext, **kwargs: Any) -> DockerHookData:
     kwargs are parsed by :class:`~runway.cfngin.hooks.docker.image.ImageRemoveArgs`.
 
     """
-    args = ImageRemoveArgs.parse_obj({"context": context, **kwargs})
+    args = ImageRemoveArgs.model_validate({"context": context, **kwargs})
     docker_hook_data = DockerHookData.from_cfngin_context(context)
     LOGGER.info("removing local image %s...", args.repo)
     for tag in args.tags:
@@ -125,7 +130,10 @@ def remove(*, context: CfnginContext, **kwargs: Any) -> DockerHookData:
             LOGGER.info("successfully removed local image %s", image)
         except ImageNotFound:
             LOGGER.warning("local image %s does not exist", image)
-    if docker_hook_data.image and kwargs.get("image"):
-        if kwargs["image"].id == docker_hook_data.image.id:
-            docker_hook_data.image = None  # clear out the image that was set
+    if (
+        docker_hook_data.image
+        and kwargs.get("image")
+        and kwargs["image"].id == docker_hook_data.image.id
+    ):
+        docker_hook_data.image = None  # clear out the image that was set
     return docker_hook_data.update_context(context)
diff --git a/runway/cfngin/hooks/ecr/_purge_repository.py b/runway/cfngin/hooks/ecr/_purge_repository.py
index 34ef64a23..f117f5b7d 100644
--- a/runway/cfngin/hooks/ecr/_purge_repository.py
+++ b/runway/cfngin/hooks/ecr/_purge_repository.py
@@ -3,7 +3,7 @@
 from __future__ import annotations
 
 import logging
-from typing import TYPE_CHECKING, Any, Dict, List
+from typing import TYPE_CHECKING, Any
 
 from ....utils import BaseModel
 
@@ -25,34 +25,27 @@ class HookArgs(BaseModel):
 
 def delete_ecr_images(
     client: ECRClient,
-    image_ids: List[ImageIdentifierTypeDef],
+    image_ids: list[ImageIdentifierTypeDef],
     repository_name: str,
 ) -> None:
     """Delete images from an ECR repository."""
-    response = client.batch_delete_image(
-        repositoryName=repository_name, imageIds=image_ids
-    )
-    if "failures" in response and response["failures"]:
+    response = client.batch_delete_image(repositoryName=repository_name, imageIds=image_ids)
+    if response.get("failures"):
         for msg in response["failures"]:
             LOGGER.info(
                 "failed to delete image %s: (%s) %s",
-                msg.get("imageId", {}).get("imageDigest")
-                or msg.get("imageId", {}).get("imageTag"),
+                msg.get("imageId", {}).get("imageDigest") or msg.get("imageId", {}).get("imageTag"),
                 msg.get("failureCode"),
                 msg.get("failureReason"),
             )
         raise ValueError("failures present in response")
 
 
-def list_ecr_images(
-    client: ECRClient, repository_name: str
-) -> List[ImageIdentifierTypeDef]:
+def list_ecr_images(client: ECRClient, repository_name: str) -> list[ImageIdentifierTypeDef]:
     """List all images in an ECR repository."""
-    image_ids: List[ImageIdentifierTypeDef] = []
+    image_ids: list[ImageIdentifierTypeDef] = []
     try:
-        response = client.list_images(
-            repositoryName=repository_name, filter={"tagStatus": "ANY"}
-        )
+        response = client.list_images(repositoryName=repository_name, filter={"tagStatus": "ANY"})
         image_ids.extend(response["imageIds"])
         while response.get("nextToken"):
             response = client.list_images(
@@ -63,25 +56,23 @@ def list_ecr_images(
             image_ids.extend(response["imageIds"])
         return [
             {"imageDigest": digest}
-            for digest in {
-                image["imageDigest"] for image in image_ids if image.get("imageDigest")
-            }
+            for digest in {image.get("imageDigest") for image in image_ids}
+            if digest
         ]
     except client.exceptions.RepositoryNotFoundException:
         LOGGER.info("repository %s does not exist", repository_name)
     return []
 
 
-def purge_repository(
-    context: CfnginContext, *__args: Any, **kwargs: Any
-) -> Dict[str, str]:
+def purge_repository(context: CfnginContext, *_args: Any, **kwargs: Any) -> dict[str, str]:
     """Purge all images from an ECR repository.
 
     Args:
         context: CFNgin context object.
+        **kwargs: Arbitrary keyword arguments.
 
     """
-    args = HookArgs.parse_obj(kwargs)
+    args = HookArgs.model_validate(kwargs)
     client = context.get_session().client("ecr")
     image_ids = list_ecr_images(client, repository_name=args.repository_name)
     if not image_ids:
diff --git a/runway/cfngin/hooks/ecs.py b/runway/cfngin/hooks/ecs.py
index c5950b29a..0bcb03938 100644
--- a/runway/cfngin/hooks/ecs.py
+++ b/runway/cfngin/hooks/ecs.py
@@ -1,12 +1,11 @@
 """AWS ECS hook."""
 
-# pylint: disable=no-self-argument
 from __future__ import annotations
 
 import logging
-from typing import TYPE_CHECKING, Any, Dict, List, Union
+from typing import TYPE_CHECKING, Any
 
-from pydantic import validator
+from pydantic import field_validator
 from typing_extensions import TypedDict
 
 from ...utils import BaseModel
@@ -22,11 +21,12 @@ class CreateClustersHookArgs(BaseModel):
     """Hook arguments for ``create_clusters``."""
 
-    clusters: List[str]
+    clusters: list[str]
     """List of cluster names to create."""
 
-    @validator("clusters", allow_reuse=True, pre=True)
-    def _convert_clusters(cls, v: Union[List[str], str]) -> List[str]:
+    @field_validator("clusters", mode="before")
+    @classmethod
+    def _convert_clusters(cls, v: list[str] | str) -> list[str]:
         """Convert value of ``clusters`` from str to list."""
         if isinstance(v, str):
             return [v]
@@ -36,22 +36,23 @@ def _convert_clusters(cls, v: Union[List[str], str]) -> List[str]:
 class CreateClustersResponseTypeDef(TypedDict):
     """Response from create_clusters."""
 
-    clusters: Dict[str, CreateClusterResponseTypeDef]
+    clusters: dict[str, CreateClusterResponseTypeDef]
 
 
 def create_clusters(
-    context: CfnginContext, *__args: Any, **kwargs: Any
+    context: CfnginContext, *_args: Any, **kwargs: Any
 ) -> CreateClustersResponseTypeDef:
     """Create ECS clusters.
 
     Args:
         context: CFNgin context object.
+        **kwargs: Arbitrary keyword arguments.
 
     """
-    args = CreateClustersHookArgs.parse_obj(kwargs)
+    args = CreateClustersHookArgs.model_validate(kwargs)
     ecs_client = context.get_session().client("ecs")
 
-    cluster_info: Dict[str, Any] = {}
+    cluster_info: dict[str, Any] = {}
     for cluster in args.clusters:
         LOGGER.debug("creating ECS cluster: %s", cluster)
         response = ecs_client.create_cluster(clusterName=cluster)
diff --git a/runway/cfngin/hooks/iam.py b/runway/cfngin/hooks/iam.py
index b23998ba9..143336234 100644
--- a/runway/cfngin/hooks/iam.py
+++ b/runway/cfngin/hooks/iam.py
@@ -4,7 +4,7 @@
 
 import copy
 import logging
-from typing import TYPE_CHECKING, Any, Dict, Optional, Union, cast
+from typing import TYPE_CHECKING, Any, cast
 
 from awacs import ecs
 from awacs.aws import Allow, Policy, Statement
@@ -56,31 +56,30 @@ class EnsureServerCertExistsHookArgs(BaseModel):
     cert_name: str
     """Name of the certificate that should exist."""
 
-    path_to_certificate: Optional[str] = None
+    path_to_certificate: str | None = None
     """Path to certificate file."""
 
-    path_to_chain: Optional[str] = None
+    path_to_chain: str | None = None
     """Path to chain file."""
 
-    path_to_private_key: Optional[str] = None
+    path_to_private_key: str | None = None
     """Path to private key file."""
 
     prompt: bool = True
     """Whether to prompt to upload a certificate if one does not exist."""
 
 
-def create_ecs_service_role(
-    context: CfnginContext, *__args: Any, **kwargs: Any
-) -> bool:
+def create_ecs_service_role(context: CfnginContext, *_args: Any, **kwargs: Any) -> bool:
     """Create ecsServiceRole IAM role.
 
     https://docs.aws.amazon.com/AmazonECS/latest/developerguide/using-service-linked-roles.html
 
     Args:
         context: Context instance. (passed in by CFNgin)
+        **kwargs: Arbitrary keyword arguments.
 
     """
-    args = CreateEcsServiceRoleHookArgs.parse_obj(kwargs)
+    args = CreateEcsServiceRoleHookArgs.model_validate(kwargs)
     client = context.get_session().client("iam")
 
     try:
@@ -100,9 +99,7 @@ def create_ecs_service_role(
 
 def _get_cert_arn_from_response(
-    response: Union[
-        GetServerCertificateResponseTypeDef, UploadServerCertificateResponseTypeDef
-    ]
+    response: GetServerCertificateResponseTypeDef | UploadServerCertificateResponseTypeDef,
 ) -> str:
     result = copy.deepcopy(response)
     # GET response returns this extra key
@@ -117,7 +114,7 @@ def _get_cert_arn_from_response(
     )
 
 
-def _get_cert_contents(kwargs: Dict[str, Any]) -> Dict[str, Any]:
+def _get_cert_contents(kwargs: dict[str, Any]) -> dict[str, Any]:  # noqa: C901
     """Build parameters with server cert file contents.
 
     Args:
@@ -145,7 +142,7 @@ def _get_cert_contents(kwargs: Dict[str, Any]) -> Dict[str, Any]:
 
             paths[key] = path
 
-    parameters: Dict[str, str] = {}
+    parameters: dict[str, str] = {}
 
     for key, path in paths.items():
         if not path:
@@ -155,7 +152,7 @@ def _get_cert_contents(kwargs: Dict[str, Any]) -> Dict[str, Any]:
         try:
             contents = path.read()
         except AttributeError:
-            with open(utils.full_path(path), encoding="utf-8") as read_file:
+            with open(utils.full_path(path), encoding="utf-8") as read_file:  # noqa: PTH123
                 contents = read_file.read()
 
         if key == "certificate":
@@ -171,19 +168,18 @@ def _get_cert_contents(kwargs: Dict[str, Any]) -> Dict[str, Any]:
     return parameters
 
 
-def ensure_server_cert_exists(
-    context: CfnginContext, *__args: Any, **kwargs: Any
-) -> Dict[str, str]:
+def ensure_server_cert_exists(context: CfnginContext, *_args: Any, **kwargs: Any) -> dict[str, str]:
     """Ensure server cert exists.
 
     Args:
         context: CFNgin context object.
+        **kwargs: Arbitrary keyword arguments.
 
     Returns:
         Dict containing ``status``, ``cert_name``, and ``cert_arn``.
 
     """
-    args = EnsureServerCertExistsHookArgs.parse_obj(kwargs)
+    args = EnsureServerCertExistsHookArgs.model_validate(kwargs)
     client = context.get_session().client("iam")
     status = "unknown"
     try:
@@ -193,13 +189,11 @@ def ensure_server_cert_exists(
         LOGGER.info("certificate exists: %s (%s)", args.cert_name, cert_arn)
     except ClientError:
         if args.prompt:
-            upload = input(
-                f"Certificate '{args.cert_name}' wasn't found. Upload it now? (yes/no) "
-            )
+            upload = input(f"Certificate '{args.cert_name}' wasn't found. Upload it now? (yes/no) ")
             if upload != "yes":
                 return {}
 
-            parameters = _get_cert_contents(args.dict())
+            parameters = _get_cert_contents(args.model_dump())
             if not parameters:
                 return {}
             response = client.upload_server_certificate(**parameters)
diff --git a/runway/cfngin/hooks/keypair.py b/runway/cfngin/hooks/keypair.py
index d9c799384..d80c22787 100644
--- a/runway/cfngin/hooks/keypair.py
+++ b/runway/cfngin/hooks/keypair.py
@@ -5,7 +5,7 @@
 import logging
 import sys
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple
+from typing import TYPE_CHECKING, Any
 
 from botocore.exceptions import ClientError
 from typing_extensions import Literal, TypedDict
 
@@ -31,19 +31,19 @@ class EnsureKeypairExistsHookArgs(BaseModel):
     keypair: str
     """Name of the key pair to ensure exists."""
 
-    public_key_path: Optional[str] = None
+    public_key_path: str | None = None
     """Path to a public key file to be imported instead of generating a new key.
 
     Incompatible with the SSM options, as the private key will not be available
     for storing.
""" - ssm_key_id: Optional[str] = None + ssm_key_id: str | None = None """ID of a KMS key to encrypt the SSM parameter with. If omitted, the default key will be used. """ - ssm_parameter_name: Optional[str] = None + ssm_parameter_name: str | None = None """Path to an SSM store parameter to receive the generated private key instead of importing it or storing it locally. @@ -59,7 +59,7 @@ class KeyPairInfo(TypedDict, total=False): status: Literal["created", "exists", "imported"] -def get_existing_key_pair(ec2: EC2Client, keypair_name: str) -> Optional[KeyPairInfo]: +def get_existing_key_pair(ec2: EC2Client, keypair_name: str) -> KeyPairInfo | None: """Get existing keypair.""" resp = ec2.describe_key_pairs() keypair = next( @@ -100,7 +100,7 @@ def import_key_pair( return keypair -def read_public_key_file(path: Path) -> Optional[bytes]: +def read_public_key_file(path: Path) -> bytes | None: """Read public key file.""" try: data = path.read_bytes() @@ -117,7 +117,7 @@ def read_public_key_file(path: Path) -> Optional[bytes]: def create_key_pair_from_public_key_file( ec2: EC2Client, keypair_name: str, public_key_path: Path -) -> Optional[KeyPairInfo]: +) -> KeyPairInfo | None: """Create keypair from public key file.""" public_key_data = read_public_key_file(public_key_path) if not public_key_data: @@ -136,13 +136,13 @@ def create_key_pair_in_ssm( ssm: SSMClient, keypair_name: str, parameter_name: str, - kms_key_id: Optional[str] = None, -) -> Optional[KeyPairInfo]: + kms_key_id: str | None = None, +) -> KeyPairInfo | None: """Create keypair in SSM.""" keypair = create_key_pair(ec2, keypair_name) try: kms_key_label = "default" - kms_args: Dict[str, Any] = {} + kms_args: dict[str, Any] = {} if kms_key_id: kms_key_label = kms_key_id kms_args = {"KeyId": kms_key_id} @@ -191,9 +191,7 @@ def create_key_pair(ec2: EC2Client, keypair_name: str) -> KeyPairTypeDef: return keypair -def create_key_pair_local( - ec2: EC2Client, keypair_name: str, dest_dir: Path -) -> Optional[KeyPairInfo]: +def create_key_pair_local(ec2: EC2Client, keypair_name: str, dest_dir: Path) -> KeyPairInfo | None: """Create local keypair.""" dest_dir = dest_dir.resolve() if not dest_dir.is_dir(): @@ -219,7 +217,7 @@ def create_key_pair_local( def interactive_prompt( keypair_name: str, -) -> Tuple[Optional[Literal["create", "import"]], Optional[str]]: +) -> tuple[Literal["create", "import"] | None, str | None]: """Interactive prompt.""" if not sys.stdin.isatty(): return None, None @@ -246,21 +244,16 @@ def interactive_prompt( return None, None -def ensure_keypair_exists( - context: CfnginContext, *__args: Any, **kwargs: Any -) -> KeyPairInfo: +def ensure_keypair_exists(context: CfnginContext, *__args: Any, **kwargs: Any) -> KeyPairInfo: """Ensure a specific keypair exists within AWS. If the key doesn't exist, upload it. 
""" - args = EnsureKeypairExistsHookArgs.parse_obj(kwargs) + args = EnsureKeypairExistsHookArgs.model_validate(kwargs) if args.public_key_path and args.ssm_parameter_name: - LOGGER.error( - "public_key_path and ssm_parameter_name cannot be " - "specified at the same time" - ) + LOGGER.error("public_key_path and ssm_parameter_name cannot be specified at the same time") return {} session = context.get_session() @@ -282,9 +275,7 @@ def ensure_keypair_exists( else: action, path = interactive_prompt(args.keypair) if action == "import" and path: - keypair_info = create_key_pair_from_public_key_file( - ec2, args.keypair, Path(path) - ) + keypair_info = create_key_pair_from_public_key_file(ec2, args.keypair, Path(path)) elif action == "create" and path: keypair_info = create_key_pair_local(ec2, args.keypair, Path(path)) else: diff --git a/runway/cfngin/hooks/protocols.py b/runway/cfngin/hooks/protocols.py index 68368cc21..9e0ddbcdf 100644 --- a/runway/cfngin/hooks/protocols.py +++ b/runway/cfngin/hooks/protocols.py @@ -8,7 +8,7 @@ from __future__ import annotations from abc import abstractmethod -from typing import TYPE_CHECKING, Any, Optional, TypeVar, Union, overload +from typing import TYPE_CHECKING, Any, ClassVar, TypeVar, overload from typing_extensions import Protocol, runtime_checkable @@ -30,14 +30,14 @@ class CfnginHookArgsProtocol(Protocol): @overload @abstractmethod - def get(self, __name: str) -> Optional[Any]: ... + def get(self, __name: str) -> Any | None: ... @overload @abstractmethod - def get(self, __name: str, __default: Union[Any, _T]) -> Union[Any, _T]: ... + def get(self, __name: str, __default: Any | _T) -> Any | _T: ... @abstractmethod - def get(self, __name: str, __default: Union[Any, _T] = None) -> Union[Any, _T]: + def get(self, __name: str, __default: Any | _T = None) -> Any | _T: """Safely get the value of an attribute. Args: @@ -48,19 +48,19 @@ def get(self, __name: str, __default: Union[Any, _T] = None) -> Union[Any, _T]: raise NotImplementedError @abstractmethod - def __contains__(self, __name: str) -> bool: + def __contains__(self, __name: str) -> bool: # noqa: D105 raise NotImplementedError @abstractmethod - def __getattribute__(self, __name: str) -> Any: + def __getattribute__(self, __name: str) -> Any: # noqa: D105 raise NotImplementedError @abstractmethod - def __getitem__(self, __name: str) -> Any: + def __getitem__(self, __name: str) -> Any: # noqa: D105 raise NotImplementedError @abstractmethod - def __setitem__(self, __name: str, _value: Any) -> None: + def __setitem__(self, __name: str, _value: Any) -> None: # noqa: D105 raise NotImplementedError @@ -75,20 +75,12 @@ class CfnginHookProtocol(Protocol): """ - args: CfnginHookArgsProtocol - """Arguments passed to the hook and parsed into an object.""" + ARGS_PARSER: ClassVar + """Class used to parse arguments passed to the hook.""" @abstractmethod - def __init__( # pylint: disable=super-init-not-called - self, context: CfnginContext, **_kwargs: Any - ) -> None: - """Structural __init__ method. - - This should not be called. Pylint will erroneously warn about - "super-init-not-called" if using this class as a subclass. This should - be disabled in-line until the bug reports for this issue is resolved. 
- - """ + def __init__(self, context: CfnginContext, **_kwargs: Any) -> None: + """Structural __init__ method.""" raise NotImplementedError @abstractmethod diff --git a/runway/cfngin/hooks/route53.py b/runway/cfngin/hooks/route53.py index f5885bfb2..288f2c491 100644 --- a/runway/cfngin/hooks/route53.py +++ b/runway/cfngin/hooks/route53.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any from ...utils import BaseModel from ..utils import create_route53_zone @@ -21,19 +21,18 @@ class CreateDomainHookArgs(BaseModel): """Domain name for the Route 53 hosted zone to be created.""" -def create_domain( - context: CfnginContext, *__args: Any, **kwargs: Any -) -> Dict[str, str]: +def create_domain(context: CfnginContext, *_args: Any, **kwargs: Any) -> dict[str, str]: """Create a domain within route53. Args: context: CFNgin context object. + **kwargs: Arbitrary keyword arguments. Returns: Dict containing ``domain`` and ``zone_id``. """ - args = CreateDomainHookArgs.parse_obj(kwargs) + args = CreateDomainHookArgs.model_validate(kwargs) client = context.get_session().client("route53") zone_id = create_route53_zone(client, args.domain) return {"domain": args.domain, "zone_id": zone_id} diff --git a/runway/cfngin/hooks/ssm/parameter.py b/runway/cfngin/hooks/ssm/parameter.py index e03589b00..067cf1eb8 100644 --- a/runway/cfngin/hooks/ssm/parameter.py +++ b/runway/cfngin/hooks/ssm/parameter.py @@ -1,13 +1,12 @@ """AWS SSM Parameter Store hooks.""" -# pylint: disable=no-self-argument from __future__ import annotations import json import logging -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast +from typing import TYPE_CHECKING, Annotated, Any, ClassVar, cast -from pydantic import Extra, validator +from pydantic import ConfigDict, Field, field_validator from typing_extensions import Literal, TypedDict from ....compat import cached_property @@ -27,10 +26,11 @@ LOGGER = cast("RunwayLogger", logging.getLogger(__name__)) + # PutParameterResultTypeDef but without metadata -_PutParameterResultTypeDef = TypedDict( - "_PutParameterResultTypeDef", {"Tier": ParameterTierType, "Version": int} -) +class _PutParameterResultTypeDef(TypedDict): + Tier: ParameterTierType + Version: int class ArgsDataModel(BaseModel): @@ -61,77 +61,103 @@ class ArgsDataModel(BaseModel): """ - allowed_pattern: Optional[str] = None - data_type: Optional[Literal["aws:ec2:image", "text"]] = None - description: Optional[str] = None + model_config = ConfigDict(extra="ignore", populate_by_name=True) + + allowed_pattern: Annotated[str | None, Field(alias="AllowedPattern")] = None + """A regular expression used to validate the parameter value.""" + + data_type: Annotated[ + Literal["aws:ec2:image", "text"] | None, + Field(alias="DataType"), + ] = None + """The data type for a String parameter. + + Supported data types include plain text and Amazon Machine Image IDs. 
+ + """ + + description: Annotated[str | None, Field(alias="Description")] = None + """Information about the parameter.""" + force: bool = False - key_id: Optional[str] = None - name: str - overwrite: bool = True - policies: Optional[str] = None - tags: Optional[List[TagDataModel]] = None - tier: ParameterTierType = "Standard" - type: Literal["String", "StringList", "SecureString"] - value: Optional[str] = None - - class Config: - """Model configuration.""" - - allow_population_by_field_name = True - extra = Extra.ignore - fields = { - "allowed_pattern": {"alias": "AllowedPattern"}, - "data_type": {"alias": "DataType"}, - "description": {"alias": "Description"}, - "key_id": {"alias": "KeyId"}, - "name": {"alias": "Name"}, - "overwrite": {"alias": "Overwrite"}, - "policies": {"alias": "Policies"}, - "tags": {"alias": "Tags"}, - "tier": {"alias": "Tier"}, - "type": {"alias": "Type"}, - "value": {"alias": "Value"}, - } - - @validator("policies", allow_reuse=True, pre=True) - def _convert_policies(cls, v: Union[List[Dict[str, Any]], str, Any]) -> str: + """Skip checking the current value of the parameter, just put it. + + Can be used alongside ``overwrite`` to always update a parameter. + + """ + + key_id: Annotated[str | None, Field(alias="KeyId")] = None + """The KMS Key ID that you want to use to encrypt a parameter. + + Either the default AWS Key Management Service (AWS KMS) key automatically + assigned to your AWS account or a custom key. + Required for parameters that use the ``SecureString`` data type. + + """ + + name: Annotated[str, Field(alias="Name")] + """The fully qualified name of the parameter that you want to add to the system.""" + + overwrite: Annotated[bool, Field(alias="Overwrite")] = True + """Allow overwriting an existing parameter.""" + + policies: Annotated[str | None, Field(alias="Policies")] = None + """One or more policies to apply to a parameter. This field takes a JSON array.""" + + tags: Annotated[list[TagDataModel] | None, Field(alias="Tags")] = None + """Optional metadata that you assign to a resource.""" + + tier: Annotated[ParameterTierType, Field(alias="Tier")] = "Standard" + """The parameter tier to assign to a parameter.""" + + type: Annotated[Literal["String", "StringList", "SecureString"], Field(alias="Type")] + """The type of parameter.""" + + value: Annotated[str | None, Field(alias="Value")] = None + """The parameter value that you want to add to the system. + + Standard parameters have a value limit of 4 KB. + Advanced parameters have a value limit of 8 KB. 
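The `Annotated[..., Field(alias=...)]` fields above replace the v1 `Config.fields` alias table. A self-contained sketch of the aliasing behavior (hypothetical model, illustrative values):

    from __future__ import annotations

    from typing import Annotated

    from pydantic import BaseModel, ConfigDict, Field


    class AliasExample(BaseModel):
        """Hypothetical model mirroring the aliasing style used here."""

        model_config = ConfigDict(populate_by_name=True)

        key_id: Annotated[str | None, Field(alias="KeyId")] = None


    # populate_by_name=True (v1: allow_population_by_field_name) accepts
    # either spelling on input...
    assert AliasExample.model_validate({"KeyId": "abc"}).key_id == "abc"
    assert AliasExample(key_id="abc").key_id == "abc"
    # ...and model_dump(by_alias=True) restores the boto3-style casing.
    assert AliasExample(key_id="abc").model_dump(by_alias=True) == {"KeyId": "abc"}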
+
+    """
+
+    @field_validator("policies", mode="before")
+    @classmethod
+    def _convert_policies(cls, v: list[dict[str, Any]] | str | Any) -> str:
         """Convert policies to acceptable value."""
         if isinstance(v, str):
             return v
         if isinstance(v, list):
             return json.dumps(v, cls=JsonEncoder)
-        raise TypeError(
-            f"unexpected type {type(v)}; permitted: Optional[Union[List[Dict[str, Any]], str]]"
-        )
+        raise ValueError(f"unexpected type {type(v)}; permitted: list[dict[str, Any]] | str | None")
 
-    @validator("tags", allow_reuse=True, pre=True)
-    def _convert_tags(
-        cls, v: Union[Dict[str, str], List[Dict[str, str]], Any]
-    ) -> List[Dict[str, str]]:
+    @field_validator("tags", mode="before")
+    @classmethod
+    def _convert_tags(cls, v: dict[str, str] | list[dict[str, str]] | Any) -> list[dict[str, str]]:
         """Convert tags to acceptable value."""
         if isinstance(v, list):
             return v
         if isinstance(v, dict):
             return [{"Key": k, "Value": v} for k, v in v.items()]
-        raise TypeError(
-            f"unexpected type {type(v)}; permitted: "
-            "Optional[Union[Dict[str, str], List[Dict[str, str]]]"
-        )
+        raise ValueError(
+            f"unexpected type {type(v)}; permitted: dict[str, str] | list[dict[str, str]] | None"
+        )
 
 
 class _Parameter(CfnginHookProtocol):
     """AWS SSM Parameter Store Parameter."""
 
+    ARGS_PARSER: ClassVar = ArgsDataModel
+    """Class used to parse arguments passed to the hook."""
+
     args: ArgsDataModel
 
-    def __init__(  # pylint: disable=super-init-not-called
+    def __init__(
         self,
         context: CfnginContext,
         *,
         name: str,
-        type: Literal[  # pylint: disable=redefined-builtin
-            "String", "StringList", "SecureString"
-        ],
+        type: Literal["String", "StringList", "SecureString"],  # noqa: A002
         **kwargs: Any,
     ) -> None:
         """Instantiate class.
 
@@ -141,9 +167,10 @@ def __init__(  # pylint: disable=super-init-not-called
             name: The fully qualified name of the parameter that you want
                 to add to the system.
             type: The type of parameter.
+            **kwargs: Arbitrary keyword arguments.
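The new `ARGS_PARSER` class variable is how hooks advertise their argument model under the revised `CfnginHookProtocol` (see protocols.py earlier in this diff). A rough sketch of the shape, using hypothetical names:

    from __future__ import annotations

    from typing import Any, ClassVar

    from pydantic import BaseModel


    class ExampleArgs(BaseModel):
        """Hypothetical args model."""

        name: str


    class ExampleHook:
        """Hypothetical hook following the protocol's shape."""

        ARGS_PARSER: ClassVar = ExampleArgs

        def __init__(self, context: Any, **kwargs: Any) -> None:
            # Hooks validate their kwargs through the advertised parser.
            self.args = self.ARGS_PARSER.model_validate(kwargs)


    assert ExampleHook(None, name="example").args.name == "example"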
""" - self.args = ArgsDataModel.parse_obj({"name": name, "type": type, **kwargs}) + self.args = ArgsDataModel.model_validate({"name": name, "type": type, **kwargs}) self.ctx = context @cached_property @@ -165,14 +192,14 @@ def get(self) -> ParameterTypeDef: if self.args.force: # bypass getting current value return {} try: - return self.client.get_parameter( - Name=self.args.name, WithDecryption=True - ).get("Parameter", {}) + return self.client.get_parameter(Name=self.args.name, WithDecryption=True).get( + "Parameter", {} + ) except self.client.exceptions.ParameterNotFound: LOGGER.verbose("parameter %s does not exist", self.args.name) return {} - def get_current_tags(self) -> List[TagTypeDef]: + def get_current_tags(self) -> list[TagTypeDef]: """Get Tags currently applied to Parameter.""" try: return self.client.list_tags_for_resource( @@ -216,7 +243,7 @@ def put(self) -> _PutParameterResultTypeDef: if current_param.get("Value") != self.args.value: try: result = self.client.put_parameter( - **self.args.dict( + **self.args.model_dump( by_alias=True, exclude_none=True, exclude={"force", "tags"} ) ) @@ -242,9 +269,7 @@ def update_tags(self) -> None: """Update tags.""" current_tags = self.get_current_tags() if self.args.tags and current_tags: - diff_tag_keys = list( - {i["Key"] for i in current_tags} ^ {i.key for i in self.args.tags} - ) + diff_tag_keys = list({i["Key"] for i in current_tags} ^ {i.key for i in self.args.tags}) elif self.args.tags: diff_tag_keys = [] else: @@ -258,14 +283,11 @@ def update_tags(self) -> None: ResourceType="Parameter", TagKeys=diff_tag_keys, ) - LOGGER.debug( - "removed tags for parameter %s: %s", self.args.name, diff_tag_keys - ) + LOGGER.debug("removed tags for parameter %s: %s", self.args.name, diff_tag_keys) if self.args.tags: tags_to_add = [ - cast("TagTypeDef", tag.dict(by_alias=True)) - for tag in self.args.tags + cast("TagTypeDef", tag.model_dump(by_alias=True)) for tag in self.args.tags ] self.client.add_tags_to_resource( ResourceId=self.args.name, @@ -278,9 +300,7 @@ def update_tags(self) -> None: [tag["Key"] for tag in tags_to_add], ) except self.client.exceptions.InvalidResourceId: - LOGGER.info( - "skipped updating tags; parameter %s does not exist", self.args.name - ) + LOGGER.info("skipped updating tags; parameter %s does not exist", self.args.name) else: LOGGER.info("updated tags for parameter %s", self.args.name) @@ -301,6 +321,7 @@ def __init__( context: CFNgin context object. name: The fully qualified name of the parameter that you want to add to the system. + **kwargs: Arbitrary keyword arguments. 
""" for k in ["Type", "type"]: # ensure neither of these are set diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/callback_url_retriever.py b/runway/cfngin/hooks/staticsite/auth_at_edge/callback_url_retriever.py index 9c944e49d..d815e3257 100644 --- a/runway/cfngin/hooks/staticsite/auth_at_edge/callback_url_retriever.py +++ b/runway/cfngin/hooks/staticsite/auth_at_edge/callback_url_retriever.py @@ -8,7 +8,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import TYPE_CHECKING, Any from ...base import HookArgsBaseModel @@ -24,11 +24,11 @@ class HookArgs(HookArgsBaseModel): stack_name: str """The name of the stack to check against.""" - user_pool_arn: Optional[str] = None + user_pool_arn: str | None = None """The ARN of the User Pool to check for a client.""" -def get(context: CfnginContext, *__args: Any, **kwargs: Any) -> Dict[str, Any]: +def get(context: CfnginContext, *_args: Any, **kwargs: Any) -> dict[str, Any]: """Retrieve the callback URLs for User Pool Client Creation. When the User Pool is created a Callback URL is required. During a post @@ -42,9 +42,10 @@ def get(context: CfnginContext, *__args: Any, **kwargs: Any) -> Dict[str, Any]: Args: context: The context instance. + **kwargs: Arbitrary keyword arguments. """ - args = HookArgs.parse_obj(kwargs) + args = HookArgs.model_validate(kwargs) session = context.get_session() cloudformation_client = session.client("cloudformation") cognito_client = session.client("cognito-idp") @@ -54,31 +55,32 @@ def get(context: CfnginContext, *__args: Any, **kwargs: Any) -> Dict[str, Any]: # Return the current stack if one exists stack_desc = cloudformation_client.describe_stacks(StackName=args.stack_name) # Get the client_id from the outputs - outputs = stack_desc["Stacks"][0]["Outputs"] + outputs = stack_desc["Stacks"][0].get("Outputs", []) if args.user_pool_arn: user_pool_id = args.user_pool_arn.split("/")[-1:][0] else: - user_pool_id = [ + user_pool_id = next( o["OutputValue"] for o in outputs - if o["OutputKey"] == "AuthAtEdgeUserPoolId" - ][0] - - client_id = [ - o["OutputValue"] for o in outputs if o["OutputKey"] == "AuthAtEdgeClient" - ][0] + if ("OutputKey" in o and "OutputValue" in o) + and o["OutputKey"] == "AuthAtEdgeUserPoolId" + ) + + client_id = next( + o["OutputValue"] + for o in outputs + if ("OutputKey" in o and "OutputValue" in o) and o["OutputKey"] == "AuthAtEdgeClient" + ) # Poll the user pool client information - resp = cognito_client.describe_user_pool_client( - UserPoolId=user_pool_id, ClientId=client_id - ) + resp = cognito_client.describe_user_pool_client(UserPoolId=user_pool_id, ClientId=client_id) # Retrieve the callbacks - callbacks = resp["UserPoolClient"]["CallbackURLs"] + callbacks = resp["UserPoolClient"].get("CallbackURLs") if callbacks: context_dict["callback_urls"] = callbacks return context_dict - except Exception: # pylint: disable=broad-except + except Exception: # noqa: BLE001 return context_dict diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/client_updater.py b/runway/cfngin/hooks/staticsite/auth_at_edge/client_updater.py index d62c6082d..c4fce55a9 100644 --- a/runway/cfngin/hooks/staticsite/auth_at_edge/client_updater.py +++ b/runway/cfngin/hooks/staticsite/auth_at_edge/client_updater.py @@ -8,7 +8,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Dict, List +from typing import TYPE_CHECKING, Any from ...base import HookArgsBaseModel @@ -21,7 +21,7 @@ class 
HookArgs(HookArgsBaseModel): """Hook arguments.""" - alternate_domains: List[str] + alternate_domains: list[str] """A list of any alternate domains that need to be listed with the primary distribution domain. @@ -33,7 +33,7 @@ class HookArgs(HookArgsBaseModel): distribution_domain: str """Distribution domain.""" - oauth_scopes: List[str] + oauth_scopes: list[str] """A list of all available validation scopes for oauth.""" redirect_path_sign_in: str @@ -42,13 +42,13 @@ class HookArgs(HookArgsBaseModel): redirect_path_sign_out: str """The redirect path after sign out.""" - supported_identity_providers: List[str] = [] + supported_identity_providers: list[str] = [] """Supported identity providers.""" def get_redirect_uris( - domains: List[str], redirect_path_sign_in: str, redirect_path_sign_out: str -) -> Dict[str, List[str]]: + domains: list[str], redirect_path_sign_in: str, redirect_path_sign_out: str +) -> dict[str, list[str]]: """Create dict of redirect URIs for AppClient.""" return { "sign_in": [f"{domain}{redirect_path_sign_in}" for domain in domains], @@ -56,7 +56,7 @@ def get_redirect_uris( } -def update(context: CfnginContext, *__args: Any, **kwargs: Any) -> bool: +def update(context: CfnginContext, *_args: Any, **kwargs: Any) -> bool: """Update the callback urls for the User Pool Client. Required to match the redirect_uri being sent which contains @@ -67,14 +67,15 @@ def update(context: CfnginContext, *__args: Any, **kwargs: Any) -> bool: Args: context: The context instance. + **kwargs: Arbitrary keyword arguments. """ - args = HookArgs.parse_obj(kwargs) + args = HookArgs.model_validate(kwargs) session = context.get_session() cognito_client = session.client("cognito-idp") # Combine alternate domains with main distribution - redirect_domains = args.alternate_domains + ["https://" + args.distribution_domain] + redirect_domains = [*args.alternate_domains, "https://" + args.distribution_domain] # Create a list of all domains with their redirect paths redirect_uris = get_redirect_uris( @@ -93,6 +94,6 @@ def update(context: CfnginContext, *__args: Any, **kwargs: Any) -> bool: UserPoolId=context.hook_data["aae_user_pool_id_retriever"]["id"], ) return True - except Exception: # pylint: disable=broad-except + except Exception: LOGGER.exception("unable to update user pool client callback urls") return False diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/domain_updater.py b/runway/cfngin/hooks/staticsite/auth_at_edge/domain_updater.py index 8a2e8d10a..b82c9926d 100644 --- a/runway/cfngin/hooks/staticsite/auth_at_edge/domain_updater.py +++ b/runway/cfngin/hooks/staticsite/auth_at_edge/domain_updater.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Dict, Union +from typing import TYPE_CHECKING, Any from ...base import HookArgsBaseModel @@ -20,9 +20,7 @@ class HookArgs(HookArgsBaseModel): """The ID of the Cognito User Pool Client.""" -def update( - context: CfnginContext, *__args: Any, **kwargs: Any -) -> Union[Dict[str, Any], bool]: +def update(context: CfnginContext, *_args: Any, **kwargs: Any) -> dict[str, Any] | bool: """Retrieve/Update the domain name of the specified client. A domain name is required in order to make authorization and token @@ -34,18 +32,17 @@ def update( Args: context: The context instance. + **kwargs: Arbitrary keyword arguments. 
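A note on the `split("_")` used by the domain updater below: a Cognito user pool ID has the form `<region>_<id>`, so unpacking the split yields the region and the pool-specific suffix used to build the domain prefix (value is illustrative):

    user_pool_region, user_pool_hash = "us-east-1_aBcDeFgHi".split("_")
    assert user_pool_region == "us-east-1"
    assert user_pool_hash == "aBcDeFgHi"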
""" - args = HookArgs.parse_obj(kwargs) + args = HookArgs.model_validate(kwargs) session = context.get_session() cognito_client = session.client("cognito-idp") - context_dict: Dict[str, Any] = {} + context_dict: dict[str, Any] = {} user_pool_id = context.hook_data["aae_user_pool_id_retriever"]["id"] - user_pool = cognito_client.describe_user_pool(UserPoolId=user_pool_id).get( - "UserPool", {} - ) + user_pool = cognito_client.describe_user_pool(UserPoolId=user_pool_id).get("UserPool", {}) (user_pool_region, user_pool_hash) = user_pool_id.split("_") domain_prefix = user_pool.get("CustomDomain", user_pool.get("Domain")) @@ -58,19 +55,15 @@ def update( try: domain_prefix = (f"{user_pool_hash}-{args.client_id}").lower() - cognito_client.create_user_pool_domain( - Domain=domain_prefix, UserPoolId=user_pool_id - ) + cognito_client.create_user_pool_domain(Domain=domain_prefix, UserPoolId=user_pool_id) context_dict["domain"] = get_user_pool_domain(domain_prefix, user_pool_region) return context_dict - except Exception: # pylint: disable=broad-except + except Exception: LOGGER.exception("could not update user pool domain: %s", user_pool_id) return False -def delete( - context: CfnginContext, *__args: Any, **kwargs: Any -) -> Union[Dict[str, Any], bool]: +def delete(context: CfnginContext, *_args: Any, **kwargs: Any) -> dict[str, Any] | bool: """Delete the domain if the user pool was created by Runway. If a User Pool was created by Runway, and populated with a domain, that @@ -83,9 +76,10 @@ def delete( Args: context: The context instance. + **kwargs: Arbitrary keyword arguments. """ - args = HookArgs.parse_obj(kwargs) + args = HookArgs.model_validate(kwargs) session = context.get_session() cognito_client = session.client("cognito-idp") @@ -94,14 +88,12 @@ def delete( domain_prefix = (f"{user_pool_hash}-{args.client_id}").lower() try: - cognito_client.delete_user_pool_domain( - UserPoolId=user_pool_id, Domain=domain_prefix - ) + cognito_client.delete_user_pool_domain(UserPoolId=user_pool_id, Domain=domain_prefix) return True except cognito_client.exceptions.InvalidParameterException: LOGGER.info('skipped deletion; no domain with prefix "%s"', domain_prefix) return True - except Exception: # pylint: disable=broad-except + except Exception: LOGGER.exception("could not delete user pool domain") return False diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/lambda_config.py b/runway/cfngin/hooks/staticsite/auth_at_edge/lambda_config.py index 5e403172a..8831681ac 100644 --- a/runway/cfngin/hooks/staticsite/auth_at_edge/lambda_config.py +++ b/runway/cfngin/hooks/staticsite/auth_at_edge/lambda_config.py @@ -9,7 +9,7 @@ import shutil import tempfile from tempfile import mkstemp -from typing import TYPE_CHECKING, Any, Dict, List, Optional +from typing import TYPE_CHECKING, Any from ... 
import aws_lambda from ...base import HookArgsBaseModel @@ -34,16 +34,16 @@ class HookArgs(HookArgsBaseModel): client_id: str """The ID of the Cognito User Pool Client.""" - cookie_settings: Dict[str, Any] + cookie_settings: dict[str, Any] """The settings for our customized cookies.""" - http_headers: Dict[str, Any] + http_headers: dict[str, Any] """The additional headers added to our requests.""" nonce_signing_secret_param_name: str """SSM param name to store nonce signing secret.""" - oauth_scopes: List[str] + oauth_scopes: list[str] """The validation scopes for our OAuth requests.""" redirect_path_refresh: str @@ -58,14 +58,13 @@ class HookArgs(HookArgsBaseModel): """ - required_group: Optional[str] = None + required_group: str | None = None """Optional User Pool group to which access should be restricted.""" -# pylint: disable=too-many-locals def write( context: CfnginContext, provider: Provider, *__args: Any, **kwargs: Any -) -> Dict[str, Any]: +) -> dict[str, Any]: """Writes/Uploads the configured lambdas for Auth@Edge. Lambda@Edge does not have the ability to allow Environment variables @@ -78,7 +77,7 @@ def write( """ cognito_domain = context.hook_data["aae_domain_updater"].get("domain") - args = HookArgs.parse_obj(kwargs) + args = HookArgs.model_validate(kwargs) config = { "client_id": args.client_id, "cognito_auth_domain": cognito_domain, @@ -96,10 +95,10 @@ def write( } # Shared file that contains the method called for configuration data - path = os.path.join(os.path.dirname(__file__), "templates", "shared.py") - context_dict: Dict[str, Any] = {} + path = os.path.join(os.path.dirname(__file__), "templates", "shared.py") # noqa: PTH120, PTH118 + context_dict: dict[str, Any] = {} - with open(path, encoding="utf-8") as file_: + with open(path, encoding="utf-8") as file_: # noqa: PTH123 # Dynamically replace our configuration values # in the shared.py template file with actual # calculated values @@ -114,7 +113,7 @@ def write( filedir, temppath = mkstemp() # Save the file to a temp path - with open(temppath, "w", encoding="utf-8") as tmp: + with open(temppath, "w", encoding="utf-8") as tmp: # noqa: PTH123 tmp.write(shared) config = temppath os.close(filedir) @@ -127,23 +126,27 @@ def write( # Copy the template code for the specific Lambda function # to the temporary folder shutil.copytree( - os.path.join(os.path.dirname(__file__), "templates", handler), + os.path.join( # noqa: PTH118 + os.path.dirname(__file__), "templates", handler # noqa: PTH120 + ), dirpath, dirs_exist_ok=True, ) # Save our dynamic configuration shared file to the # temporary folder - with open(config, encoding="utf-8") as shared: + with open(config, encoding="utf-8") as shared: # noqa: PTH123 raw = shared.read() filename = "shared.py" - with open(os.path.join(dirpath, filename), "wb") as newfile: + with open(os.path.join(dirpath, filename), "wb") as newfile: # noqa: PTH118, PTH123 newfile.write(raw.encode()) # Copy the shared jose-dependent util module to the temporary folder shutil.copyfile( - os.path.join(os.path.dirname(__file__), "templates", "shared_jose.py"), - os.path.join(dirpath, "shared_jose.py"), + os.path.join( # noqa: PTH118 + os.path.dirname(__file__), "templates", "shared_jose.py" # noqa: PTH120 + ), + os.path.join(dirpath, "shared_jose.py"), # noqa: PTH118 ) # Upload our temporary folder to our S3 bucket for @@ -174,7 +177,7 @@ def get_nonce_signing_secret(param_name: str, context: CfnginContext) -> str: ssm_client = session.client("ssm") try: response = ssm_client.get_parameter(Name=param_name, 
WithDecryption=True) - return response["Parameter"]["Value"] + return response["Parameter"].get("Value", "") except ssm_client.exceptions.ParameterNotFound: secret = random_key(16) ssm_client.put_parameter( @@ -193,7 +196,5 @@ def random_key(length: int = 16) -> str: length: The length of the random key. """ - secret_allowed_chars = ( - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~" - ) + secret_allowed_chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~" return "".join(secrets.choice(secret_allowed_chars) for _ in range(length)) diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/check_auth/__init__.py b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/check_auth/__init__.py index a2929b225..c96288459 100644 --- a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/check_auth/__init__.py +++ b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/check_auth/__init__.py @@ -10,7 +10,6 @@ """ -# pylint: disable=consider-using-f-string import base64 import datetime import hashlib @@ -20,9 +19,9 @@ import secrets from urllib.parse import quote_plus, urlencode -from shared_jose import validate_jwt # noqa pylint: disable=import-error +from shared_jose import validate_jwt -from shared import ( # noqa pylint: disable=import-error +from shared import ( decode_token, extract_and_parse_cookies, get_config, @@ -32,9 +31,7 @@ LOGGER = logging.getLogger(__file__) -SECRET_ALLOWED_CHARS = ( - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~" -) +SECRET_ALLOWED_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~" NONCE_LENGTH = 16 PKCE_LENGTH = 43 CONFIG = get_config() @@ -44,7 +41,7 @@ def handler(event, _context): """Handle the request passed in. Args: - event (Dict[str, Any]): The Lambda Event. + event (dict[str, Any]): The Lambda Event. _context (Any): Lambda context object. """ @@ -90,9 +87,7 @@ def handler(event, _context): % ( domain_name, CONFIG.get("redirect_path_auth_refresh"), - urlencode( - {"requestedUri": requested_uri, "nonce": nonce} - ), + urlencode({"requestedUri": requested_uri, "nonce": nonce}), ), } ], @@ -129,7 +124,7 @@ def handler(event, _context): ) return request - except Exception: # noqa pylint: disable=broad-except + except Exception: # We need new authorization. 
Get the user over to Cognito nonce = generate_nonce() state = { @@ -139,8 +134,7 @@ def handler(event, _context): } login_query_string = urlencode( { - "redirect_uri": "https://%s%s" - % (domain_name, CONFIG["redirect_path_sign_in"]), + "redirect_uri": "https://%s%s" % (domain_name, CONFIG["redirect_path_sign_in"]), "response_type": "code", "client_id": CONFIG["client_id"], "state": base64.urlsafe_b64encode( diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/http_headers/__init__.py b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/http_headers/__init__.py index 9e60c44e9..c2b93a849 100644 --- a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/http_headers/__init__.py +++ b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/http_headers/__init__.py @@ -1,6 +1,6 @@ """Add all configured (CloudFront compatible) headers to origin response.""" -from shared import as_cloud_front_headers, get_config # pylint: disable=import-error +from shared import as_cloud_front_headers, get_config CONFIG = get_config() diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/parse_auth/__init__.py b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/parse_auth/__init__.py index 6d27b1683..7e8cef76d 100644 --- a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/parse_auth/__init__.py +++ b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/parse_auth/__init__.py @@ -6,7 +6,6 @@ add to the cookie headers. """ -# pylint: disable=consider-using-f-string import base64 import hmac import json @@ -14,12 +13,12 @@ from datetime import datetime from urllib.parse import parse_qs -from shared_jose import ( # noqa pylint: disable=import-error +from shared_jose import ( MissingRequiredGroupError, validate_and_check_id_token, ) -from shared import ( # noqa pylint: disable=import-error +from shared import ( create_error_html, extract_and_parse_cookies, generate_cookie_headers, @@ -44,7 +43,7 @@ def validate_querystring_and_cookies(request, cookies): Args: request (Any): Cloudfront request. - cookies (Dict[str, Any]): Cookies. + cookies (dict[str, Any]): Cookies. 
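`validate_querystring_and_cookies` below relies on `parse_qs` mapping each query parameter to a list of values, which is why the handler later indexes `code[0]` (values are illustrative):

    from urllib.parse import parse_qs

    qsp = parse_qs("code=abc123&state=c3RhdGU")
    assert qsp == {"code": ["abc123"], "state": ["c3RhdGU"]}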
""" qsp = parse_qs(request.get("querystring")) @@ -107,8 +106,7 @@ def validate_querystring_and_cookies(request, cookies): calculated_hmac = sign(current_nonce, CONFIG["nonce_signing_secret"]) if not hmac.compare_digest(calculated_hmac, nonce_hmac): raise RequiresConfirmationError( - "Nonce signature mismatch; expected %s but got %s" - % (calculated_hmac, nonce_hmac) + "Nonce signature mismatch; expected %s but got %s" % (calculated_hmac, nonce_hmac) ) return [code, pkce, requested_uri] @@ -140,8 +138,7 @@ def handler(event, _context): body = { "grant_type": "authorization_code", "client_id": CONFIG["client_id"], - "redirect_uri": "https://%s%s" - % (domain_name, CONFIG.get("redirect_path_sign_in")), + "redirect_uri": "https://%s%s" % (domain_name, CONFIG.get("redirect_path_sign_in")), "code": code[0], "code_verifier": pkce, } @@ -183,7 +180,7 @@ def handler(event, _context): }, } return response - except Exception as err: # pylint: disable=broad-except + except Exception as err: if id_token: # ID token found; checking if it is valid try: @@ -203,7 +200,7 @@ def handler(event, _context): **CONFIG.get("cloud_front_headers", {}), }, } - except Exception as err2: # pylint: disable=broad-except + except Exception as err2: LOGGER.debug("Id token not valid") LOGGER.debug(err2) @@ -235,8 +232,6 @@ def handler(event, _context): "status": "200", "headers": { **CONFIG.get("cloud_front_headers", {}), - "content-type": [ - {"key": "Content-Type", "value": "text/html; charset=UTF-8"} - ], + "content-type": [{"key": "Content-Type", "value": "text/html; charset=UTF-8"}], }, } diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/refresh_auth/__init__.py b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/refresh_auth/__init__.py index 41c9efd88..79ddf7631 100644 --- a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/refresh_auth/__init__.py +++ b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/refresh_auth/__init__.py @@ -4,7 +4,7 @@ import traceback from urllib.parse import parse_qs -from shared import ( # noqa pylint: disable=import-error +from shared import ( create_error_html, extract_and_parse_cookies, generate_cookie_headers, @@ -62,7 +62,7 @@ def handler(event, _context): tokens["id_token"] = res.get("id_token") tokens["access_token"] = res.get("access_token") cookie_headers_event_type = "new_tokens" - except Exception as err: # pylint: disable=broad-except + except Exception as err: LOGGER.debug(err) cookie_headers_event_type = "refresh_failed" @@ -88,7 +88,7 @@ def handler(event, _context): # Send a basic html error response and inform the user # why refresh was unsuccessful - except Exception as err: # pylint: disable=broad-except + except Exception as err: LOGGER.info(err) LOGGER.info(traceback.print_exc()) @@ -101,9 +101,7 @@ def handler(event, _context): ), "status": "400", "headers": { - "content-type": [ - {"key": "Content-Type", "value": "text/html; charset=UTF-8"} - ], + "content-type": [{"key": "Content-Type", "value": "text/html; charset=UTF-8"}], **CONFIG.get("cloud_front_headers", {}), }, } @@ -117,7 +115,7 @@ def validate_refresh_request(current_nonce, original_nonce, tokens): Args: current_nonce (str): The current nonce code. original_nonce (str): The original nonce code. - tokens (Dict[str, str]): A dictionary of all the token_types + tokens (dict[str, str]): A dictionary of all the token_types and their corresponding token values (id, auth, refresh). 
""" diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/shared.py b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/shared.py index 69cb6c86a..c0284273e 100644 --- a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/shared.py +++ b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/shared.py @@ -1,6 +1,5 @@ """Shared functionality for the Auth@Edge Lambda suite.""" -# pylint: disable=consider-using-f-string, inconsistent-return-statements import base64 import hmac import json @@ -57,7 +56,7 @@ def as_cloud_front_headers(headers): """Convert a series of headers to CloudFront compliant ones. Args: - headers (Dict[str, str]): The request/response headers in + headers (dict[str, str]): The request/response headers in dictionary format. """ @@ -71,7 +70,7 @@ def extract_and_parse_cookies(headers, client_id, cookie_compatibility="amplify" """Extract and parse the Cognito cookies from the headers. Args: - headers (Dict[str, str]): The request/response headers in + headers (dict[str, str]): The request/response headers in dictionary format. client_id (str): The Cognito UserPool Client ID. cookie_compatibility (str): "amplify" or "elasticsearch". @@ -88,18 +87,12 @@ def extract_and_parse_cookies(headers, client_id, cookie_compatibility="amplify" return { "token_user_name": ( - cookies.get(cookie_names["last_user_key"]) - if "last_user_key" in cookie_names - else None + cookies.get(cookie_names["last_user_key"]) if "last_user_key" in cookie_names else None ), "id_token": cookies.get(cookie_names["id_token_key"]), "access_token": cookies.get(cookie_names["access_token_key"]), "refresh_token": cookies.get(cookie_names["refresh_token_key"]), - "scopes": ( - cookies.get(cookie_names["scope_key"]) - if "scope_key" in cookie_names - else None - ), + "scopes": (cookies.get(cookie_names["scope_key"]) if "scope_key" in cookie_names else None), "nonce": cookies.get("spa-auth-edge-nonce"), "nonce_hmac": cookies.get("spa-auth-edge-nonce-hmac"), "pkce": cookies.get("spa-auth-edge-pkce"), @@ -110,7 +103,7 @@ def extract_cookies_from_headers(headers): """Extract all cookies from the response headers. Args: - headers (Dict[str, Dict[str, str]]): The request/response headers in + headers (dict[str, dict[str, str]]): The request/response headers in dictionary format. """ @@ -204,11 +197,11 @@ def generate_cookie_headers( event (str): "new_tokens" | "sign_out" | "refresh_failed". client_id (str): The Cognito UserPool Client ID. oauth_scopes (List): The scopes for oauth validation. - tokens (Dict[str, str]): The tokens received from + tokens (dict[str, str]): The tokens received from the Cognito Request (id, access, refresh). domain_name (str): The Domain name the cookies are to be associated with. - cookie_settings (Dict[str, str]): The various settings + cookie_settings (dict[str, str]): The various settings that we would like for the various tokens. cookie_compatibility (str): "amplify" | "elasticsearch". 
@@ -249,9 +242,7 @@ def generate_cookie_headers( cookie_names = get_elasticsearch_cookie_names() cookies = { cookie_names["cognito_enabled_key"]: "True; " - + str( - with_cookie_domain(domain_name, cookie_settings.get("cognitoEnabled")) - ), + + str(with_cookie_domain(domain_name, cookie_settings.get("cognitoEnabled"))), } cookies[cookie_names["id_token_key"]] = f"{tokens.get('id_token')}; " + str( with_cookie_domain(domain_name, cookie_settings.get("idToken")), @@ -259,9 +250,8 @@ def generate_cookie_headers( cookies[cookie_names["access_token_key"]] = f"{tokens.get('access_token')}; " + str( with_cookie_domain(domain_name, cookie_settings.get("accessToken")), ) - cookies[cookie_names["refresh_token_key"]] = ( - f"{tokens.get('refresh_token')}; " - + str(with_cookie_domain(domain_name, cookie_settings.get("refreshToken"))) + cookies[cookie_names["refresh_token_key"]] = f"{tokens.get('refresh_token')}; " + str( + with_cookie_domain(domain_name, cookie_settings.get("refreshToken")) ) cookies_iter = cookies # type: ignore if event == "sign_out": @@ -278,9 +268,7 @@ def generate_cookie_headers( cookies[i] = expire_cookie(cookies[i]) # Return cookies in the form of CF headers - return [ - {"key": "set-cookie", "value": f"{key}={val}"} for key, val in cookies.items() - ] + return [{"key": "set-cookie", "value": f"{key}={val}"} for key, val in cookies.items()] def expire_cookie_filter(cookie): @@ -319,9 +307,9 @@ def http_post_with_retry(url, data, headers): Args: url (str): The URL to make the POST request to. - data (Dict[str, str]): The dictionary of data elements to + data (dict[str, str]): The dictionary of data elements to send with the request (urlencoded internally). - headers (Dict[str, str]): Any headers to send with + headers (dict[str, str]): Any headers to send with the POST request. """ @@ -335,7 +323,6 @@ def http_post_with_retry(url, data, headers): read = res.decode("utf-8") json_data = json.loads(read) return json_data - # pylint: disable=broad-except except Exception as err: LOGGER.error("HTTP POST to %s failed (attempt %s)", url, attempts) LOGGER.error(err) diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/shared_jose.py b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/shared_jose.py index 78cb9ae1d..0c0cffb41 100644 --- a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/shared_jose.py +++ b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/shared_jose.py @@ -7,7 +7,7 @@ import re from urllib import request -from jose import jwt # noqa pylint: disable=import-error +from jose import jwt LOGGER = logging.getLogger(__name__) @@ -92,7 +92,7 @@ def __init__(self, options=None): """Initialize. Args: - options (Optional[Dict[str, str]]): Options for the client. + options: Options for the client. 
""" self.options = options @@ -102,17 +102,14 @@ def get_keys(self): LOGGER.info("Fetching keys from %s", self.options.get("jwks_uri")) try: - # pylint: disable=consider-using-with request_res = request.urlopen(self.options.get("jwks_uri")) data = json.loads( - request_res.read().decode( - request_res.info().get_param("charset") or "utf-8" - ) + request_res.read().decode(request_res.info().get_param("charset") or "utf-8") ) keys = data["keys"] LOGGER.info("Keys: %s", keys) return keys - except Exception as err: # pylint: disable=broad-except + except Exception as err: LOGGER.info("Failure: ConnectionError") LOGGER.info(err) return {} @@ -167,7 +164,6 @@ def create_jwk(key): else: try: jwk["rsaPublicKey"] = rsa_public_key_to_pem(key.get("n"), key.get("e")) - # pylint: disable=broad-except except Exception as err: LOGGER.error(err) jwk["rsaPublicKey"] = None @@ -178,7 +174,7 @@ def is_signing_key(key): """Filter to determine if this is a signing key. Args: - key (Dict[str, str]): The key. + key (dict[str, str]): The key. """ if key.get("kty", "") != "RSA": @@ -240,9 +236,7 @@ def validate_jwt(jwt_token, jwks_uri, issuer, audience): ) -def validate_and_check_id_token( - id_token, jwks_uri, issuer, audience, required_group=None -): +def validate_and_check_id_token(id_token, jwks_uri, issuer, audience, required_group=None): """Validate JWT and (optionally) check group membership.""" id_token_payload = validate_jwt(id_token, jwks_uri, issuer, audience) if required_group: diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/sign_out/__init__.py b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/sign_out/__init__.py index ddb22583b..02e45c4b9 100644 --- a/runway/cfngin/hooks/staticsite/auth_at_edge/templates/sign_out/__init__.py +++ b/runway/cfngin/hooks/staticsite/auth_at_edge/templates/sign_out/__init__.py @@ -1,10 +1,9 @@ """Sign user out of Cognito and remove all Cookie Headers.""" -# pylint: disable=consider-using-f-string import logging from urllib.parse import urlencode -from shared import ( # noqa pylint: disable=import-error +from shared import ( create_error_html, extract_and_parse_cookies, generate_cookie_headers, @@ -31,9 +30,7 @@ def handler(event, _context): ), "status": "200", "headers": { - "content-type": [ - {"key": "Content-Type", "value": "text/html; charset=UTF-8"} - ], + "content-type": [{"key": "Content-Type", "value": "text/html; charset=UTF-8"}], **CONFIG.get("cloud_front_headers", {}), }, } diff --git a/runway/cfngin/hooks/staticsite/auth_at_edge/user_pool_id_retriever.py b/runway/cfngin/hooks/staticsite/auth_at_edge/user_pool_id_retriever.py index f44a4489c..1ed016824 100644 --- a/runway/cfngin/hooks/staticsite/auth_at_edge/user_pool_id_retriever.py +++ b/runway/cfngin/hooks/staticsite/auth_at_edge/user_pool_id_retriever.py @@ -1,7 +1,9 @@ """Retrieve the ID of the Cognito User Pool.""" +from __future__ import annotations + import logging -from typing import Any, Dict, Optional +from typing import Any from ...base import HookArgsBaseModel @@ -11,14 +13,14 @@ class HookArgs(HookArgsBaseModel): """Hook arguments.""" - created_user_pool_id: Optional[str] = None + created_user_pool_id: str | None = None """The ID of the created Cognito User Pool.""" - user_pool_arn: Optional[str] = None + user_pool_arn: str | None = None """The ARN of the supplied User pool.""" -def get(*__args: Any, **kwargs: Any) -> Dict[str, Any]: +def get(*__args: Any, **kwargs: Any) -> dict[str, Any]: """Retrieve the ID of the Cognito User Pool. 
The User Pool can either be supplied via an ARN or by being generated. @@ -29,7 +31,7 @@ def get(*__args: Any, **kwargs: Any) -> Dict[str, Any]: :class:`~runway.cfngin.hooks.staticsite.auth_at_edge.user_pool_id_retriever.HookArgs`. """ - args = HookArgs.parse_obj(kwargs) + args = HookArgs.model_validate(kwargs) # Favor a specific arn over a created one if args.user_pool_arn: diff --git a/runway/cfngin/hooks/staticsite/build_staticsite.py b/runway/cfngin/hooks/staticsite/build_staticsite.py index 22311bf30..cf6311517 100644 --- a/runway/cfngin/hooks/staticsite/build_staticsite.py +++ b/runway/cfngin/hooks/staticsite/build_staticsite.py @@ -7,13 +7,13 @@ import tempfile import zipfile from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Any import boto3 -from boto3.s3.transfer import S3Transfer # type: ignore +from boto3.s3.transfer import S3Transfer from typing_extensions import TypedDict -from ....module.staticsite.options.models import RunwayStaticSiteSourceHashingDataModel +from ....module.staticsite.options import RunwayStaticSiteSourceHashingDataModel from ....s3_utils import does_s3_object_exist, download_and_extract_to_mkdtemp from ....utils import change_dir, run_commands from ...lookups.handlers.rxref import RxrefLookup @@ -30,10 +30,10 @@ class HookArgsOptions(HookArgsBaseModel): """Hook arguments ``options`` block.""" - build_output: Optional[str] = None + build_output: str | None = None """Path were the build static site will be stored locally before upload.""" - build_steps: List[Union[str, List[str], Dict[str, Union[str, List[str]]]]] = [] + build_steps: list[str | list[str] | dict[str, str | list[str]]] = [] """Steps to execute to build the static site.""" name: str = "undefined" @@ -45,7 +45,7 @@ class HookArgsOptions(HookArgsBaseModel): path: str """Working directory/path to the static site's source code.""" - pre_build_steps: List[Union[str, List[str], Dict[str, Union[str, List[str]]]]] = [] + pre_build_steps: list[str | list[str] | dict[str, str | list[str]]] = [] """Steps to run before building the static site.""" source_hashing: RunwayStaticSiteSourceHashingDataModel = ( @@ -65,44 +65,43 @@ class HookArgs(HookArgsBaseModel): def zip_and_upload( - app_dir: str, bucket: str, key: str, session: Optional[boto3.Session] = None + app_dir: str, bucket: str, key: str, session: boto3.Session | None = None ) -> None: """Zip built static site and upload to S3.""" s3_client = session.client("s3") if session else boto3.client("s3") - transfer = S3Transfer(s3_client) # type: ignore + transfer = S3Transfer(s3_client) filedes, temp_file = tempfile.mkstemp() os.close(filedes) LOGGER.info("archiving %s to s3://%s/%s", app_dir, bucket, key) - with zipfile.ZipFile(temp_file, "w", zipfile.ZIP_DEFLATED) as filehandle: - with change_dir(app_dir): - for dirname, _subdirs, files in os.walk("./"): - if dirname != "./": - filehandle.write(dirname) - for filename in files: - filehandle.write(os.path.join(dirname, filename)) + with zipfile.ZipFile(temp_file, "w", zipfile.ZIP_DEFLATED) as filehandle, change_dir(app_dir): + for dirname, _subdirs, files in os.walk("./"): + if dirname != "./": + filehandle.write(dirname) + for filename in files: + filehandle.write(os.path.join(dirname, filename)) # noqa: PTH118 transfer.upload_file(temp_file, bucket, key) - os.remove(temp_file) + os.remove(temp_file) # noqa: PTH107 class OptionsArgTypeDef(TypedDict, total=False): """Options argument type definition.""" build_output: str - 
build_steps: List[Union[str, List[str], Dict[str, Union[str, List[str]]]]] + build_steps: list[str | list[str] | dict[str, str | list[str]]] name: str namespace: str path: str - pre_build_steps: List[Union[str, List[str], Dict[str, Union[str, List[str]]]]] + pre_build_steps: list[str | list[str] | dict[str, str | list[str]]] def build( context: CfnginContext, provider: Provider, *, - options: Optional[OptionsArgTypeDef] = None, + options: OptionsArgTypeDef | None = None, **kwargs: Any, -) -> Dict[str, Any]: +) -> dict[str, Any]: """Build static site. Arguments parsed by :class:`~runway.cfngin.hooks.staticsite.build_staticsite.HookArgs`. @@ -111,15 +110,15 @@ def build( options = options or {} options.setdefault("namespace", context.namespace) options.setdefault("path", str(context.config_path)) - args = HookArgs.parse_obj({"options": options, **kwargs}) + args = HookArgs.model_validate({"options": options, **kwargs}) session = context.get_session() - context_dict: Dict[str, Any] = { + context_dict: dict[str, Any] = { "artifact_key_prefix": f"{args.options.namespace}-{args.options.name}-" } if args.options.build_output: - build_output = os.path.join(args.options.path, args.options.build_output) + build_output = os.path.join(args.options.path, args.options.build_output) # noqa: PTH118 else: build_output = args.options.path @@ -132,17 +131,14 @@ def build( context_dict["hash"] = get_hash_of_files( root_path=Path(args.options.path), - directories=options.get("source_hashing", {"directories": None}).get( - "directories" - ), + directories=options.get("source_hashing", {"directories": None}).get("directories"), ) LOGGER.debug("application hash: %s", context_dict["hash"]) # Now determine if the current staticsite has already been deployed if args.options.source_hashing.enabled: context_dict["hash_tracking_parameter"] = ( - args.options.source_hashing.parameter - or f"{context_dict['artifact_key_prefix']}hash" + args.options.source_hashing.parameter or f"{context_dict['artifact_key_prefix']}hash" ) ssm_client = session.client("ssm") @@ -150,7 +146,7 @@ def build( try: old_parameter_value = ssm_client.get_parameter( Name=context_dict["hash_tracking_parameter"] - )["Parameter"]["Value"] + )["Parameter"].get("Value") except ssm_client.exceptions.ParameterNotFound: old_parameter_value = None else: diff --git a/runway/cfngin/hooks/staticsite/cleanup.py b/runway/cfngin/hooks/staticsite/cleanup.py index 75380947b..67d27e200 100644 --- a/runway/cfngin/hooks/staticsite/cleanup.py +++ b/runway/cfngin/hooks/staticsite/cleanup.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, List +from typing import TYPE_CHECKING, Any from ..base import HookArgsBaseModel @@ -41,16 +41,12 @@ class HookArgs(HookArgsBaseModel): """Name of the CloudFormation Stack as defined in the config file (no namespace).""" -def get_replicated_function_names(outputs: List[OutputTypeDef]) -> List[str]: +def get_replicated_function_names(outputs: list[OutputTypeDef]) -> list[str]: """Extract replicated function names from CFN outputs.""" - function_names: List[str] = [] + function_names: list[str] = [] for i in REPLICATED_FUNCTION_OUTPUTS: function_arn = next( - ( - output.get("OutputValue") - for output in outputs - if output.get("OutputKey") == i - ), + (output.get("OutputValue") for output in outputs if output.get("OutputKey") == i), None, ) if function_arn: @@ -58,32 +54,28 @@ def get_replicated_function_names(outputs: List[OutputTypeDef]) -> List[str]: return function_names -def 
warn(context: CfnginContext, *__args: Any, **kwargs: Any) -> bool: +def warn(context: CfnginContext, *_args: Any, **kwargs: Any) -> bool: """Notify the user of Lambda functions to delete. Arguments parsed by :class:`~runway.cfngin.hooks.staticsite.cleanup.HookArgs`. Args: context: The context instance. + **kwargs: Arbitrary keyword arguments. """ - args = HookArgs.parse_obj(kwargs) + args = HookArgs.model_validate(kwargs) cfn_client = context.get_session().client("cloudformation") try: describe_response = cfn_client.describe_stacks( - StackName=context.namespace - + context.namespace_delimiter - + args.stack_relative_name + StackName=context.namespace + context.namespace_delimiter + args.stack_relative_name ) stack = next( x for x in describe_response.get("Stacks", []) - if ( - x.get("StackStatus") - and x.get("StackStatus") not in STACK_STATUSES_TO_IGNORE - ) + if (x.get("StackStatus") and x.get("StackStatus") not in STACK_STATUSES_TO_IGNORE) ) - functions = get_replicated_function_names(stack["Outputs"]) + functions = get_replicated_function_names(stack.get("Outputs", [])) if functions: cmd = ( "aws lambda delete-function --function-name $x " @@ -101,7 +93,7 @@ def warn(context: CfnginContext, *__args: Any, **kwargs: Any) -> bool: LOGGER.warning("for x in %s; do %s; done", (" ").join(functions), cmd) LOGGER.warning("On Windows:") LOGGER.warning('Foreach ($x in "%s") { %s }', ('","').join(functions), cmd) - except Exception: # pylint: disable=broad-except + except Exception: # noqa: S110, BLE001 # There's no harm in continuing on in the event of an error # Orphaned functions have no cost pass diff --git a/runway/cfngin/hooks/staticsite/upload_staticsite.py b/runway/cfngin/hooks/staticsite/upload_staticsite.py index 5e9d173dd..e51306775 100644 --- a/runway/cfngin/hooks/staticsite/upload_staticsite.py +++ b/runway/cfngin/hooks/staticsite/upload_staticsite.py @@ -8,12 +8,12 @@ import os import time from operator import itemgetter -from typing import TYPE_CHECKING, Any, Dict, List, Optional, cast +from typing import TYPE_CHECKING, Any, cast import yaml from ....core.providers.aws.s3 import Bucket -from ....module.staticsite.options.models import RunwayStaticSiteExtraFileDataModel +from ....module.staticsite.options import RunwayStaticSiteExtraFileDataModel from ....utils import JsonEncoder from ..base import HookArgsBaseModel @@ -43,16 +43,14 @@ class HookArgs(HookArgsBaseModel): distribution_path: str = "/*" """Path in the CloudFront distribution to invalidate.""" - extra_files: List[RunwayStaticSiteExtraFileDataModel] = [] + extra_files: list[RunwayStaticSiteExtraFileDataModel] = [] """Extra files to sync to the S3 bucket.""" - website_url: Optional[str] = None + website_url: str | None = None """S3 bucket website URL.""" -def get_archives_to_prune( - archives: List[Dict[str, Any]], hook_data: Dict[str, Any] -) -> List[str]: +def get_archives_to_prune(archives: list[dict[str, Any]], hook_data: dict[str, Any]) -> list[str]: """Return list of keys to delete. Args: @@ -66,9 +64,7 @@ def get_archives_to_prune( if hook_data.get(i) ] - archives.sort( # sort from oldest to newest - key=itemgetter("LastModified"), reverse=False - ) + archives.sort(key=itemgetter("LastModified"), reverse=False) # sort from oldest to newest # Drop all but last 15 files return [i["Key"] for i in archives[:-15] if i["Key"] not in files_to_skip] @@ -81,9 +77,10 @@ def sync(context: CfnginContext, *__args: Any, **kwargs: Any) -> bool: Args: context: The context instance. + **kwargs: Arbitrary keyword arguments. 
""" - args = HookArgs.parse_obj(kwargs) + args = HookArgs.model_validate(kwargs) session = context.get_session() build_context = context.hook_data["staticsite"] invalidate_cache = False @@ -195,7 +192,7 @@ def prune_archives(context: CfnginContext, session: Session) -> bool: """ LOGGER.info("cleaning up old site archives...") - archives: List[Dict[str, Any]] = [] + archives: list[dict[str, Any]] = [] s3_client = session.client("s3") list_objects_v2_paginator = s3_client.get_paginator("list_objects_v2") response_iterator = list_objects_v2_paginator.paginate( @@ -218,7 +215,7 @@ def prune_archives(context: CfnginContext, session: Session) -> bool: return True -def auto_detect_content_type(filename: Optional[str]) -> Optional[str]: +def auto_detect_content_type(filename: str | None) -> str | None: """Auto detects the content type based on the filename. Args: @@ -231,7 +228,7 @@ def auto_detect_content_type(filename: Optional[str]) -> Optional[str]: if not filename: return None - _, ext = os.path.splitext(filename) + _, ext = os.path.splitext(filename) # noqa: PTH122 if ext == ".json": return "application/json" @@ -242,7 +239,7 @@ def auto_detect_content_type(filename: Optional[str]) -> Optional[str]: return None -def get_content_type(extra_file: RunwayStaticSiteExtraFileDataModel) -> Optional[str]: +def get_content_type(extra_file: RunwayStaticSiteExtraFileDataModel) -> str | None: """Return the content type of the file. Args: @@ -256,7 +253,7 @@ def get_content_type(extra_file: RunwayStaticSiteExtraFileDataModel) -> Optional return extra_file.content_type or auto_detect_content_type(extra_file.name) -def get_content(extra_file: RunwayStaticSiteExtraFileDataModel) -> Optional[str]: +def get_content(extra_file: RunwayStaticSiteExtraFileDataModel) -> str | None: """Get serialized content based on content_type. Args: @@ -274,18 +271,16 @@ def get_content(extra_file: RunwayStaticSiteExtraFileDataModel) -> Optional[str] if extra_file.content_type == "text/yaml": return yaml.safe_dump(extra_file.content) - raise ValueError( - '"content_type" must be json or yaml if "content" is not a string' - ) + raise ValueError('"content_type" must be json or yaml if "content" is not a string') if not isinstance(extra_file.content, str): raise TypeError(f"unsupported content: {type(extra_file.content)}") - return cast(Optional[str], extra_file.content) + return cast("str | None", extra_file.content) def calculate_hash_of_extra_files( - extra_files: List[RunwayStaticSiteExtraFileDataModel], + extra_files: list[RunwayStaticSiteExtraFileDataModel], ) -> str: """Return a hash of all of the given extra files. @@ -299,7 +294,7 @@ def calculate_hash_of_extra_files( The hash of all the files. 
""" - file_hash = hashlib.md5() + file_hash = hashlib.md5() # noqa: S324 for extra_file in sorted(extra_files, key=lambda x: x.name): file_hash.update((extra_file.name + "\0").encode()) @@ -312,20 +307,18 @@ def calculate_hash_of_extra_files( file_hash.update((cast(str, extra_file.content) + "\0").encode()) if extra_file.file: - with open(extra_file.file, "rb") as f: + with open(extra_file.file, "rb") as f: # noqa: PTH123 LOGGER.debug("hashing file: %s", extra_file.file) - for chunk in iter( - lambda: f.read(4096), "" # pylint: disable=cell-var-from-loop - ): + for chunk in iter(lambda: f.read(4096), ""): if not chunk: break file_hash.update(chunk) - file_hash.update("\0".encode()) + file_hash.update(b"\0") return file_hash.hexdigest() -def get_ssm_value(session: Session, name: str) -> Optional[str]: +def get_ssm_value(session: Session, name: str) -> str | None: """Get the ssm parameter value. Args: @@ -339,14 +332,12 @@ def get_ssm_value(session: Session, name: str) -> Optional[str]: ssm_client = session.client("ssm") try: - return ssm_client.get_parameter(Name=name)["Parameter"]["Value"] + return ssm_client.get_parameter(Name=name)["Parameter"].get("Value") except ssm_client.exceptions.ParameterNotFound: return None -def set_ssm_value( - session: Session, name: str, value: Any, description: str = "" -) -> None: +def set_ssm_value(session: Session, name: str, value: Any, description: str = "") -> None: """Set the ssm parameter. Args: @@ -363,18 +354,19 @@ def set_ssm_value( ) -def sync_extra_files( +def sync_extra_files( # noqa: C901 context: CfnginContext, bucket: str, - extra_files: List[RunwayStaticSiteExtraFileDataModel], + extra_files: list[RunwayStaticSiteExtraFileDataModel], **kwargs: Any, -) -> List[str]: +) -> list[str]: """Sync static website extra files to S3 bucket. Args: context: The context instance. bucket: The static site bucket name. extra_files: List of files and file content that should be uploaded. + **kwargs: Arbitrary keyword arguments. 
""" LOGGER.debug("extra_files to sync: %s", json.dumps(extra_files, cls=JsonEncoder)) @@ -384,7 +376,7 @@ def sync_extra_files( session = context.get_session() s3_client = session.client("s3") - uploaded: List[str] = [] + uploaded: list[str] = [] hash_param = cast(str, kwargs.get("hash_tracking_parameter", "")) hash_new = None @@ -404,9 +396,7 @@ def sync_extra_files( hash_new = calculate_hash_of_extra_files(extra_files) if hash_new == hash_old: - LOGGER.info( - "skipped upload of extra files; latest version already deployed" - ) + LOGGER.info("skipped upload of extra files; latest version already deployed") return [] for extra_file in extra_files: @@ -423,9 +413,7 @@ def sync_extra_files( uploaded.append(extra_file.name) if extra_file.file: - LOGGER.info( - "uploading extra file: %s as %s ", extra_file.file, extra_file.name - ) + LOGGER.info("uploading extra file: %s as %s ", extra_file.file, extra_file.name) extra_args = "" @@ -449,9 +437,7 @@ def sync_extra_files( uploaded.append(extra_file.name) if hash_new: - LOGGER.info( - "updating extra files SSM parameter %s with hash %s", hash_param, hash_new - ) + LOGGER.info("updating extra files SSM parameter %s with hash %s", hash_param, hash_new) set_ssm_value(session, hash_param, hash_new) return uploaded diff --git a/runway/cfngin/hooks/staticsite/utils.py b/runway/cfngin/hooks/staticsite/utils.py index 1f8ea2e22..67a50c96c 100644 --- a/runway/cfngin/hooks/staticsite/utils.py +++ b/runway/cfngin/hooks/staticsite/utils.py @@ -6,13 +6,15 @@ import logging import os from pathlib import Path -from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Union, cast +from typing import TYPE_CHECKING, cast import igittigitt from ....utils import FileHash, change_dir if TYPE_CHECKING: + from collections.abc import Iterable + from _typeshed import StrPath LOGGER = logging.getLogger(__name__) @@ -29,14 +31,14 @@ def calculate_hash_of_files(files: Iterable[StrPath], root: Path) -> str: A hash of the hashes of the given files. """ - file_hash = FileHash(hashlib.md5()) + file_hash = FileHash(hashlib.md5()) # noqa: S324 file_hash.add_files(sorted(str(f) for f in files), relative_to=root) return file_hash.hexdigest def get_hash_of_files( root_path: Path, - directories: Optional[List[Dict[str, Union[List[str], str]]]] = None, + directories: list[dict[str, list[str] | str | None]] | None = None, ) -> str: """Generate md5 hash of files. @@ -49,19 +51,19 @@ def get_hash_of_files( """ directories = directories or [{"path": "./"}] - files_to_hash: List[StrPath] = [] + files_to_hash: list[StrPath] = [] for i in directories: gitignore = get_ignorer( root_path / cast(str, i["path"]), - cast(Optional[List[str]], i.get("exclusions")), + cast("list[str] | None", i.get("exclusions")), ) with change_dir(root_path): for root, dirs, files in os.walk(cast(str, i["path"]), topdown=True): sub_root = Path(root).resolve() if root != "./" and gitignore.match(sub_root): - dirs[:] = [] # type: ignore - files[:] = [] # type: ignore + dirs[:] = [] + files[:] = [] else: for filename in files: filepath = sub_root / filename @@ -72,7 +74,7 @@ def get_hash_of_files( def get_ignorer( - path: Path, additional_exclusions: Optional[List[str]] = None + path: Path, additional_exclusions: list[str] | None = None ) -> igittigitt.IgnoreParser: """Create gitignore filter from directory ``.gitignore`` file. 
diff --git a/runway/cfngin/hooks/utils.py b/runway/cfngin/hooks/utils.py
index 565c99fc5..59edd85a2 100644
--- a/runway/cfngin/hooks/utils.py
+++ b/runway/cfngin/hooks/utils.py
@@ -4,9 +4,9 @@
 
 import collections.abc
 import logging
-import os
 import sys
-from typing import TYPE_CHECKING, Any, Dict, List, cast
+from pathlib import Path
+from typing import TYPE_CHECKING, Annotated, Any
 
 import pydantic
 
@@ -30,36 +30,28 @@ def create_template(self) -> None:
         """Create template without raising NotImplementedError."""
 
 
-# TODO BREAKING find a better place for this
+# TODO (kyle): BREAKING move to runway.providers.aws.models.TagModel
class TagDataModel(BaseModel):
     """AWS Resource Tag data model."""
 
-    key: str
-    value: str
+    model_config = pydantic.ConfigDict(extra="forbid", populate_by_name=True)
 
-    class Config:
-        """Model configuration."""
-
-        allow_population_by_field_name = True
-        extra = pydantic.Extra.forbid
-        fields = {
-            "key": {"alias": "Key"},
-            "value": {"alias": "Value"},
-        }
+    key: Annotated[str, pydantic.Field(alias="Key")]
+    value: Annotated[str, pydantic.Field(alias="Value")]
 
 
 def full_path(path: str) -> str:
     """Return full path."""
-    return os.path.abspath(os.path.expanduser(path))
+    # keep the ~ expansion performed by the os.path version
+    return str(Path(path).expanduser().absolute())
 
 
-# TODO split up to reduce number of statements
-def handle_hooks(  # pylint: disable=too-many-statements
+# TODO (kyle): split up to reduce number of statements
+def handle_hooks(  # noqa: C901, PLR0912, PLR0915
     stage: str,
-    hooks: List[CfnginHookDefinitionModel],
+    hooks: list[CfnginHookDefinitionModel],
     provider: Provider,
     context: CfnginContext,
-):
+) -> None:
     """Handle pre/post_deploy hooks.
 
     These are pieces of code that we want to run before/after deploying
@@ -76,7 +68,7 @@
         LOGGER.debug("no %s hooks defined", stage)
         return
 
-    hook_paths: List[str] = []
+    hook_paths: list[str] = []
     for i, hook in enumerate(hooks):
         try:
             hook_paths.append(hook.path)
@@ -111,18 +103,16 @@
                     "does not exist yet"
                 )
                 raise
-            kwargs: Dict[str, Any] = {v.name: v.value for v in args}
+            kwargs: dict[str, Any] = {v.name: v.value for v in args}
         else:
             kwargs = {}
 
         try:
             if isinstance(method, type):
-                result: Any = getattr(
-                    method(context=context, provider=provider, **kwargs), stage
-                )()
+                result: Any = getattr(method(context=context, provider=provider, **kwargs), stage)()
             else:
-                result = cast(Any, method(context=context, provider=provider, **kwargs))
-        except Exception:  # pylint: disable=broad-except
+                result = method(context=context, provider=provider, **kwargs)
+        except Exception:
             LOGGER.exception("hook %s threw an exception", hook.path)
             if hook.required:
                 raise
@@ -130,24 +120,19 @@
 
         if not result:
             if hook.required:
-                LOGGER.error(
-                    "required hook %s failed; return value: %s", hook.path, result
-                )
+                LOGGER.error("required hook %s failed; return value: %s", hook.path, result)
                 sys.exit(1)
-            LOGGER.warning(
-                "non-required hook %s failed; return value: %s", hook.path, result
-            )
-        else:
-            if isinstance(result, (collections.abc.Mapping, pydantic.BaseModel)):
-                if hook.data_key:
-                    LOGGER.debug(
-                        "adding result for hook %s to context in data_key %s",
-                        hook.path,
-                        hook.data_key,
-                    )
-                    context.set_hook_data(hook.data_key, result)
-                else:
-                    LOGGER.debug(
-                        "hook %s returned result data but no data key set; ignoring",
-                        hook.path,
-                    )
+            LOGGER.warning("non-required hook %s failed; return value: %s", hook.path, result)
+        elif 
isinstance(result, (collections.abc.Mapping, pydantic.BaseModel)): + if hook.data_key: + LOGGER.debug( + "adding result for hook %s to context in data_key %s", + hook.path, + hook.data_key, + ) + context.set_hook_data(hook.data_key, result) + else: + LOGGER.debug( + "hook %s returned result data but no data key set; ignoring", + hook.path, + ) diff --git a/runway/cfngin/logger/__init__.py b/runway/cfngin/logger/__init__.py index da3e36b3f..ebe0e5447 100644 --- a/runway/cfngin/logger/__init__.py +++ b/runway/cfngin/logger/__init__.py @@ -1,12 +1,13 @@ """CFNgin logger.""" +from __future__ import annotations + import logging import sys -from typing import Any, Dict, Optional +from typing import Any DEBUG_FORMAT = ( - "[%(asctime)s] %(levelname)s %(threadName)s " - "%(name)s:%(lineno)d(%(funcName)s): %(message)s" + "[%(asctime)s] %(levelname)s %(threadName)s %(name)s:%(lineno)d(%(funcName)s): %(message)s" ) INFO_FORMAT = "[%(asctime)s] %(message)s" COLOR_FORMAT = "[%(asctime)s] \033[%(color)sm%(message)s\033[39m" @@ -24,7 +25,7 @@ def format(self, record: logging.LogRecord) -> str: return super().format(record) -def setup_logging(verbosity: int, formats: Optional[Dict[str, Any]] = None): +def setup_logging(verbosity: int, formats: dict[str, Any] | None = None) -> None: """Configure a proper logger based on verbosity and optional log formats. Args: diff --git a/runway/cfngin/lookups/handlers/ami.py b/runway/cfngin/lookups/handlers/ami.py index 6ecde4dd8..2b3ca2bd9 100644 --- a/runway/cfngin/lookups/handlers/ami.py +++ b/runway/cfngin/lookups/handlers/ami.py @@ -1,21 +1,19 @@ """AMI lookup.""" -# pylint: disable=no-self-argument -# pyright: reportIncompatibleMethodOverride=none from __future__ import annotations import operator import re -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, ClassVar -from pydantic import validator -from typing_extensions import Final, Literal +from pydantic import field_validator from ....lookups.handlers.base import LookupHandler from ....utils import BaseModel from ...utils import read_value_from_path if TYPE_CHECKING: + from ....context import CfnginContext @@ -27,21 +25,22 @@ class ArgsDataModel(BaseModel): """ - executable_users: Optional[List[str]] = None + executable_users: list[str] | None = None """List of executable users.""" - owners: List[str] + owners: list[str] """At least one owner is required. Should be ``amazon``, ``self``, or an AWS account ID. 
""" - region: Optional[str] = None + region: str | None = None """AWS region.""" - @validator("executable_users", "owners", allow_reuse=True, pre=True) - def _convert_str_to_list(cls, v: Union[List[str], str]) -> List[str]: + @field_validator("executable_users", "owners", mode="before") + @classmethod + def _convert_str_to_list(cls, v: list[str] | str) -> list[str]: """Convert str to list.""" if isinstance(v, str): return v.split(",") @@ -56,19 +55,17 @@ class ImageNotFound(Exception): def __init__(self, search_string: str) -> None: """Instantiate class.""" self.search_string = search_string - super().__init__( - f"Unable to find ec2 image with search string: {search_string}" - ) + super().__init__(f"Unable to find ec2 image with search string: {search_string}") -class AmiLookup(LookupHandler): +class AmiLookup(LookupHandler["CfnginContext"]): """AMI lookup.""" - TYPE_NAME: Final[Literal["ami"]] = "ami" + TYPE_NAME: ClassVar[str] = "ami" """Name that the Lookup is registered as.""" @classmethod - def parse(cls, value: str) -> Tuple[str, Dict[str, str]]: + def parse_query(cls, value: str) -> tuple[str, dict[str, str]]: """Parse the value passed to the lookup. This overrides the default parsing to account for special requirements. @@ -81,7 +78,7 @@ def parse(cls, value: str) -> Tuple[str, Dict[str, str]]: """ raw_value = read_value_from_path(value) - args: Dict[str, str] = {} + args: dict[str, str] = {} if "@" in raw_value: args["region"], raw_value = raw_value.split("@", 1) @@ -95,9 +92,7 @@ def parse(cls, value: str) -> Tuple[str, Dict[str, str]]: return args.pop("name_regex"), args @classmethod - def handle( # pylint: disable=arguments-differ - cls, value: str, context: CfnginContext, *__args: Any, **__kwargs: Any - ) -> str: + def handle(cls, value: str, context: CfnginContext, **_kwargs: Any) -> str: """Fetch the most recent AMI Id using a filter. Args: @@ -116,18 +111,16 @@ def handle( # pylint: disable=arguments-differ You can also optionally specify the region in which to perform the AMI lookup. - """ # noqa - query, raw_args = cls.parse(value) - args = ArgsDataModel.parse_obj(raw_args) + """ + query, raw_args = cls.parse_query(value) + args = ArgsDataModel.model_validate(raw_args) ec2 = context.get_session(region=args.region).client("ec2") - describe_args: Dict[str, Any] = { + describe_args: dict[str, Any] = { "Filters": [ {"Name": key, "Values": val.split(",") if val else val} for key, val in { - k: v - for k, v in raw_args.items() - if k not in ArgsDataModel.__fields__ + k: v for k, v in raw_args.items() if k not in ArgsDataModel.model_fields }.items() ], "Owners": args.owners, diff --git a/runway/cfngin/lookups/handlers/awslambda.py b/runway/cfngin/lookups/handlers/awslambda.py index 99cf925f1..69b2f01bf 100644 --- a/runway/cfngin/lookups/handlers/awslambda.py +++ b/runway/cfngin/lookups/handlers/awslambda.py @@ -8,35 +8,34 @@ The :attr:`~cfngin.hook.data_key` is then passed to the lookup as it's input/query. This allows the lookup to function during a ``runway plan``. 
-""" # noqa +""" from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, List, Optional, Union, cast +from typing import TYPE_CHECKING, Any, ClassVar, cast from pydantic import ValidationError from troposphere.awslambda import Code, Content -from typing_extensions import Final, Literal from ....lookups.handlers.base import LookupHandler from ....utils import load_object_from_string -from ...exceptions import CfnginOnlyLookupError if TYPE_CHECKING: + from ....config import CfnginConfig from ....config.models.cfngin import CfnginHookDefinitionModel - from ....context import CfnginContext, RunwayContext + from ....context import CfnginContext from ...hooks.awslambda.base_classes import AwsLambdaHook from ...hooks.awslambda.models.responses import AwsLambdaHookDeployResponse LOGGER = logging.getLogger(__name__) -class AwsLambdaLookup(LookupHandler): +class AwsLambdaLookup(LookupHandler["CfnginContext"]): """Lookup for AwsLambdaHook responses.""" - TYPE_NAME: Final[Literal["awslambda"]] = "awslambda" + TYPE_NAME: ClassVar[str] = "awslambda" @classmethod def get_deployment_package_data( @@ -62,7 +61,6 @@ def get_deployment_package_data( """ # needs to be imported here to avoid cyclic imports for conditional code # caused by import of runway.cfngin.actions.deploy in runway.cfngin.hooks.base - # pylint: disable=import-outside-toplevel from ...hooks.awslambda.models.responses import ( AwsLambdaHookDeployResponse as _AwsLambdaHookDeployResponse, ) @@ -74,7 +72,7 @@ def get_deployment_package_data( ) context.set_hook_data(data_key, hook.plan()) try: - return _AwsLambdaHookDeployResponse.parse_obj(context.hook_data[data_key]) + return _AwsLambdaHookDeployResponse.model_validate(context.hook_data[data_key]) except ValidationError: raise TypeError( "expected AwsLambdaHookDeployResponseTypedDict, " @@ -108,18 +106,12 @@ def get_required_hook_definition( if not hooks_with_data_key: raise ValueError(f"no hook definition found with data_key {data_key}") if len(hooks_with_data_key) > 1: - raise ValueError( - f"more than one hook definition found with data_key {data_key}" - ) + raise ValueError(f"more than one hook definition found with data_key {data_key}") return hooks_with_data_key.pop() @classmethod - def handle( # pylint: disable=arguments-differ - cls, - value: str, - context: Union[CfnginContext, RunwayContext], - *_args: Any, - **_kwargs: Any, + def handle( + cls, value: str, context: CfnginContext, **_kwargs: Any ) -> AwsLambdaHookDeployResponse: """Retrieve metadata for an AWS Lambda deployment package. @@ -132,12 +124,8 @@ def handle( # pylint: disable=arguments-differ data model. 
""" - # `if isinstance(context, _RunwayContext)` without needing to import candidate - # importing candidate causes cyclic import - if "RunwayContext" in type(context).__name__: - raise CfnginOnlyLookupError(cls.TYPE_NAME) query, _ = cls.parse(value) - return cls.get_deployment_package_data(cast("CfnginContext", context), query) + return cls.get_deployment_package_data(context, query) @staticmethod def init_hook_class( @@ -156,7 +144,6 @@ def init_hook_class( """ # needs to be imported here to avoid cyclic imports for conditional code # caused by import of runway.cfngin.actions.deploy in runway.cfngin.hooks.base - # pylint: disable=import-outside-toplevel from ...hooks.awslambda.base_classes import AwsLambdaHook as _AwsLambdaHook kls = load_object_from_string(hook_def.path) @@ -171,24 +158,20 @@ def init_hook_class( ) return cast("AwsLambdaHook[Any]", kls(context, **hook_def.args)) - class Code(LookupHandler): + class Code(LookupHandler["CfnginContext"]): """Lookup for AwsLambdaHook responses.""" - TYPE_NAME: Final[Literal["awslambda.Code"]] = "awslambda.Code" + TYPE_NAME: ClassVar[str] = "awslambda.Code" @classmethod - def handle( # pylint: disable=arguments-differ - cls, - value: str, - context: Union[CfnginContext, RunwayContext], - *args: Any, - **kwargs: Any, - ) -> Code: + def handle(cls, value: str, context: CfnginContext, *args: Any, **kwargs: Any) -> Code: """Retrieve metadata for an AWS Lambda deployment package. Args: value: Value to resolve. context: The current context object. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. Returns: Value that can be passed into CloudFormation property @@ -196,31 +179,27 @@ def handle( # pylint: disable=arguments-differ """ return Code( - **AwsLambdaLookup.handle(value, context, *args, **kwargs).dict( + **AwsLambdaLookup.handle(value, context, *args, **kwargs).model_dump( by_alias=True, exclude_none=True, include={"bucket_name", "object_key", "object_version_id"}, ) ) - class CodeSha256(LookupHandler): + class CodeSha256(LookupHandler["CfnginContext"]): """Lookup for AwsLambdaHook responses.""" - TYPE_NAME: Final[Literal["awslambda.CodeSha256"]] = "awslambda.CodeSha256" + TYPE_NAME: ClassVar[str] = "awslambda.CodeSha256" @classmethod - def handle( # pylint: disable=arguments-differ - cls, - value: str, - context: Union[CfnginContext, RunwayContext], - *args: Any, - **kwargs: Any, - ) -> str: + def handle(cls, value: str, context: CfnginContext, *args: Any, **kwargs: Any) -> str: """Retrieve metadata for an AWS Lambda deployment package. Args: value: Value to resolve. context: The current context object. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. 
Returns: Value that can be passed into CloudFormation property @@ -229,26 +208,22 @@ def handle( # pylint: disable=arguments-differ """ return AwsLambdaLookup.handle(value, context, *args, **kwargs).code_sha256 - class CompatibleArchitectures(LookupHandler): + class CompatibleArchitectures(LookupHandler["CfnginContext"]): """Lookup for AwsLambdaHook responses.""" - TYPE_NAME: Final[Literal["awslambda.CompatibleArchitectures"]] = ( - "awslambda.CompatibleArchitectures" - ) + TYPE_NAME: ClassVar[str] = "awslambda.CompatibleArchitectures" @classmethod - def handle( # pylint: disable=arguments-differ - cls, - value: str, - context: Union[CfnginContext, RunwayContext], - *args: Any, - **kwargs: Any, - ) -> Optional[List[str]]: + def handle( + cls, value: str, context: CfnginContext, *args: Any, **kwargs: Any + ) -> list[str] | None: """Retrieve metadata for an AWS Lambda deployment package. Args: value: Value to resolve. context: The current context object. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. Returns: Value that can be passed into CloudFormation property @@ -257,32 +232,24 @@ def handle( # pylint: disable=arguments-differ """ _query, lookup_args = cls.parse(value) return cls.format_results( - AwsLambdaLookup.handle( - value, context, *args, **kwargs - ).compatible_architectures, + AwsLambdaLookup.handle(value, context, *args, **kwargs).compatible_architectures, **lookup_args, ) - class CompatibleRuntimes(LookupHandler): + class CompatibleRuntimes(LookupHandler["CfnginContext"]): """Lookup for AwsLambdaHook responses.""" - TYPE_NAME: Final[Literal["awslambda.CompatibleRuntimes"]] = ( - "awslambda.CompatibleRuntimes" - ) + TYPE_NAME: ClassVar[str] = "awslambda.CompatibleRuntimes" @classmethod - def handle( # pylint: disable=arguments-differ - cls, - value: str, - context: Union[CfnginContext, RunwayContext], - *args: Any, - **kwargs: Any, - ) -> Any: + def handle(cls, value: str, context: CfnginContext, *args: Any, **kwargs: Any) -> Any: """Retrieve metadata for an AWS Lambda deployment package. Args: value: Value to resolve. context: The current context object. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. Returns: Value that can be passed into CloudFormation property @@ -291,30 +258,24 @@ def handle( # pylint: disable=arguments-differ """ _query, lookup_args = cls.parse(value) return cls.format_results( - AwsLambdaLookup.handle( - value, context, *args, **kwargs - ).compatible_runtimes, + AwsLambdaLookup.handle(value, context, *args, **kwargs).compatible_runtimes, **lookup_args, ) - class Content(LookupHandler): + class Content(LookupHandler["CfnginContext"]): """Lookup for AwsLambdaHook responses.""" - TYPE_NAME: Final[Literal["awslambda.Content"]] = "awslambda.Content" + TYPE_NAME: ClassVar[str] = "awslambda.Content" @classmethod - def handle( # pylint: disable=arguments-differ - cls, - value: str, - context: Union[CfnginContext, RunwayContext], - *args: Any, - **kwargs: Any, - ) -> Content: + def handle(cls, value: str, context: CfnginContext, *args: Any, **kwargs: Any) -> Content: """Retrieve metadata for an AWS Lambda deployment package. Args: value: Value to resolve. context: The current context object. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. 
Returns: Value that can be passed into CloudFormation property @@ -322,31 +283,29 @@ def handle( # pylint: disable=arguments-differ """ return Content( - **AwsLambdaLookup.handle(value, context, *args, **kwargs).dict( + **AwsLambdaLookup.handle(value, context, *args, **kwargs).model_dump( by_alias=True, exclude_none=True, include={"bucket_name", "object_key", "object_version_id"}, ) ) - class LicenseInfo(LookupHandler): + class LicenseInfo(LookupHandler["CfnginContext"]): """Lookup for AwsLambdaHook responses.""" - TYPE_NAME: Final[Literal["awslambda.LicenseInfo"]] = "awslambda.LicenseInfo" + TYPE_NAME: ClassVar[str] = "awslambda.LicenseInfo" @classmethod - def handle( # pylint: disable=arguments-differ - cls, - value: str, - context: Union[CfnginContext, RunwayContext], - *args: Any, - **kwargs: Any, - ) -> Optional[str]: + def handle( + cls, value: str, context: CfnginContext, *args: Any, **kwargs: Any + ) -> str | None: """Retrieve metadata for an AWS Lambda deployment package. Args: value: Value to resolve. context: The current context object. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. Returns: Value that can be passed into CloudFormation property @@ -359,24 +318,20 @@ def handle( # pylint: disable=arguments-differ **lookup_args, ) - class Runtime(LookupHandler): + class Runtime(LookupHandler["CfnginContext"]): """Lookup for AwsLambdaHook responses.""" - TYPE_NAME: Final[Literal["awslambda.Runtime"]] = "awslambda.Runtime" + TYPE_NAME: ClassVar[str] = "awslambda.Runtime" @classmethod - def handle( # pylint: disable=arguments-differ - cls, - value: str, - context: Union[CfnginContext, RunwayContext], - *args: Any, - **kwargs: Any, - ) -> str: + def handle(cls, value: str, context: CfnginContext, *args: Any, **kwargs: Any) -> str: """Retrieve metadata for an AWS Lambda deployment package. Args: value: Value to resolve. context: The current context object. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. Returns: Value that can be passed into CloudFormation property @@ -385,24 +340,20 @@ def handle( # pylint: disable=arguments-differ """ return AwsLambdaLookup.handle(value, context, *args, **kwargs).runtime - class S3Bucket(LookupHandler): + class S3Bucket(LookupHandler["CfnginContext"]): """Lookup for AwsLambdaHook responses.""" - TYPE_NAME: Final[Literal["awslambda.S3Bucket"]] = "awslambda.S3Bucket" + TYPE_NAME: ClassVar[str] = "awslambda.S3Bucket" @classmethod - def handle( # pylint: disable=arguments-differ - cls, - value: str, - context: Union[CfnginContext, RunwayContext], - *args: Any, - **kwargs: Any, - ) -> str: + def handle(cls, value: str, context: CfnginContext, *args: Any, **kwargs: Any) -> str: """Retrieve metadata for an AWS Lambda deployment package. Args: value: Value to resolve. context: The current context object. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. 
Returns: Value that can be passed into CloudFormation property @@ -412,24 +363,20 @@ def handle( # pylint: disable=arguments-differ """ return AwsLambdaLookup.handle(value, context, *args, **kwargs).bucket_name - class S3Key(LookupHandler): + class S3Key(LookupHandler["CfnginContext"]): """Lookup for AwsLambdaHook responses.""" - TYPE_NAME: Final[Literal["awslambda.S3Key"]] = "awslambda.S3Key" + TYPE_NAME: ClassVar[str] = "awslambda.S3Key" @classmethod - def handle( # pylint: disable=arguments-differ - cls, - value: str, - context: Union[CfnginContext, RunwayContext], - *args: Any, - **kwargs: Any, - ) -> str: + def handle(cls, value: str, context: CfnginContext, *args: Any, **kwargs: Any) -> str: """Retrieve metadata for an AWS Lambda deployment package. Args: value: Value to resolve. context: The current context object. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. Returns: Value that can be passed into CloudFormation property @@ -439,26 +386,22 @@ def handle( # pylint: disable=arguments-differ """ return AwsLambdaLookup.handle(value, context, *args, **kwargs).object_key - class S3ObjectVersion(LookupHandler): + class S3ObjectVersion(LookupHandler["CfnginContext"]): """Lookup for AwsLambdaHook responses.""" - TYPE_NAME: Final[Literal["awslambda.S3ObjectVersion"]] = ( - "awslambda.S3ObjectVersion" - ) + TYPE_NAME: ClassVar[str] = "awslambda.S3ObjectVersion" @classmethod - def handle( # pylint: disable=arguments-differ - cls, - value: str, - context: Union[CfnginContext, RunwayContext], - *args: Any, - **kwargs: Any, - ) -> Optional[str]: + def handle( + cls, value: str, context: CfnginContext, *args: Any, **kwargs: Any + ) -> str | None: """Retrieve metadata for an AWS Lambda deployment package. Args: value: Value to resolve. context: The current context object. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. Returns: Value that can be passed into CloudFormation property @@ -466,6 +409,4 @@ def handle( # pylint: disable=arguments-differ ``AWS::Lambda::LayerVersion.Content.S3ObjectVersion``. """ - return AwsLambdaLookup.handle( - value, context, *args, **kwargs - ).object_version_id + return AwsLambdaLookup.handle(value, context, *args, **kwargs).object_version_id diff --git a/runway/cfngin/lookups/handlers/default.py b/runway/cfngin/lookups/handlers/default.py index 5fed55eb1..fe96906a1 100644 --- a/runway/cfngin/lookups/handlers/default.py +++ b/runway/cfngin/lookups/handlers/default.py @@ -1,28 +1,24 @@ """Lookup to provide a default value.""" -# pyright: reportIncompatibleMethodOverride=none from __future__ import annotations -from typing import TYPE_CHECKING, Any, Optional - -from typing_extensions import Final, Literal +from typing import TYPE_CHECKING, Any, ClassVar from ....lookups.handlers.base import LookupHandler if TYPE_CHECKING: + from ....context import CfnginContext -class DefaultLookup(LookupHandler): +class DefaultLookup(LookupHandler["CfnginContext"]): """Lookup to provide a default value.""" - TYPE_NAME: Final[Literal["default"]] = "default" + TYPE_NAME: ClassVar[str] = "default" """Name that the Lookup is registered as.""" @classmethod - def handle( # pylint: disable=arguments-differ - cls, value: str, context: Optional[CfnginContext] = None, **_: Any - ) -> Any: + def handle(cls, value: str, context: CfnginContext | None = None, **_: Any) -> Any: """Use a value from the environment or fall back to a default value. Allows defaults to be set at the config file level. 
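Every lookup handler below is re-typed the same way: ``LookupHandler`` is now generic over the context type the handler accepts, the ``arguments-differ`` suppressions disappear because ``handle()`` absorbs extra arguments via ``**kwargs``, and ``TYPE_NAME`` becomes a plain ``ClassVar[str]``. A sketch of a minimal handler under that shape, assuming the base-class helpers behave as the hunks above use them; ``EchoLookup`` and its ``TYPE_NAME`` are hypothetical:

    from __future__ import annotations

    from typing import TYPE_CHECKING, Any, ClassVar

    from runway.lookups.handlers.base import LookupHandler

    if TYPE_CHECKING:
        from runway.context import CfnginContext


    class EchoLookup(LookupHandler["CfnginContext"]):
        """Return the query unchanged; the type parameter pins the context type."""

        TYPE_NAME: ClassVar[str] = "echo"  # was Final[Literal["echo"]]

        @classmethod
        def handle(cls, value: str, context: CfnginContext, **_kwargs: Any) -> str:
            query, args = cls.parse(value)  # split the query from any "::key=value" args
            return cls.format_results(query, **args)  # shared post-processing of results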
diff --git a/runway/cfngin/lookups/handlers/dynamodb.py b/runway/cfngin/lookups/handlers/dynamodb.py index 61b6d4d57..f980972a5 100644 --- a/runway/cfngin/lookups/handlers/dynamodb.py +++ b/runway/cfngin/lookups/handlers/dynamodb.py @@ -1,20 +1,22 @@ """DynamoDB lookup.""" -# pyright: reportIncompatibleMethodOverride=none from __future__ import annotations import re -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, cast +from typing import TYPE_CHECKING, Any, ClassVar, cast from botocore.exceptions import ClientError -from typing_extensions import Final, Literal, TypedDict +from typing_extensions import Literal, TypedDict from ....lookups.handlers.base import LookupHandler from ....utils import BaseModel from ...utils import read_value_from_path if TYPE_CHECKING: + from mypy_boto3_dynamodb.type_defs import AttributeValueTypeDef + from ....context import CfnginContext + from ....lookups.handlers.base import ParsedArgsTypeDef _QUERY_PATTERN = r"""(?x) # @:. @@ -38,7 +40,7 @@ class ArgsDataModel(BaseModel): """Arguments data model.""" - region: Optional[str] = None + region: str | None = None """AWS region.""" @@ -61,7 +63,7 @@ class QueryDataModel(BaseModel): """Name of the DynamoDB Table to query.""" @property - def item_key(self) -> Dict[str, Dict[Literal["B", "N", "S"], Any]]: + def item_key(self) -> dict[str, AttributeValueTypeDef]: """Value to pass to boto3 ``.get_item()`` call as the ``Key`` argument. Raises: @@ -77,22 +79,21 @@ def item_key(self) -> Dict[str, Dict[Literal["B", "N", "S"], Any]]: f"doesn't match regex: {pattern.pattern}" ) return { - self.partition_key: { - cast( - Literal["B", "N", "S"], match.groupdict("S")["data_type"] - ): match.group("value") - } + self.partition_key: cast( + "AttributeValueTypeDef", + {match.groupdict("S")["data_type"]: match.group("value")}, + ) } -class DynamodbLookup(LookupHandler): +class DynamodbLookup(LookupHandler["CfnginContext"]): """DynamoDB lookup.""" - TYPE_NAME: Final[Literal["dynamodb"]] = "dynamodb" + TYPE_NAME: ClassVar[str] = "dynamodb" """Name that the Lookup is registered as.""" @classmethod - def parse(cls, value: str) -> Tuple[str, Dict[str, str]]: + def parse(cls, value: str) -> tuple[str, ParsedArgsTypeDef]: """Parse the value passed to the lookup. This overrides the default parsing to account for special requirements. @@ -109,7 +110,7 @@ def parse(cls, value: str) -> Tuple[str, Dict[str, str]]: """ raw_value = read_value_from_path(value) - args: Dict[str, str] = {} + args: ParsedArgsTypeDef = {} if "@" not in raw_value: raise ValueError( @@ -120,7 +121,7 @@ def parse(cls, value: str) -> Tuple[str, Dict[str, str]]: if ":" in table_info: args["region"], table_info = table_info.split(":", 1) - return "@".join([table_info, table_keys]), args + return f"{table_info}@{table_keys}", args @classmethod def parse_query(cls, value: str) -> QueryDataModel: @@ -136,12 +137,10 @@ def parse_query(cls, value: str) -> QueryDataModel: match = pattern.search(value) if not match: raise ValueError(f"Query '{value}' doesn't match regex:\n{pattern.pattern}") - return QueryDataModel.parse_obj(match.groupdict()) + return QueryDataModel.model_validate(match.groupdict()) @classmethod - def handle( # pylint: disable=arguments-differ - cls, value: str, context: CfnginContext, *__args: Any, **__kwargs: Any - ) -> Any: + def handle(cls, value: str, context: CfnginContext, *__args: Any, **__kwargs: Any) -> Any: """Get a value from a DynamoDB table. 
Args: @@ -158,7 +157,7 @@ def handle( # pylint: disable=arguments-differ """ raw_query, raw_args = cls.parse(value) query = cls.parse_query(raw_query) - args = ArgsDataModel.parse_obj(raw_args) + args = ArgsDataModel.model_validate(raw_args) table_keys = query.attribute.split(".") @@ -169,22 +168,16 @@ def handle( # pylint: disable=arguments-differ response = dynamodb.get_item( TableName=query.table_name, Key=query.item_key, - ProjectionExpression=",".join( - [query.partition_key, *key_dict["clean_table_keys"]] - ), + ProjectionExpression=",".join([query.partition_key, *key_dict["clean_table_keys"]]), ) except dynamodb.exceptions.ResourceNotFoundException as exc: - raise ValueError( - f"Can't find the DynamoDB table: {query.table_name}" - ) from exc + raise ValueError(f"Can't find the DynamoDB table: {query.table_name}") from exc except ClientError as exc: if exc.response["Error"]["Code"] == "ValidationException": raise ValueError( f"No DynamoDB record matched the partition key: {query.partition_key}" ) from exc - raise ValueError( - f"The DynamoDB lookup '{value}' encountered an error: {exc}" - ) from exc + raise ValueError(f"The DynamoDB lookup '{value}' encountered an error: {exc}") from exc # find and return the key from the dynamo data returned if "Item" in response: return _get_val_from_ddb_data(response["Item"], key_dict["new_keys"]) @@ -196,11 +189,11 @@ def handle( # pylint: disable=arguments-differ class ParsedLookupKey(TypedDict): """Return value of _lookup_key_parse.""" - clean_table_keys: List[str] - new_keys: List[Dict[Literal["L", "M", "N", "S"], str]] + clean_table_keys: list[str] + new_keys: list[dict[Literal["L", "M", "N", "S"], str]] -def _lookup_key_parse(table_keys: List[str]) -> ParsedLookupKey: +def _lookup_key_parse(table_keys: list[str]) -> ParsedLookupKey: """Return the order in which the stacks should be executed. Args: @@ -217,8 +210,8 @@ def _lookup_key_parse(table_keys: List[str]) -> ParsedLookupKey: # we need to parse the key lookup passed in regex_matcher = r"\[([^\]]+)]" valid_dynamodb_datatypes = ["L", "M", "N", "S"] - clean_table_keys: List[str] = [] - new_keys: List[Dict[Literal["L", "M", "N", "S"], str]] = [] + clean_table_keys: list[str] = [] + new_keys: list[dict[Literal["L", "M", "N", "S"], str]] = [] for key in table_keys: match = re.search(regex_matcher, key) @@ -229,7 +222,7 @@ def _lookup_key_parse(table_keys: List[str]) -> ParsedLookupKey: f"CFNgin does not support looking up the data type: {match.group(1)}" ) match_val = cast(Literal["L", "M", "N", "S"], match.group(1)) - key = key.replace(match.group(0), "") + key = key.replace(match.group(0), "") # noqa: PLW2901 new_keys.append({match_val: key}) else: new_keys.append({"S": key}) @@ -237,7 +230,9 @@ def _lookup_key_parse(table_keys: List[str]) -> ParsedLookupKey: return {"new_keys": new_keys, "clean_table_keys": clean_table_keys} -def _get_val_from_ddb_data(data: Dict[str, Any], keylist: List[Dict[str, str]]) -> Any: +def _get_val_from_ddb_data( + data: dict[str, Any], keylist: list[dict[Literal["L", "M", "N", "S"], str]] +) -> Any: """Return the value of the lookup. Args: @@ -249,7 +244,7 @@ def _get_val_from_ddb_data(data: Dict[str, Any], keylist: List[Dict[str, str]]) datatype. 
""" - next_type: Optional[str] = None + next_type: str | None = None # iterate through the keylist to find the matching key/datatype for key in keylist: for k in key: @@ -263,14 +258,14 @@ def _get_val_from_ddb_data(data: Dict[str, Any], keylist: List[Dict[str, str]]) # if type is list, convert it to a list and return return _convert_ddb_list_to_list(data[cast(str, next_type)]) if next_type == "N": - # TODO: handle various types of 'number' datatypes, (e.g. int, double) + # TODO (troyready): handle various types of 'number' datatypes, (e.g. int, double) # if a number, convert to an int and return return int(data[cast(str, next_type)]) # else, just assume its a string and return return str(data[cast(str, next_type)]) -def _convert_ddb_list_to_list(conversion_list: List[Dict[str, Any]]) -> List[Any]: +def _convert_ddb_list_to_list(conversion_list: list[dict[str, Any]]) -> list[Any]: """Return a python list without the DynamoDB datatypes. Args: @@ -280,8 +275,4 @@ def _convert_ddb_list_to_list(conversion_list: List[Dict[str, Any]]) -> List[Any Returns A sanitized list without the datatypes. """ - ret_list: List[Any] = [] - for val in conversion_list: - for v in val: - ret_list.append(val[v]) - return ret_list + return [val[v] for val in conversion_list for v in val] diff --git a/runway/cfngin/lookups/handlers/envvar.py b/runway/cfngin/lookups/handlers/envvar.py index a38d3251a..8f2989a09 100644 --- a/runway/cfngin/lookups/handlers/envvar.py +++ b/runway/cfngin/lookups/handlers/envvar.py @@ -1,11 +1,8 @@ """Environment variable lookup.""" -# pyright: reportIncompatibleMethodOverride=none import logging import os -from typing import Any - -from typing_extensions import Final, Literal +from typing import Any, ClassVar from ....lookups.handlers.base import LookupHandler from ...utils import read_value_from_path @@ -13,15 +10,15 @@ LOGGER = logging.getLogger(__name__) -class EnvvarLookup(LookupHandler): +class EnvvarLookup(LookupHandler[Any]): """Environment variable lookup.""" DEPRECATION_MSG = "envvar Lookup has been deprecated; use the env lookup instead" - TYPE_NAME: Final[Literal["envvar"]] = "envvar" + TYPE_NAME: ClassVar[str] = "envvar" """Name that the Lookup is registered as.""" @classmethod - def handle(cls, value: str, **_: Any) -> str: # pylint: disable=arguments-differ + def handle(cls, value: str, *_args: Any, **_: Any) -> str: """Retrieve an environment variable. 
Args: diff --git a/runway/cfngin/lookups/handlers/file.py b/runway/cfngin/lookups/handlers/file.py index dcd73dfe0..2f5f57c43 100644 --- a/runway/cfngin/lookups/handlers/file.py +++ b/runway/cfngin/lookups/handlers/file.py @@ -1,32 +1,34 @@ """File lookup.""" -# pylint: disable=arguments-differ,no-self-argument -# pyright: reportIncompatibleMethodOverride=none from __future__ import annotations import base64 import collections.abc import json import re -from typing import Any, Callable, Dict, List, Mapping, Sequence, Tuple, Union, overload +from typing import TYPE_CHECKING, Any, Callable, ClassVar, overload import yaml -from pydantic import validator +from pydantic import field_validator from troposphere import Base64, GenericHelperFn -from typing_extensions import Final, Literal from ....lookups.handlers.base import LookupHandler from ....utils import BaseModel from ...utils import read_value_from_path +if TYPE_CHECKING: + from collections.abc import Mapping, Sequence + + from typing_extensions import Literal, TypeAlias + + from ....lookups.handlers.base import ParsedArgsTypeDef + _PARAMETER_PATTERN = re.compile(r"{{([::|\w]+)}}") -ParameterizedObjectTypeDef = Union[str, Mapping[str, Any], Sequence[Any], Any] -ParameterizedObjectReturnTypeDef = Union[ - Dict[str, "ParameterizedObjectReturnTypeDef"], - GenericHelperFn, - List["ParameterizedObjectReturnTypeDef"], -] +ParameterizedObjectTypeDef: TypeAlias = "str | Mapping[str, Any] | Sequence[Any] | Any" +ParameterizedObjectReturnTypeDef: TypeAlias = ( + "dict[str, ParameterizedObjectReturnTypeDef] | GenericHelperFn | list[ParameterizedObjectReturnTypeDef]" +) class ArgsDataModel(BaseModel): @@ -35,7 +37,8 @@ class ArgsDataModel(BaseModel): codec: str """Codec that will be used to parse and/or manipulate the data.""" - @validator("codec", allow_reuse=True) + @field_validator("codec") + @classmethod def _validate_supported_codec(cls, v: str) -> str: """Validate that the selected codec is supported.""" if v in CODECS: @@ -43,14 +46,14 @@ def _validate_supported_codec(cls, v: str) -> str: raise ValueError(f"Codec '{v}' must be one of: {', '.join(CODECS)}") -class FileLookup(LookupHandler): +class FileLookup(LookupHandler[Any]): """File lookup.""" - TYPE_NAME: Final[Literal["file"]] = "file" + TYPE_NAME: ClassVar[str] = "file" """Name that the Lookup is registered as.""" @classmethod - def parse(cls, value: str) -> Tuple[str, Dict[str, str]]: + def parse(cls, value: str) -> tuple[str, ParsedArgsTypeDef]: """Parse the value passed to the lookup. This overrides the default parsing to account for special requirements. @@ -65,21 +68,22 @@ def parse(cls, value: str) -> Tuple[str, Dict[str, str]]: ValueError: The value provided does not match the expected regex. 
""" - args: Dict[str, str] = {} + args: ParsedArgsTypeDef = {} try: - args["codec"], data_or_path = value.split(":", 1) + args["codec"], data_or_path = value.split( # pyright: ignore[reportGeneralTypeIssues] + ":", 1 + ) except ValueError: raise ValueError( - f"Query '{value}' doesn't match regex: " - rf"^(?P[{'|'.join(CODECS)}]:.+$)" + rf"Query '{value}' doesn't match regex: ^(?P[{'|'.join(CODECS)}]:.+$)" ) from None return read_value_from_path(data_or_path), args @classmethod - def handle(cls, value: str, **_: Any) -> Any: + def handle(cls, value: str, *_args: Any, **_kwargs: Any) -> Any: """Translate a filename into the file contents.""" data, raw_args = cls.parse(value) - args = ArgsDataModel.parse_obj(raw_args) + args = ArgsDataModel.model_validate(raw_args) return CODECS[args.codec](data) @@ -97,7 +101,7 @@ def _parameterize_string(raw: str) -> GenericHelperFn: are found, and a composition of CloudFormation calls otherwise. """ - parts: List[Any] = [] + parts: list[Any] = [] s_index = 0 for match in _PARAMETER_PATTERN.finditer(raw): @@ -140,7 +144,7 @@ def parameterized_codec(raw: str, b64: bool = False) -> Any: @overload -def _parameterize_obj(obj: Union[bytes, str]) -> GenericHelperFn: ... +def _parameterize_obj(obj: bytes | str) -> GenericHelperFn: ... @overload @@ -148,7 +152,7 @@ def _parameterize_obj(obj: Mapping[str, Any]) -> ParameterizedObjectReturnTypeDe @overload -def _parameterize_obj(obj: List[Any]) -> ParameterizedObjectReturnTypeDef: ... +def _parameterize_obj(obj: list[Any]) -> ParameterizedObjectReturnTypeDef: ... def _parameterize_obj( @@ -179,17 +183,17 @@ def _parameterize_obj( def yaml_codec(raw: str, parameterized: bool = False) -> Any: """YAML codec.""" - data = yaml.load(raw, Loader=yaml.SafeLoader) + data: Mapping[str, Any] = yaml.load(raw, Loader=yaml.SafeLoader) return _parameterize_obj(data) if parameterized else data def json_codec(raw: str, parameterized: bool = False) -> Any: """JSON codec.""" - data = json.loads(raw) + data: Mapping[str, Any] = json.loads(raw) return _parameterize_obj(data) if parameterized else data -CODECS: Dict[str, Callable[[str], Any]] = { +CODECS: dict[str, Callable[[str], Any]] = { "base64": lambda x: base64.b64encode(x.encode("utf8")).decode("utf-8"), "json": lambda x: json_codec(x, parameterized=False), "json-parameterized": lambda x: json_codec(x, parameterized=True), diff --git a/runway/cfngin/lookups/handlers/hook_data.py b/runway/cfngin/lookups/handlers/hook_data.py index 433573c5d..38e7ee2a4 100644 --- a/runway/cfngin/lookups/handlers/hook_data.py +++ b/runway/cfngin/lookups/handlers/hook_data.py @@ -1,33 +1,30 @@ """Hook data lookup.""" -# pyright: reportIncompatibleMethodOverride=none from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, ClassVar from troposphere import BaseAWSObject -from typing_extensions import Final, Literal from ....lookups.handlers.base import LookupHandler from ....utils import MutableMap if TYPE_CHECKING: + from ....context import CfnginContext LOGGER = logging.getLogger(__name__) -class HookDataLookup(LookupHandler): +class HookDataLookup(LookupHandler["CfnginContext"]): """Hook data lookup.""" - TYPE_NAME: Final[Literal["hook_data"]] = "hook_data" + TYPE_NAME: ClassVar[str] = "hook_data" """Name that the Lookup is registered as.""" @classmethod - def handle( # pylint: disable=arguments-differ - cls, value: str, context: CfnginContext, **_: Any - ) -> Any: + def handle(cls, value: str, context: CfnginContext, **_: Any) 
-> Any: """Return the data from ``hook_data``. Args: @@ -41,11 +38,7 @@ def handle( # pylint: disable=arguments-differ result = hook_data.find(query, args.get("default")) - if ( - isinstance(result, BaseAWSObject) - and args.get("get") - and not args.get("load") - ): + if isinstance(result, BaseAWSObject) and args.get("get") and not args.get("load"): args["load"] = "troposphere" if not result: diff --git a/runway/cfngin/lookups/handlers/kms.py b/runway/cfngin/lookups/handlers/kms.py index e31699ffe..4eafb5f4d 100644 --- a/runway/cfngin/lookups/handlers/kms.py +++ b/runway/cfngin/lookups/handlers/kms.py @@ -1,13 +1,10 @@ """AWS KMS lookup.""" -# pyright: reportIncompatibleMethodOverride=none from __future__ import annotations import codecs import logging -from typing import TYPE_CHECKING, Any, BinaryIO, Dict, Tuple, Union, cast - -from typing_extensions import Final, Literal +from typing import TYPE_CHECKING, Any, BinaryIO, ClassVar, cast from ....lookups.handlers.base import LookupHandler from ....utils import DOC_SITE @@ -15,11 +12,12 @@ if TYPE_CHECKING: from ....context import CfnginContext + from ....lookups.handlers.base import ParsedArgsTypeDef LOGGER = logging.getLogger(__name__) -class KmsLookup(LookupHandler): +class KmsLookup(LookupHandler["CfnginContext"]): """AWS KMS lookup.""" DEPRECATION_MSG = ( @@ -27,11 +25,11 @@ class KmsLookup(LookupHandler): "to learn how to use the new lookup query syntax visit " f"{DOC_SITE}/page/cfngin/lookups/kms.html" ) - TYPE_NAME: Final[Literal["kms"]] = "kms" + TYPE_NAME: ClassVar[str] = "kms" """Name that the Lookup is registered as.""" @classmethod - def legacy_parse(cls, value: str) -> Tuple[str, Dict[str, str]]: + def legacy_parse(cls, value: str) -> tuple[str, ParsedArgsTypeDef]: """Retain support for legacy lookup syntax. Format of value:: @@ -44,9 +42,7 @@ def legacy_parse(cls, value: str) -> Tuple[str, Dict[str, str]]: return value, {"region": region} @classmethod - def handle( # pylint: disable=arguments-differ - cls, value: str, context: CfnginContext, **_: Any - ) -> str: + def handle(cls, value: str, context: CfnginContext, **_: Any) -> str: r"""Decrypt the specified value with a master key in KMS. 
Args: @@ -62,7 +58,7 @@ def handle( # pylint: disable=arguments-differ kms = context.get_session(region=args.get("region")).client("kms") decrypted = cast( - Union[BinaryIO, bytes], + "BinaryIO | bytes", kms.decrypt(CiphertextBlob=codecs.decode(query.encode(), "base64")).get( "Plaintext", b"" ), diff --git a/runway/cfngin/lookups/handlers/output.py b/runway/cfngin/lookups/handlers/output.py index 32d2ebf8b..575ad3811 100644 --- a/runway/cfngin/lookups/handlers/output.py +++ b/runway/cfngin/lookups/handlers/output.py @@ -1,13 +1,10 @@ """AWS CloudFormation Output lookup.""" -# pyright: reportIncompatibleMethodOverride=none from __future__ import annotations import logging import re -from typing import TYPE_CHECKING, Any, Dict, NamedTuple, Set, Tuple - -from typing_extensions import Final, Literal +from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple from ....exceptions import OutputDoesNotExist from ....lookups.handlers.base import LookupHandler @@ -15,7 +12,9 @@ from ...exceptions import StackDoesNotExist if TYPE_CHECKING: + from ....context import CfnginContext + from ....lookups.handlers.base import ParsedArgsTypeDef from ....variables import VariableValue LOGGER = logging.getLogger(__name__) @@ -28,7 +27,7 @@ class OutputQuery(NamedTuple): output_name: str -class OutputLookup(LookupHandler): +class OutputLookup(LookupHandler["CfnginContext"]): """AWS CloudFormation Output lookup.""" DEPRECATION_MSG = ( @@ -36,11 +35,11 @@ class OutputLookup(LookupHandler): "to learn how to use the new lookup query syntax visit " f"{DOC_SITE}/page/cfngin/lookups/output.html" ) - TYPE_NAME: Final[Literal["output"]] = "output" + TYPE_NAME: ClassVar[str] = "output" """Name that the Lookup is registered as.""" @classmethod - def legacy_parse(cls, value: str) -> Tuple[OutputQuery, Dict[str, str]]: + def legacy_parse(cls, value: str) -> tuple[OutputQuery, ParsedArgsTypeDef]: """Retain support for legacy lookup syntax. Format of value: @@ -51,9 +50,7 @@ def legacy_parse(cls, value: str) -> Tuple[OutputQuery, Dict[str, str]]: return deconstruct(value), {} @classmethod - def handle( # pylint: disable=arguments-differ - cls, value: str, context: CfnginContext, **_: Any - ) -> str: + def handle(cls, value: str, context: CfnginContext, **_: Any) -> str: """Fetch an output from the designated stack. Args: @@ -82,9 +79,7 @@ def handle( # pylint: disable=arguments-differ raise StackDoesNotExist(context.get_fqn(query.stack_name)) if "default" in args: # handle falsy default - return cls.format_results( - stack.outputs.get(query.output_name, args["default"]), **args - ) + return cls.format_results(stack.outputs.get(query.output_name, args["default"]), **args) try: return cls.format_results(stack.outputs[query.output_name], **args) @@ -94,7 +89,7 @@ def handle( # pylint: disable=arguments-differ ) from None @classmethod - def dependencies(cls, lookup_query: VariableValue) -> Set[str]: + def dependencies(cls, lookup_query: VariableValue) -> set[str]: """Calculate any dependencies required to perform this lookup. Note that lookup_query may not be (completely) resolved at this time. 
@@ -127,7 +122,7 @@ def dependencies(cls, lookup_query: VariableValue) -> Set[str]: return set() -def deconstruct(value: str) -> OutputQuery: # TODO remove in next major release +def deconstruct(value: str) -> OutputQuery: # TODO (kyle): remove in next major release """Deconstruct the value.""" try: stack_name, output_name = value.split("::") diff --git a/runway/cfngin/lookups/handlers/rxref.py b/runway/cfngin/lookups/handlers/rxref.py index ab8285586..a03db93e2 100644 --- a/runway/cfngin/lookups/handlers/rxref.py +++ b/runway/cfngin/lookups/handlers/rxref.py @@ -1,12 +1,9 @@ """Handler for fetching outputs from a stack in the current namespace.""" -# pyright: reportIncompatibleMethodOverride=none from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Dict, Tuple - -from typing_extensions import Final, Literal +from typing import TYPE_CHECKING, Any, ClassVar from ....lookups.handlers.base import LookupHandler from ....lookups.handlers.cfn import CfnLookup @@ -15,12 +12,13 @@ if TYPE_CHECKING: from ....context import CfnginContext + from ....lookups.handlers.base import ParsedArgsTypeDef from ...providers.aws.default import Provider LOGGER = logging.getLogger(__name__) -class RxrefLookup(LookupHandler): +class RxrefLookup(LookupHandler["CfnginContext"]): """Rxref lookup.""" DEPRECATION_MSG = ( @@ -28,11 +26,11 @@ class RxrefLookup(LookupHandler): "to learn how to use the new lookup query syntax visit " f"{DOC_SITE}/page/cfngin/lookups/rxref.html" ) - TYPE_NAME: Final[Literal["rxref"]] = "rxref" + TYPE_NAME: ClassVar[str] = "rxref" """Name that the Lookup is registered as.""" @classmethod - def legacy_parse(cls, value: str) -> Tuple[OutputQuery, Dict[str, str]]: + def legacy_parse(cls, value: str) -> tuple[OutputQuery, ParsedArgsTypeDef]: """Retain support for legacy lookup syntax. Format of value: @@ -43,9 +41,7 @@ def legacy_parse(cls, value: str) -> Tuple[OutputQuery, Dict[str, str]]: return deconstruct(value), {} @classmethod - def handle( # pylint: disable=arguments-differ - cls, value: str, context: CfnginContext, provider: Provider, **_: Any - ) -> Any: + def handle(cls, value: str, context: CfnginContext, *, provider: Provider, **_: Any) -> Any: """Fetch an output from the designated stack in the current namespace. The ``output`` lookup supports fetching outputs from stacks created diff --git a/runway/cfngin/lookups/handlers/split.py b/runway/cfngin/lookups/handlers/split.py index 0b4b0e5c8..c3239359a 100644 --- a/runway/cfngin/lookups/handlers/split.py +++ b/runway/cfngin/lookups/handlers/split.py @@ -1,23 +1,18 @@ """Split lookup.""" -# pyright: reportIncompatibleMethodOverride=none -from typing import Any, List - -from typing_extensions import Final, Literal +from typing import Any, ClassVar from ....lookups.handlers.base import LookupHandler -class SplitLookup(LookupHandler): +class SplitLookup(LookupHandler[Any]): """Split lookup.""" - TYPE_NAME: Final[Literal["split"]] = "split" + TYPE_NAME: ClassVar[str] = "split" """Name that the Lookup is registered as.""" @classmethod - def handle( # pylint: disable=arguments-differ - cls, value: str, **_: Any - ) -> List[str]: + def handle(cls, value: str, *_args: Any, **_kwargs: Any) -> list[str]: """Split the supplied string on the given delimiter, providing a list. 
Args: diff --git a/runway/cfngin/lookups/handlers/xref.py b/runway/cfngin/lookups/handlers/xref.py index a5a7a1747..85bf02b75 100644 --- a/runway/cfngin/lookups/handlers/xref.py +++ b/runway/cfngin/lookups/handlers/xref.py @@ -1,17 +1,15 @@ """Handler for fetching outputs from fully qualified stacks.""" -# pyright: reportIncompatibleMethodOverride=none from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any - -from typing_extensions import Final, Literal +from typing import TYPE_CHECKING, Any, ClassVar from ....lookups.handlers.base import LookupHandler from .output import deconstruct if TYPE_CHECKING: + from ...providers.aws.default import Provider LOGGER = logging.getLogger(__name__) @@ -19,17 +17,15 @@ XREF_PERSISTENT_STATE = {"has_warned": False} -class XrefLookup(LookupHandler): +class XrefLookup(LookupHandler[Any]): """Xref lookup.""" DEPRECATION_MSG = "xref Lookup has been deprecated; use the cfn lookup instead" - TYPE_NAME: Final[Literal["xref"]] = "xref" + TYPE_NAME: ClassVar[str] = "xref" """Name that the Lookup is registered as.""" @classmethod - def handle( # pylint: disable=arguments-differ,arguments-renamed - cls, value: str, provider: Provider, **_: Any - ) -> str: + def handle(cls, value: str, *_args: Any, provider: Provider, **_kwargs: Any) -> str: """Fetch an output from the designated, fully qualified stack. The `output` handler supports fetching outputs from stacks created diff --git a/runway/cfngin/lookups/registry.py b/runway/cfngin/lookups/registry.py index eac3e2e34..ad4c84edd 100644 --- a/runway/cfngin/lookups/registry.py +++ b/runway/cfngin/lookups/registry.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Dict, Type, Union, cast +from typing import Any, cast from ...lookups.handlers.base import LookupHandler from ...lookups.handlers.cfn import CfnLookup @@ -25,12 +25,12 @@ from .handlers.split import SplitLookup from .handlers.xref import XrefLookup -CFNGIN_LOOKUP_HANDLERS: Dict[str, Type[LookupHandler]] = {} +CFNGIN_LOOKUP_HANDLERS: dict[str, type[LookupHandler[Any]]] = {} LOGGER = logging.getLogger(__name__) def register_lookup_handler( - lookup_type: str, handler_or_path: Union[str, Type[LookupHandler]] + lookup_type: str, handler_or_path: str | type[LookupHandler[Any]] ) -> None: """Register a lookup handler. 
@@ -52,7 +52,7 @@ def register_lookup_handler( CFNGIN_LOOKUP_HANDLERS[lookup_type] = handler return # Handler is a not a new-style handler - except Exception: # pylint: disable=broad-except + except Exception: # noqa: BLE001 LOGGER.debug("failed to validate lookup handler", exc_info=True) LOGGER.error( 'lookup "%s" uses an unsupported format; to learn how to write ' @@ -82,9 +82,7 @@ def unregister_lookup_handler(lookup_type: str) -> None: register_lookup_handler(AmiLookup.TYPE_NAME, AmiLookup) register_lookup_handler(AwsLambdaLookup.TYPE_NAME, AwsLambdaLookup) register_lookup_handler(AwsLambdaLookup.Code.TYPE_NAME, AwsLambdaLookup.Code) -register_lookup_handler( - AwsLambdaLookup.CodeSha256.TYPE_NAME, AwsLambdaLookup.CodeSha256 -) +register_lookup_handler(AwsLambdaLookup.CodeSha256.TYPE_NAME, AwsLambdaLookup.CodeSha256) register_lookup_handler( AwsLambdaLookup.CompatibleArchitectures.TYPE_NAME, AwsLambdaLookup.CompatibleArchitectures, @@ -93,15 +91,11 @@ def unregister_lookup_handler(lookup_type: str) -> None: AwsLambdaLookup.CompatibleRuntimes.TYPE_NAME, AwsLambdaLookup.CompatibleRuntimes ) register_lookup_handler(AwsLambdaLookup.Content.TYPE_NAME, AwsLambdaLookup.Content) -register_lookup_handler( - AwsLambdaLookup.LicenseInfo.TYPE_NAME, AwsLambdaLookup.LicenseInfo -) +register_lookup_handler(AwsLambdaLookup.LicenseInfo.TYPE_NAME, AwsLambdaLookup.LicenseInfo) register_lookup_handler(AwsLambdaLookup.Runtime.TYPE_NAME, AwsLambdaLookup.Runtime) register_lookup_handler(AwsLambdaLookup.S3Bucket.TYPE_NAME, AwsLambdaLookup.S3Bucket) register_lookup_handler(AwsLambdaLookup.S3Key.TYPE_NAME, AwsLambdaLookup.S3Key) -register_lookup_handler( - AwsLambdaLookup.S3ObjectVersion.TYPE_NAME, AwsLambdaLookup.S3ObjectVersion -) +register_lookup_handler(AwsLambdaLookup.S3ObjectVersion.TYPE_NAME, AwsLambdaLookup.S3ObjectVersion) register_lookup_handler(CfnLookup.TYPE_NAME, CfnLookup) register_lookup_handler(DefaultLookup.TYPE_NAME, DefaultLookup) register_lookup_handler(DynamodbLookup.TYPE_NAME, DynamodbLookup) diff --git a/runway/cfngin/plan.py b/runway/cfngin/plan.py index c5f7c059e..c32d8aa87 100644 --- a/runway/cfngin/plan.py +++ b/runway/cfngin/plan.py @@ -4,43 +4,24 @@ import json import logging -import os import threading import time import uuid -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - List, - NoReturn, - Optional, - OrderedDict, - Set, - TypeVar, - Union, - overload, -) +from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable, NoReturn, TypeVar, overload from .._logging import LogLevels, PrefixAdaptor from ..utils import merge_dicts from .dag import DAG, DAGValidationError, walk from .exceptions import CancelExecution, GraphError, PersistentGraphLocked, PlanFailed from .stack import Stack -from .status import ( - COMPLETE, - FAILED, - PENDING, - SKIPPED, - SUBMITTED, - FailedStatus, - SkippedStatus, -) +from .status import COMPLETE, FAILED, PENDING, SKIPPED, SUBMITTED, FailedStatus, SkippedStatus from .ui import ui from .utils import stack_template_key_name if TYPE_CHECKING: + from collections import OrderedDict + from ..context import CfnginContext from .providers.aws.default import Provider from .status import Status @@ -51,14 +32,14 @@ @overload -def json_serial(obj: Set[_T]) -> List[_T]: ... +def json_serial(obj: set[_T]) -> list[_T]: ... @overload -def json_serial(obj: Union[Dict[Any, Any], int, List[Any], str]) -> NoReturn: ... +def json_serial(obj: dict[Any, Any] | int | list[Any] | str) -> NoReturn: ... 
-def json_serial(obj: Union[Set[Any], Any]) -> Any: +def json_serial(obj: set[Any] | Any) -> Any: """Serialize json. Args: @@ -82,10 +63,7 @@ def merge_graphs(graph1: Graph, graph2: Graph) -> Graph: """ merged_graph_dict = merge_dicts(graph1.to_dict().copy(), graph2.to_dict()) - steps = [ - graph1.steps.get(name, graph2.steps.get(name)) - for name in merged_graph_dict.keys() - ] + steps = [graph1.steps.get(name, graph2.steps.get(name)) for name in merged_graph_dict] return Graph.from_steps([step for step in steps if step]) @@ -103,19 +81,19 @@ class Step: """ - fn: Optional[Callable[..., Any]] + fn: Callable[..., Any] | None last_updated: float logger: PrefixAdaptor stack: Stack status: Status - watch_func: Optional[Callable[..., Any]] + watch_func: Callable[..., Any] | None def __init__( self, stack: Stack, *, - fn: Optional[Callable[..., Any]] = None, - watch_func: Optional[Callable[..., Any]] = None, + fn: Callable[..., Any] | None = None, + watch_func: Callable[..., Any] | None = None, ) -> None: """Instantiate class. @@ -139,9 +117,7 @@ def run(self) -> bool: stop_watcher = threading.Event() watcher = None if self.watch_func: - watcher = threading.Thread( - target=self.watch_func, args=(self.stack, stop_watcher) - ) + watcher = threading.Thread(target=self.watch_func, args=(self.stack, stop_watcher)) watcher.start() try: @@ -161,7 +137,7 @@ def _run_once(self) -> Status: status = self.fn(self.stack, status=self.status) except CancelExecution: status = SkippedStatus("canceled execution") - except Exception as err: # pylint: disable=broad-except + except Exception as err: LOGGER.exception(err) status = FailedStatus(reason=str(err)) self.set_status(status) @@ -177,12 +153,12 @@ def name(self) -> str: return self.stack.name @property - def requires(self) -> Set[str]: + def requires(self) -> set[str]: """Return a list of step names this step depends on.""" return self.stack.requires @property - def required_by(self) -> Set[str]: + def required_by(self) -> set[str]: """Return a list of step names that depend on this step.""" return self.stack.required_by @@ -265,9 +241,9 @@ def from_stack_name( cls, stack_name: str, context: CfnginContext, - requires: Optional[Union[List[str], Set[str]]] = None, - fn: Optional[Callable[..., Status]] = None, - watch_func: Optional[Callable[..., Any]] = None, + requires: list[str] | set[str] | None = None, + fn: Callable[..., Status] | None = None, + watch_func: Callable[..., Any] | None = None, ) -> Step: """Create a step using only a stack name. @@ -282,10 +258,9 @@ def from_stack_name( step action. """ - # pylint: disable=import-outside-toplevel from runway.config.models.cfngin import CfnginStackDefinitionModel - stack_def = CfnginStackDefinitionModel.construct( + stack_def = CfnginStackDefinitionModel.model_construct( name=stack_name, requires=requires or [] ) stack = Stack(stack_def, context) @@ -294,13 +269,11 @@ def from_stack_name( @classmethod def from_persistent_graph( cls, - graph_dict: Union[ - Dict[str, List[str]], Dict[str, Set[str]], OrderedDict[str, Set[str]] - ], + graph_dict: dict[str, list[str]] | dict[str, set[str]] | OrderedDict[str, set[str]], context: CfnginContext, - fn: Optional[Callable[..., Status]] = None, - watch_func: Optional[Callable[..., Any]] = None, - ) -> List[Step]: + fn: Callable[..., Status] | None = None, + watch_func: Callable[..., Any] | None = None, + ) -> list[Step]: """Create a steps for a persistent graph dict. 
Args: @@ -347,11 +320,9 @@ class Graph: """ dag: DAG - steps: Dict[str, Step] + steps: dict[str, Step] - def __init__( - self, steps: Optional[Dict[str, Step]] = None, dag: Optional[DAG] = None - ) -> None: + def __init__(self, steps: dict[str, Step] | None = None, dag: DAG | None = None) -> None: """Instantiate class. Args: @@ -423,7 +394,7 @@ def add_step_if_not_exists( except GraphError: continue - def add_steps(self, steps: List[Step]) -> None: + def add_steps(self, steps: list[Step]) -> None: """Add a list of steps. Args: @@ -501,7 +472,7 @@ def fn(step_name: str) -> Any: return walker(self.dag, fn) - def downstream(self, step_name: str) -> List[Step]: + def downstream(self, step_name: str) -> list[Step]: """Return the direct dependencies of the given step.""" return [self.steps[dep] for dep in self.dag.downstream(step_name)] @@ -513,7 +484,7 @@ def transposed(self) -> Graph: """ return Graph(steps=self.steps, dag=self.dag.transpose()) - def filtered(self, step_names: List[str]) -> Graph: + def filtered(self, step_names: list[str]) -> Graph: """Return a "filtered" version of this graph. Args: @@ -522,16 +493,16 @@ def filtered(self, step_names: List[str]) -> Graph: """ return Graph(steps=self.steps, dag=self.dag.filter(step_names)) - def topological_sort(self) -> List[Step]: + def topological_sort(self) -> list[Step]: """Perform a topological sort of the underlying DAG.""" nodes = self.dag.topological_sort() return [self.steps[step_name] for step_name in nodes] - def to_dict(self) -> OrderedDict[str, Set[str]]: + def to_dict(self) -> OrderedDict[str, set[str]]: """Return the underlying DAG as a dictionary.""" return self.dag.graph - def dumps(self, indent: Optional[int] = None) -> str: + def dumps(self, indent: int | None = None) -> str: """Output the graph as a json serialized string for storage. Args: @@ -543,9 +514,7 @@ def dumps(self, indent: Optional[int] = None) -> str: @classmethod def from_dict( cls, - graph_dict: Union[ - Dict[str, List[str]], Dict[str, Set[str]], OrderedDict[str, Set[str]] - ], + graph_dict: dict[str, list[str]] | dict[str, set[str]] | OrderedDict[str, set[str]], context: CfnginContext, ) -> Graph: """Create a Graph from a graph dict. @@ -558,7 +527,7 @@ def from_dict( return cls.from_steps(Step.from_persistent_graph(graph_dict, context)) @classmethod - def from_steps(cls, steps: List[Step]) -> Graph: + def from_steps(cls, steps: list[Step]) -> Graph: """Create a Graph from Steps. Args: @@ -588,7 +557,7 @@ class Plan: """ - context: Optional[CfnginContext] + context: CfnginContext | None description: str graph: Graph id: uuid.UUID @@ -599,7 +568,7 @@ def __init__( self, description: str, graph: Graph, - context: Optional[CfnginContext] = None, + context: CfnginContext | None = None, reverse: bool = False, require_unlocked: bool = True, ) -> None: @@ -627,11 +596,7 @@ def __init__( self.locked = self.context.persistent_graph_locked if self.context.stack_names: - nodes = [ - target - for target in self.context.stack_names - if graph.steps.get(target) - ] + nodes = [target for target in self.context.stack_names if graph.steps.get(target)] graph = graph.filtered(nodes) else: @@ -639,7 +604,7 @@ def __init__( self.graph = graph - def outline(self, level: int = logging.INFO, message: str = ""): + def outline(self, level: int = logging.INFO, message: str = "") -> None: """Print an outline of the actions the plan is going to take. 
         The outline will represent the rough ordering of the steps that will be
@@ -669,7 +634,7 @@ def dump(
         *,
         directory: str,
         context: CfnginContext,
-        provider: Optional[Provider] = None,
+        provider: Provider | None = None,
     ) -> Any:
         """Output the rendered blueprint for all stacks in the plan.

@@ -680,30 +645,26 @@ def dump(

         """
         LOGGER.info('dumping "%s"...', self.description)
-        directory = os.path.expanduser(directory)
-        if not os.path.exists(directory):
-            os.makedirs(directory)
+        dir_path = Path(directory).expanduser()
+        dir_path.mkdir(exist_ok=True, parents=True)

         def walk_func(step: Step) -> bool:
             """Walk function."""
             step.stack.resolve(context=context, provider=provider)
             blueprint = step.stack.blueprint
             filename = stack_template_key_name(blueprint)
-            path = os.path.join(directory, filename)
-
-            blueprint_dir = os.path.dirname(path)
-            if not os.path.exists(blueprint_dir):
-                os.makedirs(blueprint_dir)
+            path = dir_path / filename
+            path.parent.mkdir(exist_ok=True, parents=True)

             LOGGER.info('writing stack "%s" -> %s', step.name, path)
-            with open(path, "w", encoding="utf-8") as _file:
+            with Path(path).open("w", encoding="utf-8") as _file:
                 _file.write(blueprint.rendered)

             return True

         return self.graph.walk(walk, walk_func)

-    def execute(self, *args: Any, **kwargs: Any):
+    def execute(self, *args: Any, **kwargs: Any) -> None:
         """Walk each step in the underlying graph.

         Raises:
@@ -752,15 +713,12 @@ def walk_func(step: Step) -> bool:
                 return result

             if step.completed or (
-                step.skipped
-                and step.status.reason == ("does not exist in cloudformation")
+                step.skipped and step.status.reason == ("does not exist in cloudformation")
             ):
                 fn_name = step.fn.__name__ if callable(step.fn) else step.fn
                 if fn_name == "_destroy_stack":
                     self.context.persistent_graph.pop(step)
-                    LOGGER.debug(
-                        "removed step '%s' from the persistent graph", step.name
-                    )
+                    LOGGER.debug("removed step '%s' from the persistent graph", step.name)
                 elif fn_name == "_launch_stack":
                     self.context.persistent_graph.add_step_if_not_exists(
                         step, add_dependencies=True, add_dependents=True
@@ -779,17 +737,17 @@ def lock_code(self) -> str:
         return str(self.id)

     @property
-    def steps(self) -> List[Step]:
+    def steps(self) -> list[Step]:
         """Return a list of all steps in the plan."""
         steps = self.graph.topological_sort()
         steps.reverse()
         return steps

     @property
-    def step_names(self) -> List[str]:
+    def step_names(self) -> list[str]:
         """Return a list of all step names."""
         return [step.name for step in self.steps]

-    def keys(self) -> List[str]:
+    def keys(self) -> list[str]:
         """Return a list of all step names."""
         return self.step_names
diff --git a/runway/cfngin/providers/aws/default.py b/runway/cfngin/providers/aws/default.py
index b91e707e5..9c36a05d0 100644
--- a/runway/cfngin/providers/aws/default.py
+++ b/runway/cfngin/providers/aws/default.py
@@ -1,24 +1,19 @@
 """Default AWS Provider."""
-# pylint: disable=too-many-lines,too-many-public-methods
 from __future__ import annotations

+import functools
 import json
 import logging
+import operator
 import sys
 import threading
 import time
+from collections.abc import Iterable
 from typing import (
     TYPE_CHECKING,
     Any,
     Callable,
-    Dict,
-    Iterable,
-    List,
-    Optional,
-    Set,
-    Tuple,
-    Union,
     cast,
 )
 from urllib.parse import urlparse, urlunparse
@@ -88,7 +83,7 @@ def get_cloudformation_client(session: boto3.Session) -> CloudFormationClient:
     return session.client("cloudformation", config=config)


-def get_output_dict(stack: StackTypeDef) -> Dict[str, str]:
+def get_output_dict(stack: StackTypeDef) -> dict[str, str]:
     """Return a dict of key/values for the outputs for a given CF stack.

     Args:
@@ -112,11 +107,11 @@ def get_output_dict(stack: StackTypeDef) -> Dict[str, str]:
 def s3_fallback(
     fqn: str,
     template: Template,
-    parameters: List[ParameterTypeDef],
-    tags: List[TagTypeDef],
+    parameters: list[ParameterTypeDef],
+    tags: list[TagTypeDef],
     method: Callable[..., Any],
-    change_set_name: Optional[str] = None,
-    service_role: Optional[str] = None,
+    change_set_name: str | None = None,
+    service_role: str | None = None,
 ) -> Any:
     """Falling back to legacy CFNgin S3 bucket region for templates."""
     LOGGER.warning(
@@ -132,7 +127,9 @@ def s3_fallback(
     template_url = template.url
     if template_url:
         template_url_parsed = urlparse(template_url)
-        template_url_parsed = template_url_parsed._replace(netloc="s3.amazonaws.com")
+        template_url_parsed = template_url_parsed._replace(  # noqa: SLF001
+            netloc="s3.amazonaws.com"
+        )
         template_url = urlunparse(template_url_parsed)
     LOGGER.debug("using template_url: %s", template_url)
     args = generate_cloudformation_args(
@@ -160,25 +157,21 @@ def get_change_set_name() -> str:
     return f"change-set-{int(time.time())}"


-def requires_replacement(changeset: List[ChangeTypeDef]) -> List[ChangeTypeDef]:
+def requires_replacement(changeset: list[ChangeTypeDef]) -> list[ChangeTypeDef]:
     """Return the changes within the changeset that require replacement.

     Args:
         changeset: List of changes

     """
-    return [
-        r
-        for r in changeset
-        if r.get("ResourceChange", {}).get("Replacement", False) == "True"
-    ]
+    return [r for r in changeset if r.get("ResourceChange", {}).get("Replacement", False) == "True"]


 def output_full_changeset(
-    full_changeset: Optional[List[ChangeTypeDef]] = None,
-    params_diff: Optional[List[DictValue[Any, Any]]] = None,
-    answer: Optional[str] = None,
-    fqn: Optional[str] = None,
+    full_changeset: list[ChangeTypeDef] | None = None,
+    params_diff: list[DictValue[Any, Any]] | None = None,
+    answer: str | None = None,
+    fqn: str | None = None,
 ) -> None:
     """Optionally output full changeset.

@@ -211,10 +204,10 @@ def output_full_changeset(


 def ask_for_approval(
-    full_changeset: Optional[List[ChangeTypeDef]] = None,
-    params_diff: Optional[List[DictValue[Any, Any]]] = None,
+    full_changeset: list[ChangeTypeDef] | None = None,
+    params_diff: list[DictValue[Any, Any]] | None = None,
     include_verbose: bool = False,
-    fqn: Optional[str] = None,
+    fqn: str | None = None,
 ) -> None:
     """Prompt the user for approval to execute a change set.

@@ -234,9 +227,7 @@ def ask_for_approval(
     if include_verbose:
         approval_options.append("v")

-    approve = ui.ask(
-        f"Execute the above changes? [{'/'.join(approval_options)}] "
-    ).lower()
+    approve = ui.ask(f"Execute the above changes? [{'/'.join(approval_options)}] ").lower()

     if include_verbose and approve == "v":
         output_full_changeset(
@@ -254,8 +245,8 @@ def output_summary(
     fqn: str,
     action: str,
-    changeset: List[ChangeTypeDef],
-    params_diff: List[DictValue[Any, Any]],
+    changeset: list[ChangeTypeDef],
+    params_diff: list[DictValue[Any, Any]],
     replacements_only: bool = False,
 ) -> None:
     """Log a summary of the changeset.

@@ -271,8 +262,8 @@ def output_summary(
         replacements.
""" - replacements: List[Any] = [] - changes: List[Any] = [] + replacements: list[Any] = [] + changes: list[Any] = [] for change in changeset: resource = change.get("ResourceChange", {}) replacement = resource.get("Replacement", "") == "True" @@ -299,12 +290,12 @@ def output_summary( LOGGER.info("%s %s:\n%s", fqn, action, summary) -def format_params_diff(params_diff: List[DictValue[Any, Any]]) -> str: +def format_params_diff(params_diff: list[DictValue[Any, Any]]) -> str: """Wrap :func:`runway.cfngin.actions.diff.format_params_diff` for testing.""" return format_diff(params_diff) -def summarize_params_diff(params_diff: List[DictValue[Any, Any]]) -> str: +def summarize_params_diff(params_diff: list[DictValue[Any, Any]]) -> str: """Summarize parameter diff.""" summary = "" @@ -366,15 +357,13 @@ def create_change_set( cfn_client: CloudFormationClient, fqn: str, template: Template, - parameters: List[ParameterTypeDef], - tags: List[TagTypeDef], + parameters: list[ParameterTypeDef], + tags: list[TagTypeDef], change_set_type: str = "UPDATE", - service_role: Optional[str] = None, -) -> Tuple[List[ChangeTypeDef], str]: + service_role: str | None = None, +) -> tuple[list[ChangeTypeDef], str]: """Create CloudFormation change set.""" - LOGGER.debug( - "attempting to create change set of type %s for stack: %s", change_set_type, fqn - ) + LOGGER.debug("attempting to create change set of type %s for stack: %s", change_set_type, fqn) args = generate_cloudformation_args( fqn, parameters, @@ -410,20 +399,16 @@ def create_change_set( "didn't contain changes" in status_reason or "No updates are to be performed" in status_reason ): - LOGGER.debug( - "%s:stack did not change; not updating and removing changeset", fqn - ) + LOGGER.debug("%s:stack did not change; not updating and removing changeset", fqn) cfn_client.delete_change_set(ChangeSetName=change_set_id) - raise exceptions.StackDidNotChange() + raise exceptions.StackDidNotChange LOGGER.warning( "got strange status, '%s' for changeset '%s'; not deleting for " "further investigation - you will need to delete the changeset manually", status, change_set_id, ) - raise exceptions.UnhandledChangeSetStatus( - fqn, change_set_id, status, status_reason - ) + raise exceptions.UnhandledChangeSetStatus(fqn, change_set_id, status, status_reason) execution_status = response["ExecutionStatus"] if execution_status != "AVAILABLE": @@ -433,7 +418,7 @@ def create_change_set( return changes, change_set_id -def check_tags_contain(actual: List[TagTypeDef], expected: List[TagTypeDef]) -> bool: +def check_tags_contain(actual: list[TagTypeDef], expected: list[TagTypeDef]) -> bool: """Check if a set of AWS resource tags is contained in another. 
Every tag key in ``expected`` must be present in ``actual``, and have the @@ -455,15 +440,15 @@ def check_tags_contain(actual: List[TagTypeDef], expected: List[TagTypeDef]) -> def generate_cloudformation_args( stack_name: str, - parameters: List[ParameterTypeDef], - tags: List[TagTypeDef], + parameters: list[ParameterTypeDef], + tags: list[TagTypeDef], template: Template, - capabilities: Optional[List[str]] = None, - change_set_type: Optional[str] = None, - service_role: Optional[str] = None, - stack_policy: Optional[Template] = None, - change_set_name: Optional[str] = None, -) -> Dict[str, Any]: + capabilities: list[str] | None = None, + change_set_type: str | None = None, + service_role: str | None = None, + stack_policy: Template | None = None, + change_set_name: str | None = None, +) -> dict[str, Any]: """Generate the args for common CloudFormation API interactions. This is used for ``create_stack``/``update_stack``/``create_change_set`` @@ -505,9 +490,7 @@ def generate_cloudformation_args( elif template.body: args["TemplateBody"] = template.body else: - raise ValueError( - "either template.body or template.url is required; neither were provided" - ) + raise ValueError("either template.body or template.url is required; neither were provided") # When creating args for CreateChangeSet, don't include the stack policy, # since ChangeSets don't support it. @@ -518,15 +501,15 @@ def generate_cloudformation_args( def generate_stack_policy_args( - stack_policy: Optional[Template] = None, -) -> Dict[str, str]: + stack_policy: Template | None = None, +) -> dict[str, str]: """Convert a stack policy object into keyword args. Args: stack_policy: A template object representing a stack policy. """ - args: Dict[str, str] = {} + args: dict[str, str] = {} if stack_policy: LOGGER.debug("stack has a stack policy") if stack_policy.url: @@ -544,21 +527,19 @@ def generate_stack_policy_args( class ProviderBuilder: """Implements a Memorized ProviderBuilder for the AWS provider.""" - kwargs: Dict[str, Any] + kwargs: dict[str, Any] lock: threading.Lock - providers: Dict[str, Provider] - region: Optional[str] + providers: dict[str, Provider] + region: str | None - def __init__(self, *, region: Optional[str] = None, **kwargs: Any) -> None: + def __init__(self, *, region: str | None = None, **kwargs: Any) -> None: """Instantiate class.""" self.region = region self.kwargs = kwargs self.providers = {} self.lock = threading.Lock() - def build( - self, *, profile: Optional[str] = None, region: Optional[str] = None - ) -> Provider: + def build(self, *, profile: str | None = None, region: str | None = None) -> Provider: """Get or create the provider for the given region and profile.""" with self.lock: # memorization lookup key derived from region + profile. @@ -567,9 +548,7 @@ def build( # assume provider is in provider dictionary. provider = self.providers[key] except KeyError: - LOGGER.debug( - "missed memorized lookup (%s); creating new AWS provider", key - ) + LOGGER.debug("missed memorized lookup (%s); creating new AWS provider", key) if not region: region = self.region # memoize the result for later. 
@@ -627,9 +606,9 @@ class Provider(BaseProvider): cloudformation: CloudFormationClient interactive: bool recreate_failed: bool - region: Optional[str] + region: str | None replacements_only: bool - service_role: Optional[str] + service_role: str | None def __init__( self, @@ -637,12 +616,12 @@ def __init__( *, interactive: bool = False, recreate_failed: bool = False, - region: Optional[str] = None, + region: str | None = None, replacements_only: bool = False, - service_role: Optional[str] = None, - ): + service_role: str | None = None, + ) -> None: """Instantiate class.""" - self._outputs: Dict[str, Dict[str, str]] = {} + self._outputs: dict[str, dict[str, str]] = {} self.cloudformation = get_cloudformation_client(session) self.interactive = interactive self.recreate_failed = interactive or recreate_failed @@ -654,13 +633,11 @@ def __init__( def get_stack(self, stack_name: str, *_args: Any, **_kwargs: Any) -> StackTypeDef: """Get stack.""" try: - return self.cloudformation.describe_stacks(StackName=stack_name)["Stacks"][ - 0 - ] + return self.cloudformation.describe_stacks(StackName=stack_name)["Stacks"][0] except botocore.exceptions.ClientError as err: if "does not exist" not in str(err): raise - raise exceptions.StackDoesNotExist(stack_name) + raise exceptions.StackDoesNotExist(stack_name) from None @staticmethod def get_stack_status(stack: StackTypeDef, *_args: Any, **_kwargs: Any) -> str: @@ -668,7 +645,7 @@ def get_stack_status(stack: StackTypeDef, *_args: Any, **_kwargs: Any) -> str: return stack["StackStatus"] @staticmethod - def get_stack_status_reason(stack: StackTypeDef) -> Optional[str]: + def get_stack_status_reason(stack: StackTypeDef) -> str | None: """Get stack status reason.""" return stack.get("StackStatusReason") @@ -712,9 +689,9 @@ def tail_stack( self, stack: Stack, cancel: threading.Event, - action: Optional[str] = None, - log_func: Optional[Callable[[StackEventTypeDef], None]] = None, - retries: Optional[int] = None, + action: str | None = None, + log_func: Callable[[StackEventTypeDef], None] | None = None, + retries: int | None = None, ) -> None: """Tail the events of a stack.""" @@ -740,19 +717,13 @@ def _log_func(event: StackEventTypeDef) -> None: while True: attempts += 1 try: - self.tail( - stack.fqn, cancel=cancel, log_func=log_func, include_initial=False - ) + self.tail(stack.fqn, cancel=cancel, log_func=log_func, include_initial=False) break except botocore.exceptions.ClientError as err: if "does not exist" in str(err): - LOGGER.debug( - "%s:unable to tail stack; it does not exist", stack.fqn - ) + LOGGER.debug("%s:unable to tail stack; it does not exist", stack.fqn) if action == "destroy": - LOGGER.debug( - "%s:stack was deleted before it could be tailed", stack.fqn - ) + LOGGER.debug("%s:stack was deleted before it could be tailed", stack.fqn) return if attempts < retries: # stack might be in the process of launching, wait for a @@ -768,7 +739,7 @@ def _tail_print(event: StackEventTypeDef) -> None: f'{event.get("ResourceStatus")} {event.get("ResourceType")} {event.get("EventId")}' ) - def get_delete_failed_status_reason(self, stack_name: str) -> Optional[str]: + def get_delete_failed_status_reason(self, stack_name: str) -> str | None: """Process events and return latest delete failed reason. Args: @@ -778,17 +749,14 @@ def get_delete_failed_status_reason(self, stack_name: str) -> Optional[str]: Reason for the Stack's DELETE_FAILED status if one can be found. 
""" - event: Union[Dict[str, str], StackEventTypeDef] = ( - self.get_event_by_resource_status( - stack_name, "DELETE_FAILED", chronological=True - ) - or {} + event: dict[str, str] | StackEventTypeDef = ( + self.get_event_by_resource_status(stack_name, "DELETE_FAILED", chronological=True) or {} ) return event.get("ResourceStatusReason") def get_event_by_resource_status( self, stack_name: str, status: str, *, chronological: bool = True - ) -> Optional[StackEventTypeDef]: + ) -> StackEventTypeDef | None: """Get Stack Event of a given set of resource status. Args: @@ -815,7 +783,7 @@ def get_events( ) -> Iterable[StackEventTypeDef]: """Get the events in batches and return in chronological order.""" next_token = None - event_list: List[List[StackEventTypeDef]] = [] + event_list: list[list[StackEventTypeDef]] = [] while True: if next_token is not None: events = self.cloudformation.describe_stack_events( @@ -832,12 +800,12 @@ def get_events( return cast( Iterable["StackEventTypeDef"], reversed( - cast(List["StackEventTypeDef"], sum(event_list, [])) # type: ignore + cast("list[StackEventTypeDef]", functools.reduce(operator.iadd, event_list, [])) ), ) - return cast(Iterable["StackEventTypeDef"], sum(event_list, [])) # type: ignore + return cast(Iterable["StackEventTypeDef"], functools.reduce(operator.iadd, event_list, [])) - def get_rollback_status_reason(self, stack_name: str) -> Optional[str]: + def get_rollback_status_reason(self, stack_name: str) -> str | None: """Process events and returns latest roll back reason. Args: @@ -847,7 +815,7 @@ def get_rollback_status_reason(self, stack_name: str) -> Optional[str]: Reason for the Stack's rollback status if one can be found. """ - event: Union[Dict[str, str], StackEventTypeDef] = ( + event: dict[str, str] | StackEventTypeDef = ( self.get_event_by_resource_status( stack_name, "UPDATE_ROLLBACK_IN_PROGRESS", chronological=False ) @@ -869,7 +837,7 @@ def tail( """Show and then tail the event log.""" # First dump the full list of events in chronological order and keep # track of the events we've seen already - seen: Set[str] = set() + seen: set[str] = set() initial_events = self.get_events(stack_name) for event in initial_events: if include_initial: @@ -891,7 +859,7 @@ def destroy_stack( stack: StackTypeDef, *, action: str = "destroy", - approval: Optional[str] = None, + approval: str | None = None, force_interactive: bool = False, **kwargs: Any, ) -> None: @@ -902,15 +870,14 @@ def destroy_stack( action: Name of the action being executed. This impacts the log message used. approval: Response to approval prompt. force_interactive: Always ask for approval. + **kwargs: Arbitrary keyword arguments. 
""" fqn = self.get_stack_name(stack) LOGGER.debug("%s:attempting to delete stack", fqn) if action == "deploy": - LOGGER.info( - "%s:removed from the CFNgin config file; it is being destroyed", fqn - ) + LOGGER.info("%s:removed from the CFNgin config file; it is being destroyed", fqn) destroy_method = self.select_destroy_method(force_interactive) return destroy_method(fqn=fqn, action=action, approval=approval, **kwargs) @@ -919,13 +886,13 @@ def create_stack( self, fqn: str, template: Template, - parameters: List[ParameterTypeDef], - tags: List[TagTypeDef], + parameters: list[ParameterTypeDef], + tags: list[TagTypeDef], *, force_change_set: bool = False, - stack_policy: Optional[Template] = None, + stack_policy: Template | None = None, termination_protection: bool = False, - timeout: Optional[int] = None, + timeout: int | None = None, **kwargs: Any, ) -> None: """Create a new Cloudformation stack. @@ -943,14 +910,13 @@ def create_stack( protection. timeout: The amount of time that can pass before the stack status becomes ``CREATE_FAILED``. + **kwargs: Arbitrary keyword arguments. """ LOGGER.debug( "attempting to create stack %s: %s", fqn, - json.dumps( - {"parameters": parameters, "tags": tags, "template_url": template.url} - ), + json.dumps({"parameters": parameters, "tags": tags, "template_url": template.url}), ) if not template.url: LOGGER.debug("no template url; uploading template directly") @@ -988,8 +954,7 @@ def create_stack( self.cloudformation.create_stack(**args) except botocore.exceptions.ClientError as err: if err.response["Error"]["Message"] == ( - "TemplateURL must reference a valid S3 object to which you " - "have access." + "TemplateURL must reference a valid S3 object to which you have access." ): s3_fallback( fqn, @@ -1022,9 +987,7 @@ def select_update_method( return self.noninteractive_changeset_update return self.default_update_stack - def prepare_stack_for_update( - self, stack: StackTypeDef, tags: List[TagTypeDef] - ) -> bool: + def prepare_stack_for_update(self, stack: StackTypeDef, tags: list[TagTypeDef]) -> bool: """Prepare a stack for updating. It may involve deleting the stack if is has failed it's initial @@ -1076,8 +1039,7 @@ def prepare_stack_for_update( raise exceptions.StackUpdateBadStatus( stack_name, stack_status, - "Tags differ from current configuration, possibly not created " - "with CFNgin", + "Tags differ from current configuration, possibly not created with CFNgin", ) if self.interactive: @@ -1100,12 +1062,12 @@ def update_stack( self, fqn: str, template: Template, - old_parameters: List[ParameterTypeDef], - parameters: List[ParameterTypeDef], - tags: List[TagTypeDef], + old_parameters: list[ParameterTypeDef], + parameters: list[ParameterTypeDef], + tags: list[TagTypeDef], force_interactive: bool = False, force_change_set: bool = False, - stack_policy: Optional[Template] = None, + stack_policy: Template | None = None, termination_protection: bool = False, **kwargs: Any, ) -> None: @@ -1128,14 +1090,13 @@ def update_stack( executed with a change set. stack_policy: A template object representing a stack policy. termination_protection: End state of the stack's termination protection. + **kwargs: Arbitrary keyword arguments. 
""" LOGGER.debug( "attempting to update stack %s: %s", fqn, - json.dumps( - {"parameters": parameters, "tags": tags, "template_url": template.url} - ), + json.dumps({"parameters": parameters, "tags": tags, "template_url": template.url}), ) if not template.url: LOGGER.debug("no template url; uploading template directly") @@ -1152,9 +1113,7 @@ def update_stack( **kwargs, ) - def update_termination_protection( - self, fqn: str, termination_protection: bool - ) -> None: + def update_termination_protection(self, fqn: str, termination_protection: bool) -> None: """Update a Stack's termination protection if needed. Runs before the normal stack update process. @@ -1177,7 +1136,7 @@ def update_termination_protection( ) def deal_with_changeset_stack_policy( - self, fqn: str, stack_policy: Optional[Template] = None + self, fqn: str, stack_policy: Template | None = None ) -> None: """Set a stack policy when using changesets. @@ -1197,13 +1156,14 @@ def deal_with_changeset_stack_policy( self.cloudformation.set_stack_policy(**kwargs) def interactive_destroy_stack( - self, fqn: str, approval: Optional[str] = None, **kwargs: Any + self, fqn: str, approval: str | None = None, **kwargs: Any ) -> None: """Delete a CloudFormation stack in interactive mode. Args: fqn: A fully qualified stack name. approval: Response to approval prompt. + **kwargs: Arbitrary keyword arguments. """ LOGGER.debug("%s:using interactive provider mode", fqn) @@ -1242,10 +1202,10 @@ def interactive_update_stack( self, fqn: str, template: Template, - old_parameters: List[ParameterTypeDef], - parameters: List[ParameterTypeDef], + old_parameters: list[ParameterTypeDef], + parameters: list[ParameterTypeDef], stack_policy: Template, - tags: List[TagTypeDef], + tags: list[TagTypeDef], ) -> None: """Update a Cloudformation stack in interactive mode. @@ -1279,9 +1239,7 @@ def interactive_update_stack( if "ParameterValue" in x else { "ParameterKey": x["ParameterKey"], # type: ignore - "ParameterValue": old_parameters_as_dict[ - x["ParameterKey"] # type: ignore - ], + "ParameterValue": old_parameters_as_dict[x["ParameterKey"]], # type: ignore } ) for x in parameters @@ -1326,16 +1284,16 @@ def noninteractive_destroy_stack(self, fqn: str, **_kwargs: Any) -> None: if self.service_role: args["RoleARN"] = self.service_role - self.cloudformation.delete_stack(**args) + self.cloudformation.delete_stack(**args) # pyright: ignore[reportArgumentType] - def noninteractive_changeset_update( # pylint: disable=unused-argument + def noninteractive_changeset_update( self, fqn: str, template: Template, - old_parameters: List[ParameterTypeDef], - parameters: List[ParameterTypeDef], - stack_policy: Optional[Template], - tags: List[TagTypeDef], + old_parameters: list[ParameterTypeDef], # noqa: ARG002 + parameters: list[ParameterTypeDef], + stack_policy: Template | None, + tags: list[TagTypeDef], ) -> None: """Update a Cloudformation stack using a change set. 
@@ -1383,14 +1341,14 @@ def select_destroy_method(self, force_interactive: bool) -> Callable[..., None]: return self.interactive_destroy_stack return self.noninteractive_destroy_stack - def default_update_stack( # pylint: disable=unused-argument + def default_update_stack( self, fqn: str, template: Template, - old_parameters: List[ParameterTypeDef], - parameters: List[ParameterTypeDef], - tags: List[TagTypeDef], - stack_policy: Optional[Template] = None, + old_parameters: list[ParameterTypeDef], # noqa: ARG002 + parameters: list[ParameterTypeDef], + tags: list[TagTypeDef], + stack_policy: Template | None = None, ) -> None: """Update a Cloudformation stack in default mode. @@ -1421,7 +1379,7 @@ def default_update_stack( # pylint: disable=unused-argument except botocore.exceptions.ClientError as err: if "No updates are to be performed." in str(err): LOGGER.debug("%s:stack did not change; not updating", fqn) - raise exceptions.StackDidNotChange + raise exceptions.StackDidNotChange from None if err.response["Error"]["Message"] == ( "TemplateURL must reference a valid S3 object to which you have access." ): @@ -1441,13 +1399,11 @@ def get_stack_name(stack: StackTypeDef) -> str: return stack["StackName"] @staticmethod - def get_stack_tags(stack: StackTypeDef) -> List[TagTypeDef]: + def get_stack_tags(stack: StackTypeDef) -> list[TagTypeDef]: """Get stack tags.""" return stack.get("Tags", []) - def get_outputs( - self, stack_name: str, *_args: Any, **_kwargs: Any - ) -> Dict[str, str]: + def get_outputs(self, stack_name: str, *_args: Any, **_kwargs: Any) -> dict[str, str]: """Get stack outputs.""" if not self._outputs.get(stack_name): stack = self.get_stack(stack_name) @@ -1455,40 +1411,36 @@ def get_outputs( return self._outputs[stack_name] @staticmethod - def get_output_dict(stack: StackTypeDef) -> Dict[str, str]: + def get_output_dict(stack: StackTypeDef) -> dict[str, str]: """Get stack outputs dict.""" return get_output_dict(stack) - def get_stack_info( - self, stack: StackTypeDef - ) -> Tuple[str, Dict[str, Union[List[str], str]]]: + def get_stack_info(self, stack: StackTypeDef) -> tuple[str, dict[str, list[str] | str]]: """Get the template and parameters of the stack currently in AWS.""" stack_name = stack.get("StackId", "None") try: - template = self.cloudformation.get_template(StackName=stack_name)[ - "TemplateBody" - ] + template = self.cloudformation.get_template(StackName=stack_name)["TemplateBody"] except botocore.exceptions.ClientError as err: if "does not exist" not in str(err): raise - raise exceptions.StackDoesNotExist(stack_name) + raise exceptions.StackDoesNotExist(stack_name) from None parameters = self.params_as_dict(stack.get("Parameters", [])) # handle yaml templates - if isinstance(template, str): # type: ignore + if isinstance(template, str): template = parse_cloudformation_template(template) return json.dumps(template, cls=JsonEncoder), parameters - def get_stack_changes( + def get_stack_changes( # noqa: C901, PLR0912 self, stack: Stack, template: Template, - parameters: List[ParameterTypeDef], - tags: List[TagTypeDef], - ) -> Dict[str, str]: + parameters: list[ParameterTypeDef], + tags: list[TagTypeDef], + ) -> dict[str, str]: """Get the changes from a ChangeSet. 
Args: @@ -1509,12 +1461,10 @@ def get_stack_changes( if self.get_stack_status(stack_details) == self.REVIEW_STATUS: raise exceptions.StackDoesNotExist(stack.fqn) old_template_raw, old_params = self.get_stack_info(stack_details) - old_template: Dict[str, Any] = parse_cloudformation_template( - old_template_raw - ) + old_template: dict[str, Any] = parse_cloudformation_template(old_template_raw) change_type = "UPDATE" except exceptions.StackDoesNotExist: - old_params: Dict[str, Union[List[str], str]] = {} + old_params: dict[str, list[str] | str] = {} old_template = {} change_type = "CREATE" @@ -1569,7 +1519,7 @@ def get_stack_changes( self.get_outputs(stack.fqn) # infer which outputs may have changed - refs_to_invalidate: List[str] = [] + refs_to_invalidate: list[str] = [] for change in changes: resc_change = change.get("ResourceChange", {}) if resc_change.get("Type") == "Add": @@ -1629,8 +1579,8 @@ def get_stack_changes( @staticmethod def params_as_dict( - parameters_list: List[ParameterTypeDef], - ) -> Dict[str, Union[List[str], str]]: + parameters_list: list[ParameterTypeDef], + ) -> dict[str, list[str] | str]: """Parameters as dict.""" return { param["ParameterKey"]: param["ParameterValue"] # type: ignore diff --git a/runway/cfngin/providers/base.py b/runway/cfngin/providers/base.py index 24b4d07eb..7007365bc 100644 --- a/runway/cfngin/providers/base.py +++ b/runway/cfngin/providers/base.py @@ -1,7 +1,8 @@ """Provider base class.""" -# pylint: disable=unused-argument -from typing import Any, Optional +from __future__ import annotations + +from typing import Any def not_implemented(method: str) -> None: @@ -12,7 +13,7 @@ def not_implemented(method: str) -> None: class BaseProviderBuilder: """ProviderBuilder base class.""" - def build(self, region: Optional[str] = None) -> Any: + def build(self, region: str | None = None) -> Any: # noqa: ARG002 """Abstract method.""" not_implemented("build") @@ -20,11 +21,11 @@ def build(self, region: Optional[str] = None) -> Any: class BaseProvider: """Provider base class.""" - def get_stack(self, stack_name: str, *args: Any, **kwargs: Any) -> Any: + def get_stack(self, stack_name: str, *_args: Any, **_kwargs: Any) -> Any: # noqa: ARG002 """Abstract method.""" not_implemented("get_stack") - def get_outputs(self, stack_name: str, *args: Any, **kwargs: Any) -> Any: + def get_outputs(self, stack_name: str, *_args: Any, **_kwargs: Any) -> Any: # noqa: ARG002 """Abstract method.""" not_implemented("get_outputs") @@ -42,7 +43,7 @@ class Template: """ - def __init__(self, url: Optional[str] = None, body: Optional[str] = None) -> None: + def __init__(self, url: str | None = None, body: str | None = None) -> None: """Instantiate class.""" self.url = url self.body = body diff --git a/runway/cfngin/session_cache.py b/runway/cfngin/session_cache.py index 48c9ef5f1..8484c73e5 100644 --- a/runway/cfngin/session_cache.py +++ b/runway/cfngin/session_cache.py @@ -1,7 +1,8 @@ """CFNgin session caching.""" +from __future__ import annotations + import logging -from typing import Optional import boto3 @@ -15,11 +16,11 @@ def get_session( - region: Optional[str] = None, - profile: Optional[str] = None, - access_key: Optional[str] = None, - secret_key: Optional[str] = None, - session_token: Optional[str] = None, + region: str | None = None, + profile: str | None = None, + access_key: str | None = None, + secret_key: str | None = None, + session_token: str | None = None, ) -> boto3.Session: """Create a thread-safe boto3 session. 
@@ -51,12 +52,12 @@ def get_session( aws_access_key_id=access_key, aws_secret_access_key=secret_key, aws_session_token=session_token, - botocore_session=Session(), # type: ignore + botocore_session=Session(), region_name=region, profile_name=profile, ) cred_provider = session._session.get_component("credential_provider") # type: ignore provider = cred_provider.get_provider("assume-role") # type: ignore provider.cache = BOTO3_CREDENTIAL_CACHE - provider._prompter = ui.getpass + provider._prompter = ui.getpass # noqa: SLF001 return session diff --git a/runway/cfngin/stack.py b/runway/cfngin/stack.py index a1b51381e..8e5dd6fc9 100644 --- a/runway/cfngin/stack.py +++ b/runway/cfngin/stack.py @@ -3,9 +3,7 @@ from __future__ import annotations from copy import deepcopy -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, cast - -from typing_extensions import Literal +from typing import TYPE_CHECKING, Any, cast from runway.utils import load_object_from_string from runway.variables import Variable, resolve_variables @@ -13,6 +11,8 @@ from .blueprints.raw import RawTemplateBlueprint if TYPE_CHECKING: + from typing_extensions import Literal + from ..config.models.cfngin import CfnginStackDefinitionModel from ..context import CfnginContext from .blueprints.base import Blueprint @@ -20,8 +20,8 @@ def _initialize_variables( - stack_def: CfnginStackDefinitionModel, variables: Optional[Dict[str, Any]] = None -) -> List[Variable]: + stack_def: CfnginStackDefinitionModel, variables: dict[str, Any] | None = None +) -> list[Variable]: """Convert defined variables into a list of ``Variable`` for consumption. Args: @@ -65,36 +65,36 @@ class Stack: """ - _blueprint: Optional[Blueprint] - _stack_policy: Optional[str] + _blueprint: Blueprint | None + _stack_policy: str | None context: CfnginContext definition: CfnginStackDefinitionModel enabled: bool force: bool fqn: str - in_progress_behavior: Optional[Literal["wait"]] + in_progress_behavior: Literal["wait"] | None locked: bool logging: bool - mappings: Dict[str, Dict[str, Dict[str, Any]]] + mappings: dict[str, dict[str, dict[str, Any]]] name: str - outputs: Dict[str, Any] + outputs: dict[str, Any] protected: bool termination_protection: bool - variables: List[Variable] + variables: list[Variable] def __init__( self, definition: CfnginStackDefinitionModel, context: CfnginContext, *, - variables: Optional[Dict[str, Any]] = None, - mappings: Dict[str, Dict[str, Dict[str, Any]]] = None, + variables: dict[str, Any] | None = None, + mappings: dict[str, dict[str, dict[str, Any]]] | None = None, locked: bool = False, force: bool = False, enabled: bool = True, protected: bool = False, - ): + ) -> None: """Instantiate class. 
Args: @@ -127,12 +127,12 @@ def __init__( self.variables = _initialize_variables(definition, variables) @property - def required_by(self) -> Set[str]: + def required_by(self) -> set[str]: """Return a list of stack names that depend on this stack.""" return set(self.definition.required_by) @property - def requires(self) -> Set[str]: + def requires(self) -> set[str]: """Return a list of stack names this stack depends on.""" requires = set(self.definition.requires or []) @@ -147,21 +147,17 @@ def requires(self) -> Set[str]: return requires @property - def stack_policy(self) -> Optional[str]: + def stack_policy(self) -> str | None: """Return the Stack Policy to use for this stack.""" - if not self._stack_policy: - self._stack_policy = None - if self.definition.stack_policy_path: - with open(self.definition.stack_policy_path, encoding="utf-8") as file_: - self._stack_policy = file_.read() - - return self._stack_policy + if self.definition.stack_policy_path: + return self.definition.stack_policy_path.read_text() or None + return None @property def blueprint(self) -> Blueprint: """Return the blueprint associated with this stack.""" if not self._blueprint: - kwargs: Dict[str, Any] = {} + kwargs: dict[str, Any] = {} if self.definition.class_path: class_path = self.definition.class_path blueprint_class = load_object_from_string(class_path) @@ -173,9 +169,7 @@ def blueprint(self) -> Blueprint: blueprint_class = RawTemplateBlueprint kwargs["raw_template_path"] = self.definition.template_path else: - raise AttributeError( - "Stack does not have a defined class or template path." - ) + raise AttributeError("Stack does not have a defined class or template path.") self._blueprint = cast( "Blueprint", @@ -190,7 +184,7 @@ def blueprint(self) -> Blueprint: return self._blueprint @property - def tags(self) -> Dict[str, Any]: + def tags(self) -> dict[str, Any]: """Return the tags that should be set on this stack. Includes both the global tags, as well as any stack specific tags @@ -201,7 +195,7 @@ def tags(self) -> Dict[str, Any]: return dict(self.context.tags, **tags) @property - def parameter_values(self) -> Dict[str, Any]: + def parameter_values(self) -> dict[str, Any]: """Return all CloudFormation Parameters for the stack. CloudFormation Parameters can be specified via Blueprint Variables @@ -215,18 +209,16 @@ def parameter_values(self) -> Dict[str, Any]: return self.blueprint.parameter_values @property - def all_parameter_definitions(self) -> Dict[str, Any]: + def all_parameter_definitions(self) -> dict[str, Any]: """Return all parameters in the blueprint/template.""" return self.blueprint.parameter_definitions @property - def required_parameter_definitions(self) -> Dict[str, Any]: + def required_parameter_definitions(self) -> dict[str, Any]: """Return all CloudFormation Parameters without a default value.""" return self.blueprint.required_parameter_definitions - def resolve( - self, context: CfnginContext, provider: Optional[Provider] = None - ) -> None: + def resolve(self, context: CfnginContext, provider: Provider | None = None) -> None: """Resolve the Stack variables. This resolves the Stack variables and then prepares the Blueprint for @@ -240,7 +232,7 @@ def resolve( resolve_variables(self.variables, context, provider) self.blueprint.resolve_variables(self.variables) - def set_outputs(self, outputs: Dict[str, Any]) -> None: + def set_outputs(self, outputs: dict[str, Any]) -> None: """Set stack outputs to the provided value. 
Args: diff --git a/runway/cfngin/status.py b/runway/cfngin/status.py index 456a6c153..1c4d6572d 100644 --- a/runway/cfngin/status.py +++ b/runway/cfngin/status.py @@ -1,7 +1,9 @@ """CFNgin statuses.""" +from __future__ import annotations + import operator -from typing import Any, Callable, Optional +from typing import Any, Callable class Status: @@ -16,9 +18,9 @@ class Status: code: int name: str - reason: Optional[str] + reason: str | None - def __init__(self, name: str, code: int, reason: Optional[str] = None) -> None: + def __init__(self, name: str, code: int, reason: str | None = None) -> None: """Instantiate class. Args: @@ -46,11 +48,11 @@ def _comparison(self, operator_: Callable[[Any, Any], bool], other: Any) -> bool return operator_(self.code, other.code) return NotImplemented - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: """Compare if self is equal to another object.""" return self._comparison(operator.eq, other) - def __ne__(self, other: Any) -> bool: + def __ne__(self, other: object) -> bool: """Compare if self is not equal to another object.""" return self._comparison(operator.ne, other) @@ -74,7 +76,7 @@ def __ge__(self, other: Any) -> bool: class CompleteStatus(Status): """Status name of 'complete' with code of '2'.""" - def __init__(self, reason: Optional[str] = None) -> None: + def __init__(self, reason: str | None = None) -> None: """Instantiate class. Args: @@ -87,7 +89,7 @@ def __init__(self, reason: Optional[str] = None) -> None: class FailedStatus(Status): """Status name of 'failed' with code of '4'.""" - def __init__(self, reason: Optional[str] = None) -> None: + def __init__(self, reason: str | None = None) -> None: """Instantiate class. Args: @@ -100,7 +102,7 @@ def __init__(self, reason: Optional[str] = None) -> None: class PendingStatus(Status): """Status name of 'pending' with code of '0'.""" - def __init__(self, reason: Optional[str] = None) -> None: + def __init__(self, reason: str | None = None) -> None: """Instantiate class. Args: @@ -113,7 +115,7 @@ def __init__(self, reason: Optional[str] = None) -> None: class SkippedStatus(Status): """Status name of 'skipped' with code of '3'.""" - def __init__(self, reason: Optional[str] = None) -> None: + def __init__(self, reason: str | None = None) -> None: """Instantiate class. Args: @@ -126,7 +128,7 @@ def __init__(self, reason: Optional[str] = None) -> None: class SubmittedStatus(Status): """Status name of 'submitted' with code of '1'.""" - def __init__(self, reason: Optional[str] = None) -> None: + def __init__(self, reason: str | None = None) -> None: """Instantiate class. Args: diff --git a/runway/cfngin/tokenize_userdata.py b/runway/cfngin/tokenize_userdata.py index cd5b48ed0..d0432f1ac 100644 --- a/runway/cfngin/tokenize_userdata.py +++ b/runway/cfngin/tokenize_userdata.py @@ -1,7 +1,6 @@ """Resources to tokenize userdata.""" import re -from typing import List from troposphere import GetAtt, Ref @@ -14,7 +13,7 @@ REPLACE_RE = re.compile(REPLACE_STRING) -def cf_tokenize(raw_userdata: str) -> List[str]: +def cf_tokenize(raw_userdata: str) -> list[str]: """Parse UserData for Cloudformation helper functions. 
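# Aside (illustrative sketch, not part of this changeset): the comparison methods
# above now take ``other: object``, matching the signature of ``object.__eq__``.
# ``_comparison`` returns ``NotImplemented`` for foreign types, which tells Python
# to try the other operand's reflected method instead of raising. Compressed shape
# of the pattern (``StatusSketch`` is a hypothetical stand-in):
import operator
from typing import Any, Callable


class StatusSketch:
    def __init__(self, code: int) -> None:
        self.code = code

    def _comparison(self, operator_: Callable[[Any, Any], bool], other: Any) -> bool:
        if hasattr(other, "code"):
            return operator_(self.code, other.code)
        return NotImplemented  # defer to the other operand's reflected method

    def __eq__(self, other: object) -> bool:
        return self._comparison(operator.eq, other)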
http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/user-data.html @@ -39,7 +38,7 @@ def cf_tokenize(raw_userdata: str) -> List[str]: Base64(Join('', cf_tokenize(userdata_string))) """ - result: List[str] = [] + result: list[str] = [] parts = SPLIT_RE.split(raw_userdata) for part in parts: cf_func = REPLACE_RE.search(part) diff --git a/runway/cfngin/ui.py b/runway/cfngin/ui.py index 861d0158a..dadf76869 100644 --- a/runway/cfngin/ui.py +++ b/runway/cfngin/ui.py @@ -4,12 +4,15 @@ import logging import threading +from contextlib import AbstractContextManager from getpass import getpass -from typing import TYPE_CHECKING, Any, ContextManager, Optional, TextIO, Type, Union +from typing import TYPE_CHECKING, Any, TextIO if TYPE_CHECKING: from types import TracebackType + from typing_extensions import Self + LOGGER = logging.getLogger(__name__) @@ -18,7 +21,7 @@ def get_raw_input(message: str) -> str: return input(message) -class UI(ContextManager["UI"]): +class UI(AbstractContextManager["UI"]): """Used internally from terminal output in a multithreaded environment. Ensures that two threads don't write over each other while asking a user @@ -33,9 +36,9 @@ def __init__(self) -> None: def log( self, lvl: int, - msg: Union[Exception, str], + msg: Exception | str, *args: Any, - logger: Union[logging.Logger, logging.LoggerAdapter[Any]] = LOGGER, + logger: logging.Logger | logging.LoggerAdapter[Any] = LOGGER, **kwargs: Any, ) -> None: """Log the message if the current thread owns the underlying lock. @@ -44,8 +47,11 @@ def log( lvl: Log level. msg: String template or exception to use for the log record. logger: Specific logger to log to. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ + kwargs["stacklevel"] = kwargs.get("stacklevel", 1) + 1 with self: return logger.log(lvl, msg, *args, **kwargs) @@ -53,7 +59,7 @@ def info( self, msg: str, *args: Any, - logger: Union[logging.Logger, logging.LoggerAdapter[Any]] = LOGGER, + logger: logging.Logger | logging.LoggerAdapter[Any] = LOGGER, **kwargs: Any, ) -> None: """Log the line if the current thread owns the underlying lock. @@ -62,6 +68,8 @@ def info( msg: String template or exception to use for the log record. logger: Specific logger to log to. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. 
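# Aside (illustrative sketch, not part of this changeset): why ``UI.log`` now
# bumps ``stacklevel``. The record is emitted from inside the wrapper, so without
# the bump every message would report the wrapper's file/line; adding one walks a
# frame up the stack so ``%(filename)s``/``%(lineno)d`` point at the caller.
import logging
from typing import Any

LOGGER = logging.getLogger("sketch")


def log_through_wrapper(lvl: int, msg: str, *args: Any, **kwargs: Any) -> None:
    # +1 so the log record points at our caller, not at this function.
    kwargs["stacklevel"] = kwargs.get("stacklevel", 1) + 1
    LOGGER.log(lvl, msg, *args, **kwargs)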
""" kwargs["logger"] = logger @@ -79,21 +87,21 @@ def ask(self, message: str) -> str: with self: return get_raw_input(message) - def getpass(self, prompt: str, stream: Optional[TextIO] = None) -> str: + def getpass(self, prompt: str, stream: TextIO | None = None) -> str: """Wrap getpass to lock the UI.""" with self: return getpass(prompt, stream) - def __enter__(self) -> UI: + def __enter__(self) -> Self: """Enter the context manager.""" self._lock.__enter__() return self def __exit__( self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, ) -> None: """Exit the context manager.""" self._lock.__exit__(exc_type, exc_value, traceback) diff --git a/runway/cfngin/utils.py b/runway/cfngin/utils.py index 1f44abda6..c04b5b02d 100644 --- a/runway/cfngin/utils.py +++ b/runway/cfngin/utils.py @@ -14,20 +14,9 @@ import tempfile import uuid import zipfile +from collections import OrderedDict from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - Dict, - Iterator, - List, - Optional, - OrderedDict, - Type, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, ClassVar, cast import botocore.client import botocore.exceptions @@ -39,8 +28,10 @@ from .session_cache import get_session if TYPE_CHECKING: + from collections.abc import Iterator + from mypy_boto3_route53.client import Route53Client - from mypy_boto3_route53.type_defs import ResourceRecordSetTypeDef + from mypy_boto3_route53.type_defs import ResourceRecordSetExtraOutputTypeDef from mypy_boto3_s3.client import S3Client from ..config.models.cfngin import ( @@ -86,7 +77,7 @@ def parse_zone_id(full_zone_id: str) -> str: return full_zone_id.split("/")[2] -def get_hosted_zone_by_name(client: Route53Client, zone_name: str) -> Optional[str]: +def get_hosted_zone_by_name(client: Route53Client, zone_name: str) -> str | None: """Get the zone id of an existing zone by name. Args: @@ -147,28 +138,19 @@ def __init__(self, record_text: str) -> None: def __str__(self) -> str: """Convert an instance of this class to a string.""" - return " ".join( - [ - self.nameserver, - self.contact, - self.serial, - self.refresh, - self.retry, - self.expire, - self.min_ttl, - ] + return ( + f"{self.nameserver} {self.contact} {self.serial} {self.refresh} " + f"{self.retry} {self.expire} {self.min_ttl}" ) class SOARecord: """Represents an SOA record.""" - def __init__(self, record: ResourceRecordSetTypeDef) -> None: + def __init__(self, record: ResourceRecordSetExtraOutputTypeDef) -> None: """Instantiate class.""" self.name = record["Name"] - self.text = SOARecordText( - record.get("ResourceRecords", [{"Value": ""}])[0]["Value"] - ) + self.text = SOARecordText(record.get("ResourceRecords", [{"Value": ""}])[0]["Value"]) self.ttl = record.get("TTL", 0) @@ -238,9 +220,9 @@ def create_route53_zone(client: Route53Client, zone_name: str) -> str: return zone_id -def yaml_to_ordered_dict( +def yaml_to_ordered_dict( # noqa: C901 stream: str, - loader: Union[Type[yaml.Loader], Type[yaml.SafeLoader]] = yaml.SafeLoader, + loader: type[yaml.Loader | yaml.SafeLoader] = yaml.SafeLoader, ) -> OrderedDict[str, Any]: """yaml.load alternative with preserved dictionary order. 
@@ -267,12 +249,12 @@ class OrderedUniqueLoader(loader): # type: ignore @staticmethod def _error_mapping_on_dupe( - node: Union[yaml.MappingNode, yaml.ScalarNode, yaml.SequenceNode], + node: yaml.MappingNode | yaml.ScalarNode | yaml.SequenceNode, node_name: str, ) -> None: """Check mapping node for dupe children keys.""" if isinstance(node, yaml.MappingNode): - mapping: Dict[str, Any] = {} + mapping: dict[str, Any] = {} for val in node.value: a = val[0] b = mapping.get(a.value, None) @@ -285,7 +267,7 @@ def _error_mapping_on_dupe( def _validate_mapping( self, - node: Union[yaml.MappingNode, yaml.ScalarNode, yaml.SequenceNode], + node: yaml.MappingNode | yaml.ScalarNode | yaml.SequenceNode, deep: bool = False, ) -> OrderedDict[Any, Any]: if not isinstance(node, yaml.MappingNode): @@ -322,7 +304,7 @@ def _validate_mapping( def construct_mapping( self, - node: Union[yaml.MappingNode, yaml.ScalarNode, yaml.SequenceNode], + node: yaml.MappingNode | yaml.ScalarNode | yaml.SequenceNode, deep: bool = False, ) -> OrderedDict[Any, Any]: """Override parent method to use OrderedDict.""" @@ -331,7 +313,7 @@ def construct_mapping( return self._validate_mapping(node, deep=deep) def construct_yaml_map( - self, node: Union[yaml.MappingNode, yaml.ScalarNode, yaml.SequenceNode] + self, node: yaml.MappingNode | yaml.ScalarNode | yaml.SequenceNode ) -> Iterator[OrderedDict[Any, Any]]: data: OrderedDict[Any, Any] = OrderedDict() yield data @@ -341,7 +323,7 @@ def construct_yaml_map( OrderedUniqueLoader.add_constructor( "tag:yaml.org,2002:map", OrderedUniqueLoader.construct_yaml_map ) - return yaml.load(stream, OrderedUniqueLoader) + return yaml.load(stream, OrderedUniqueLoader) # noqa: S506 def uppercase_first_letter(string_: str) -> str: @@ -361,7 +343,7 @@ def cf_safe_name(name: str) -> str: return "".join(uppercase_first_letter(part) for part in parts) -def read_value_from_path(value: str, *, root_path: Optional[Path] = None) -> str: +def read_value_from_path(value: str, *, root_path: Path | None = None) -> str: """Enable translators to read values from files. The value can be referred to with the `file://` prefix. @@ -373,23 +355,16 @@ def read_value_from_path(value: str, *, root_path: Optional[Path] = None) -> str """ if value.startswith("file://"): - path = value.split("file://", 1)[1] - if os.path.isabs(path): + path = Path(value.split("file://", 1)[1]) + if path.is_absolute(): read_path = Path(path) else: root_path = root_path or Path.cwd() - if root_path.is_dir(): - read_path = root_path / path - else: - read_path = root_path.parent / path + read_path = root_path / path if root_path.is_dir() else root_path.parent / path if read_path.is_file(): - return read_path.read_text( - encoding=locale.getpreferredencoding(do_setlocale=False) - ) + return read_path.read_text(encoding=locale.getpreferredencoding(do_setlocale=False)) if read_path.is_dir(): - raise ValueError( - f"path must lead to a file not directory: {read_path.absolute()}" - ) + raise ValueError(f"path must lead to a file not directory: {read_path.absolute()}") raise ValueError(f"path does not exist: {read_path.absolute()}") return value @@ -404,7 +379,7 @@ def get_client_region(client: Any) -> str: AWS region string. """ - return client._client_config.region_name # type: ignore + return client._client_config.region_name # noqa: SLF001 def get_s3_endpoint(client: Any) -> str: @@ -417,10 +392,10 @@ def get_s3_endpoint(client: Any) -> str: The AWS endpoint for the client. 
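# Aside (illustrative usage sketch, not part of this changeset; file names are
# hypothetical): how the ``file://`` handling rewritten above behaves.
from pathlib import Path

from runway.cfngin.utils import read_value_from_path

# Values without the prefix pass through untouched.
assert read_value_from_path("plain value") == "plain value"

# With the prefix, a relative path resolves against root_path when it is a
# directory, or against root_path.parent when it is a file (e.g. the config).
Path("policy.json").write_text("{}", encoding="utf-8")
assert read_value_from_path("file://policy.json", root_path=Path.cwd()) == "{}"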
""" - return client._endpoint.host # type: ignore + return client._endpoint.host # noqa: SLF001 -def s3_bucket_location_constraint(region: Optional[str]) -> Optional[str]: +def s3_bucket_location_constraint(region: str | None) -> str | None: """Return the appropriate LocationConstraint info for a new S3 bucket. When creating a bucket in a region OTHER than us-east-1, you need to @@ -442,7 +417,7 @@ def s3_bucket_location_constraint(region: Optional[str]) -> Optional[str]: def ensure_s3_bucket( s3_client: S3Client, bucket_name: str, - bucket_region: Optional[str] = None, + bucket_region: str | None = None, *, create: bool = True, persist_graph: bool = False, @@ -484,7 +459,7 @@ def ensure_s3_bucket( # can't use s3_client.exceptions.NoSuchBucket here. # it does not work if the bucket was recently deleted. LOGGER.debug("creating bucket %s", bucket_name) - create_args: Dict[str, Any] = {"Bucket": bucket_name} + create_args: dict[str, Any] = {"Bucket": bucket_name} location_constraint = s3_bucket_location_constraint(bucket_region) if location_constraint: create_args["CreateBucketConfiguration"] = { @@ -498,8 +473,7 @@ def ensure_s3_bucket( return if err.response["Error"]["Message"] == "Forbidden": LOGGER.exception( - "Access denied for bucket %s. Did you remember " - "to use a globally unique name?", + "Access denied for bucket %s. Did you remember to use a globally unique name?", bucket_name, ) elif err.response["Error"]["Message"] != "Not Found": @@ -507,7 +481,7 @@ def ensure_s3_bucket( raise -def parse_cloudformation_template(template: str) -> Dict[str, Any]: +def parse_cloudformation_template(template: str) -> dict[str, Any]: """Parse CFN template string. Leverages the vendored aws-cli yamlhelper to handle JSON or YAML templates. @@ -525,15 +499,15 @@ def is_within_directory(directory: Path | str, target: str) -> bool: Determines if the provided path is within a specific directory or its subdirectories. Args: - directory (Union[Path, str]): Path of the directory we're checking. - target (str): Path of the file we're checking for containment. + directory: Path of the directory we're checking. + target: Path of the file we're checking for containment. Returns: bool: True if the target is in the directory or subdirectories, False otherwise. """ - abs_directory = os.path.abspath(directory) - abs_target = os.path.abspath(target) + abs_directory = os.path.abspath(directory) # noqa: PTH100 + abs_target = os.path.abspath(target) # noqa: PTH100 prefix = os.path.commonprefix([abs_directory, abs_target]) return prefix == abs_directory @@ -544,27 +518,27 @@ def safe_tar_extract( members: list[tarfile.TarInfo] | None = None, *, numeric_owner: bool = False, -): +) -> None: """Safely extract the contents of a tar file to a specified directory. This code is modified from a PR provided to Runway project to address CVE-2007-4559. Args: - tar (TarFile): The tar file object that will be extracted. - path (Union[Path, str], optional): The directory to extract the tar into. - members (List[TarInfo] | None, optional): List of TarInfo objects to extract. - numeric_owner (bool, optional): Enable usage of owner and group IDs when extracting. + tar: The tar file object that will be extracted. + path: The directory to extract the tar into. + members: List of TarInfo objects to extract. + numeric_owner: Enable usage of owner and group IDs when extracting. Raises: Exception: If any tar file tries to go outside the specified area. 
""" for member in tar.getmembers(): - member_path = os.path.join(path, member.name) + member_path = os.path.join(path, member.name) # noqa: PTH118 if not is_within_directory(path, member_path): raise Exception("Attempted Path Traversal in Tar File") - tar.extractall(path, members, numeric_owner=numeric_owner) + tar.extractall(path, members, numeric_owner=numeric_owner) # noqa: S202 class Extractor: @@ -572,7 +546,7 @@ class Extractor: extension: ClassVar[str] = "" - def __init__(self, archive: Optional[Path] = None) -> None: + def __init__(self, archive: Path | None = None) -> None: """Instantiate class. Args: @@ -622,7 +596,7 @@ def extract(self, destination: Path) -> None: """Extract the archive.""" if self.archive: with zipfile.ZipFile(self.archive, "r") as zip_ref: - zip_ref.extractall(destination) + zip_ref.extractall(destination) # noqa: S202 class SourceProcessor: @@ -645,7 +619,7 @@ def __init__( self.cache_dir = cache_dir self.package_cache_dir = cache_dir / "packages" self.sources = sources - self.configs_to_merge: List[Path] = [] + self.configs_to_merge: list[Path] = [] self.create_cache_directories() def create_cache_directories(self) -> None: @@ -664,9 +638,7 @@ def get_package_sources(self) -> None: for config in self.sources.git: self.fetch_git_package(config=config) - def fetch_local_package( - self, config: LocalCfnginPackageSourceDefinitionModel - ) -> None: + def fetch_local_package(self, config: LocalCfnginPackageSourceDefinitionModel) -> None: """Make a local path available to current CFNgin config. Args: @@ -713,7 +685,7 @@ def fetch_s3_package(self, config: S3CfnginPackageSourceDefinitionModel) -> None ) session = get_session(region=None) - extra_s3_args: Dict[str, Any] = {} + extra_s3_args: dict[str, Any] = {} if config.requester_pays: extra_s3_args["RequestPayer"] = "requester" @@ -726,7 +698,7 @@ def fetch_s3_package(self, config: S3CfnginPackageSourceDefinitionModel) -> None session.client("s3") .head_object(Bucket=config.bucket, Key=config.key, **extra_s3_args)[ "LastModified" - ] # type: ignore + ] .astimezone(dateutil.tz.tzutc()) # type: ignore ) except botocore.exceptions.ClientError as client_error: @@ -749,13 +721,12 @@ def fetch_s3_package(self, config: S3CfnginPackageSourceDefinitionModel) -> None cached_dir_path, ) tmp_dir = tempfile.mkdtemp(prefix="cfngin") - tmp_package_path = os.path.join(tmp_dir, dir_name) + tmp_package_path = os.path.join(tmp_dir, dir_name) # noqa: PTH118 with tempfile.TemporaryDirectory(prefix="runway-cfngin") as tmp_dir: tmp_package_path = Path(tmp_dir) / dir_name extractor.set_archive(tmp_package_path) LOGGER.debug( - "starting remote package download from S3 to %s " - 'with extra S3 options "%s"', + 'starting remote package download from S3 to %s with extra S3 options "%s"', extractor.archive, str(extra_s3_args), ) @@ -770,8 +741,7 @@ def fetch_s3_package(self, config: S3CfnginPackageSourceDefinitionModel) -> None ) extractor.extract(tmp_package_path) LOGGER.debug( - "moving extracted package directory %s to the " - "CFNgin cache at %s", + "moving extracted package directory %s to the CFNgin cache at %s", dir_name, self.package_cache_dir, ) @@ -797,7 +767,7 @@ def fetch_git_package(self, config: GitCfnginPackageSourceDefinitionModel) -> No """ # only loading git here when needed to avoid load errors on systems # without git installed - from git.repo import Repo # pylint: disable=import-outside-toplevel + from git.repo import Repo ref = self.determine_git_ref(config) dir_name = self.sanitize_git_path(uri=config.uri, ref=ref) @@ 
-813,7 +783,7 @@ def fetch_git_package(self, config: GitCfnginPackageSourceDefinitionModel) -> No ) tmp_dir = tempfile.mkdtemp(prefix="cfngin") try: - tmp_repo_path = os.path.join(tmp_dir, dir_name) + tmp_repo_path = os.path.join(tmp_dir, dir_name) # noqa: PTH118 with Repo.clone_from(config.uri, tmp_repo_path) as repo: repo.head.set_reference(ref) repo.head.reset(index=True, working_tree=True) @@ -822,8 +792,7 @@ def fetch_git_package(self, config: GitCfnginPackageSourceDefinitionModel) -> No shutil.rmtree(tmp_dir) else: LOGGER.debug( - "remote repo %s appears to have been previously " - "cloned to %s; download skipped", + "remote repo %s appears to have been previously cloned to %s; download skipped", config.uri, cached_dir_path, ) @@ -833,13 +802,13 @@ def fetch_git_package(self, config: GitCfnginPackageSourceDefinitionModel) -> No def update_paths_and_config( self, - config: Union[ - GitCfnginPackageSourceDefinitionModel, - LocalCfnginPackageSourceDefinitionModel, - S3CfnginPackageSourceDefinitionModel, - ], + config: ( + GitCfnginPackageSourceDefinitionModel + | LocalCfnginPackageSourceDefinitionModel + | S3CfnginPackageSourceDefinitionModel + ), pkg_dir_name: str, - pkg_cache_dir: Optional[Path] = None, + pkg_cache_dir: Path | None = None, ) -> None: """Handle remote source defined sys.paths & configs. @@ -933,7 +902,7 @@ def sanitize_uri_path(uri: str) -> str: uri = uri.replace(i, "_") return uri - def sanitize_git_path(self, uri: str, ref: Optional[str] = None) -> str: + def sanitize_git_path(self, uri: str, ref: str | None = None) -> str: """Take a git URI and ref and converts it to a directory safe path. Args: @@ -944,10 +913,7 @@ def sanitize_git_path(self, uri: str, ref: Optional[str] = None) -> str: Directory name for the supplied uri """ - if uri.endswith(".git"): - dir_name = uri[:-4] # drop .git - else: - dir_name = uri + dir_name = uri[:-4] if uri.endswith(".git") else uri # drop .git dir_name = self.sanitize_uri_path(dir_name) if ref is not None: dir_name += f"-{ref}" diff --git a/runway/compat.py b/runway/compat.py index 038e76ed2..360443eff 100644 --- a/runway/compat.py +++ b/runway/compat.py @@ -1,26 +1,19 @@ """Python dependency compatibility handling.""" import sys -from typing import Iterable - -if sys.version_info < (3, 8): # 3.7 - import shlex - - from backports.cached_property import cached_property - from importlib_metadata import PackageNotFoundError, version - - def shlex_join(split_command: Iterable[str]) -> str: - """Backport of :meth:`shlex.join`.""" - return " ".join(shlex.quote(arg) for arg in split_command) +from functools import cached_property +from importlib.metadata import PackageNotFoundError, version +from shlex import join as shlex_join +if sys.version_info < (3, 11): + from typing_extensions import Self else: - from functools import cached_property - from importlib.metadata import PackageNotFoundError, version - from shlex import join as shlex_join + from typing import Self __all__ = [ - "PackageNotFoundError", - "cached_property", - "shlex_join", - "version", + "PackageNotFoundError", # TODO (kyle): remove in next major release + "Self", + "cached_property", # TODO (kyle): remove in next major release + "shlex_join", # TODO (kyle): remove in next major release + "version", # TODO (kyle): remove in next major release ] diff --git a/runway/config/__init__.py b/runway/config/__init__.py index c6a63ca8e..a9fe14c64 100644 --- a/runway/config/__init__.py +++ b/runway/config/__init__.py @@ -7,18 +7,7 @@ import sys from pathlib import Path from string 
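# Aside (illustrative sketch, not part of this changeset): the version-gate
# pattern ``runway/compat.py`` now uses. ``typing.Self`` only exists from
# Python 3.11, so older interpreters import it from ``typing_extensions``; the
# rest of the codebase imports ``Self`` from one place and stays
# version-agnostic.
import sys

if sys.version_info < (3, 11):
    from typing_extensions import Self
else:
    from typing import Self


class Chainable:
    def tag(self, value: str) -> Self:  # subclasses keep their own return type
        self._tag = value
        return self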
import Template -from typing import ( - TYPE_CHECKING, - AbstractSet, - Any, - Dict, - List, - Mapping, - MutableMapping, - Optional, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast import yaml @@ -41,19 +30,23 @@ from .models.runway import RunwayConfigDefinitionModel, RunwayFutureDefinitionModel if TYPE_CHECKING: + from collections.abc import Mapping, MutableMapping + from packaging.specifiers import SpecifierSet from pydantic import BaseModel LOGGER = logging.getLogger(__name__) +_ModelTypeVar = TypeVar("_ModelTypeVar", bound="BaseModel") + -class BaseConfig: +class BaseConfig(Generic[_ModelTypeVar]): """Base class for configurations.""" file_path: Path - _data: BaseModel + _data: _ModelTypeVar - def __init__(self, data: BaseModel, *, path: Optional[Path] = None) -> None: + def __init__(self, data: _ModelTypeVar, *, path: Path | None = None) -> None: """Instantiate class. Args: @@ -61,22 +54,18 @@ def __init__(self, data: BaseModel, *, path: Optional[Path] = None) -> None: path: Path to the config file. """ - self._data = data.copy() + self._data = data.model_copy() self.file_path = path.resolve() if path else Path.cwd() def dump( self, *, by_alias: bool = False, - exclude: Optional[ - Union[AbstractSet[Union[int, str]], Mapping[Union[int, str], Any]] - ] = None, + exclude: set[int | str] | Mapping[int | str, Any] | None = None, exclude_defaults: bool = False, exclude_none: bool = False, exclude_unset: bool = True, - include: Optional[ - Union[AbstractSet[Union[int, str]], Mapping[Union[int, str], Any]] - ] = None, + include: set[int | str] | Mapping[int | str, Any] | None = None, ) -> str: """Dump model to a YAML string. @@ -96,7 +85,7 @@ def dump( """ return yaml.dump( - self._data.dict( + self._data.model_dump( by_alias=by_alias, exclude=exclude, # type: ignore exclude_defaults=exclude_defaults, @@ -108,7 +97,7 @@ def dump( ) @classmethod - def find_config_file(cls, path: Path) -> Optional[Path]: + def find_config_file(cls, path: Path) -> Path | None: """Find a config file in the provided path. Args: @@ -118,7 +107,7 @@ def find_config_file(cls, path: Path) -> Optional[Path]: raise NotImplementedError # cov: ignore -class CfnginConfig(BaseConfig): +class CfnginConfig(BaseConfig[CfnginConfigDefinitionModel]): """Python representation of a CFNgin config file. This is used internally by CFNgin to parse and validate a YAML formatted @@ -147,24 +136,24 @@ class CfnginConfig(BaseConfig): EXCLUDE_LIST = ["bitbucket-pipelines.yml", "buildspec.yml", "docker-compose.yml"] """Explicit files names to ignore when looking for config files.""" - cfngin_bucket: Optional[str] + cfngin_bucket: str | None """Bucket to use for CFNgin resources. (e.g. CloudFormation templates). May be an empty string. 
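# Aside (illustrative sketch, not part of this changeset): the generic-base
# refactor above. Parameterizing ``BaseConfig`` with its pydantic model type
# lets subclasses drop their ``_data: <ConcreteModel>`` re-declarations while
# type checkers still see the precise model on ``self._data``.
from __future__ import annotations

from typing import Generic, TypeVar

from pydantic import BaseModel

_ModelTypeVar = TypeVar("_ModelTypeVar", bound=BaseModel)


class BaseConfigSketch(Generic[_ModelTypeVar]):
    _data: _ModelTypeVar

    def __init__(self, data: _ModelTypeVar) -> None:
        self._data = data.model_copy()


class DemoModel(BaseModel):
    namespace: str = "demo"


class DemoConfig(BaseConfigSketch[DemoModel]):
    """``self._data`` is typed as ``DemoModel`` with no extra annotation."""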
""" - cfngin_bucket_region: Optional[str] + cfngin_bucket_region: str | None """Explicit region to use for :attr:`CfnginConfig.cfngin_bucket`""" cfngin_cache_dir: Path """Local directory to use for caching.""" - log_formats: Dict[str, str] + log_formats: dict[str, str] """Custom formatting for log messages.""" - lookups: Dict[str, str] + lookups: dict[str, str] """Register custom lookups.""" - mappings: Dict[str, Dict[str, Dict[str, Any]]] + mappings: dict[str, dict[str, dict[str, Any]]] """Mappings that will be added to all stacks.""" namespace: str @@ -176,44 +165,42 @@ class CfnginConfig(BaseConfig): package_sources: CfnginPackageSourcesDefinitionModel """Remote source locations.""" - persistent_graph_key: Optional[str] = None + persistent_graph_key: str | None = None """S3 object key were the persistent graph is stored.""" - post_deploy: List[CfnginHookDefinitionModel] + post_deploy: list[CfnginHookDefinitionModel] """Hooks to run after a deploy action.""" - post_destroy: List[CfnginHookDefinitionModel] + post_destroy: list[CfnginHookDefinitionModel] """Hooks to run after a destroy action.""" - pre_deploy: List[CfnginHookDefinitionModel] + pre_deploy: list[CfnginHookDefinitionModel] """Hooks to run before a deploy action.""" - pre_destroy: List[CfnginHookDefinitionModel] + pre_destroy: list[CfnginHookDefinitionModel] """Hooks to run before a destroy action.""" - service_role: Optional[str] + service_role: str | None """IAM role for CloudFormation to use.""" - stacks: List[CfnginStackDefinitionModel] + stacks: list[CfnginStackDefinitionModel] """Stacks to be processed.""" - sys_path: Optional[Path] + sys_path: Path | None """Relative or absolute path to use as the work directory.""" - tags: Optional[Dict[str, str]] + tags: dict[str, str] | None """Tags to apply to all resources.""" template_indent: int """Spaces to use per-indent level when outputting a template to json.""" - _data: CfnginConfigDefinitionModel - def __init__( self, data: CfnginConfigDefinitionModel, *, - path: Optional[Path] = None, - work_dir: Optional[Path] = None, + path: Path | None = None, + work_dir: Path | None = None, ) -> None: """Instantiate class. 
@@ -242,14 +229,12 @@ def __init__( self.namespace_delimiter = self._data.namespace_delimiter self.package_sources = self._data.package_sources self.persistent_graph_key = self._data.persistent_graph_key - self.post_deploy = cast(List[CfnginHookDefinitionModel], self._data.post_deploy) - self.post_destroy = cast( - List[CfnginHookDefinitionModel], self._data.post_destroy - ) - self.pre_deploy = cast(List[CfnginHookDefinitionModel], self._data.pre_deploy) - self.pre_destroy = cast(List[CfnginHookDefinitionModel], self._data.pre_destroy) + self.post_deploy = cast("list[CfnginHookDefinitionModel]", self._data.post_deploy) + self.post_destroy = cast("list[CfnginHookDefinitionModel]", self._data.post_destroy) + self.pre_deploy = cast("list[CfnginHookDefinitionModel]", self._data.pre_deploy) + self.pre_destroy = cast("list[CfnginHookDefinitionModel]", self._data.pre_destroy) self.service_role = self._data.service_role - self.stacks = cast(List[CfnginStackDefinitionModel], self._data.stacks) + self.stacks = cast("list[CfnginStackDefinitionModel]", self._data.stacks) self.sys_path = self._data.sys_path self.tags = self._data.tags self.template_indent = self._data.template_indent @@ -265,9 +250,9 @@ def load(self) -> None: register_lookup_handler(key, handler) @classmethod - def find_config_file( # type: ignore pylint: disable=arguments-differ - cls, path: Optional[Path] = None, *, exclude: Optional[List[str]] = None - ) -> List[Path]: + def find_config_file( # type: ignore + cls, path: Path | None = None, *, exclude: list[str] | None = None + ) -> list[Path]: """Find a config file in the provided path. Args: @@ -286,18 +271,14 @@ def find_config_file( # type: ignore pylint: disable=arguments-differ return [path] exclude = exclude or [] - result: List[Path] = [] + result: list[Path] = [] exclude.extend(cls.EXCLUDE_LIST) yml_files = list(path.glob("*.yml")) yml_files.extend(list(path.glob("*.yaml"))) for f in yml_files: - if ( - re.match(cls.EXCLUDE_REGEX, f.name) - or f.name in exclude - or f.name.startswith(".") - ): + if re.match(cls.EXCLUDE_REGEX, f.name) or f.name in exclude or f.name.startswith("."): continue # cov: ignore result.append(f) result.sort() @@ -307,10 +288,10 @@ def find_config_file( # type: ignore pylint: disable=arguments-differ def parse_file( cls, *, - path: Optional[Path] = None, - file_path: Optional[Path] = None, - parameters: Optional[MutableMapping[str, Any]] = None, - work_dir: Optional[Path] = None, + path: Path | None = None, + file_path: Path | None = None, + parameters: MutableMapping[str, Any] | None = None, + work_dir: Path | None = None, **kwargs: Any, ) -> CfnginConfig: """Parse a YAML file to create a config object. @@ -320,6 +301,7 @@ def parse_file( file_path: Exact path to a file to parse. parameters: Values to use when resolving a raw config. work_dir: Explicit working directory. + **kwargs: Arbitrary keyword arguments. Raises: ConfigNotFound: Provided config file was not found. @@ -349,7 +331,7 @@ def parse_file( @classmethod def parse_obj( - cls, obj: Any, *, path: Optional[Path] = None, work_dir: Optional[Path] = None + cls, obj: Any, *, path: Path | None = None, work_dir: Path | None = None ) -> CfnginConfig: """Parse a python object. @@ -359,19 +341,17 @@ def parse_obj( work_dir: Working directory. 
""" - return cls( - CfnginConfigDefinitionModel.parse_obj(obj), path=path, work_dir=work_dir - ) + return cls(CfnginConfigDefinitionModel.model_validate(obj), path=path, work_dir=work_dir) @classmethod def parse_raw( cls, data: str, *, - parameters: Optional[MutableMapping[str, Any]] = None, - path: Optional[Path] = None, + parameters: MutableMapping[str, Any] | None = None, + path: Path | None = None, skip_package_sources: bool = False, - work_dir: Optional[Path] = None, + work_dir: Path | None = None, ) -> CfnginConfig: """Parse raw data. @@ -389,9 +369,7 @@ def parse_raw( if skip_package_sources: return cls.parse_obj(yaml.safe_load(pre_rendered)) config_dict = yaml.safe_load( - cls.process_package_sources( - pre_rendered, parameters=parameters, work_dir=work_dir - ) + cls.process_package_sources(pre_rendered, parameters=parameters, work_dir=work_dir) ) return cls.parse_obj(config_dict, path=path) @@ -400,8 +378,8 @@ def process_package_sources( cls, raw_data: str, *, - parameters: Optional[MutableMapping[str, Any]] = None, - work_dir: Optional[Path] = None, + parameters: MutableMapping[str, Any] | None = None, + work_dir: Path | None = None, ) -> str: """Process the package sources defined in a rendered config. @@ -412,29 +390,27 @@ def process_package_sources( work_dir: Explicit working directory. """ - config = yaml.safe_load(raw_data) or {} + config: dict[str, Any] = yaml.safe_load(raw_data) or {} processor = SourceProcessor( - sources=CfnginPackageSourcesDefinitionModel.parse_obj( - config.get("package_sources", {}) # type: ignore + sources=CfnginPackageSourcesDefinitionModel.model_validate( + config.get("package_sources", {}) ), cache_dir=Path( - config.get( - "cfngin_cache_dir", (work_dir or Path().cwd() / ".runway") / "cache" - ) + config.get("cfngin_cache_dir", (work_dir or Path().cwd() / ".runway") / "cache") ), ) processor.get_package_sources() if processor.configs_to_merge: for i in processor.configs_to_merge: LOGGER.debug("merging in remote config: %s", i) - with open(i, "rb") as opened_file: + with i.open("rb") as opened_file: config = merge_dicts(yaml.safe_load(opened_file), config) return cls.resolve_raw_data(yaml.dump(config), parameters=parameters or {}) return raw_data @staticmethod def resolve_raw_data( - raw_data: str, *, parameters: Optional[MutableMapping[str, Any]] = None + raw_data: str, *, parameters: MutableMapping[str, Any] | None = None ) -> str: """Resolve raw data. @@ -459,24 +435,20 @@ def resolve_raw_data( return rendered -class RunwayConfig(BaseConfig): +class RunwayConfig(BaseConfig[RunwayConfigDefinitionModel]): """Python representation of a Runway config file.""" ACCEPTED_NAMES = ["runway.yml", "runway.yaml"] - deployments: List[RunwayDeploymentDefinition] + deployments: list[RunwayDeploymentDefinition] file_path: Path future: RunwayFutureDefinitionModel ignore_git_branch: bool - runway_version: Optional[SpecifierSet] - tests: List[RunwayTestDefinition[Any]] + runway_version: SpecifierSet | None + tests: list[RunwayTestDefinition] variables: RunwayVariablesDefinition - _data: RunwayConfigDefinitionModel - - def __init__( - self, data: RunwayConfigDefinitionModel, *, path: Optional[Path] = None - ) -> None: + def __init__(self, data: RunwayConfigDefinitionModel, *, path: Path | None = None) -> None: """Instantiate class. 
Args: @@ -485,9 +457,7 @@ def __init__( """ super().__init__(data, path=path) - self.deployments = [ - RunwayDeploymentDefinition(d) for d in self._data.deployments - ] + self.deployments = [RunwayDeploymentDefinition(d) for d in self._data.deployments] self.future = self._data.future self.ignore_git_branch = self._data.ignore_git_branch self.runway_version = self._data.runway_version @@ -520,8 +490,8 @@ def find_config_file(cls, path: Path) -> Path: def parse_file( cls, *, - path: Optional[Path] = None, - file_path: Optional[Path] = None, + path: Path | None = None, + file_path: Path | None = None, **kwargs: Any, ) -> RunwayConfig: """Parse a YAML file to create a config object. @@ -529,6 +499,7 @@ def parse_file( Args: path: The path to search for a config file. file_path: Exact path to a file to parse. + **kwargs: Arbitrary keyword arguments. Raises: ConfigNotFound: Provided config file was not found. @@ -538,15 +509,13 @@ def parse_file( if file_path: if not file_path.is_file(): raise ConfigNotFound(path=file_path) - return cls.parse_obj( - yaml.safe_load(file_path.read_text()), path=file_path, **kwargs - ) + return cls.parse_obj(yaml.safe_load(file_path.read_text()), path=file_path, **kwargs) if path: return cls.parse_file(file_path=cls.find_config_file(path), **kwargs) raise ValueError("must provide path or file_path") @classmethod - def parse_obj(cls, obj: Any, *, path: Optional[Path] = None) -> RunwayConfig: + def parse_obj(cls, obj: Any, *, path: Path | None = None) -> RunwayConfig: """Parse a python object into a config object. Args: @@ -554,4 +523,4 @@ def parse_obj(cls, obj: Any, *, path: Optional[Path] = None) -> RunwayConfig: path: Path to the file the object was parsed from. """ - return cls(RunwayConfigDefinitionModel.parse_obj(obj), path=path) + return cls(RunwayConfigDefinitionModel.model_validate(obj), path=path) diff --git a/runway/config/components/runway/__init__.py b/runway/config/components/runway/__init__.py index e764ae0d9..2cdc0b95d 100644 --- a/runway/config/components/runway/__init__.py +++ b/runway/config/components/runway/__init__.py @@ -2,20 +2,12 @@ from ._deployment_def import RunwayDeploymentDefinition from ._module_def import RunwayModuleDefinition -from ._test_def import ( - CfnLintRunwayTestDefinition, - RunwayTestDefinition, - ScriptRunwayTestDefinition, - YamlLintRunwayTestDefinition, -) +from ._test_def import RunwayTestDefinition from ._variables_def import RunwayVariablesDefinition __all__ = [ - "CfnLintRunwayTestDefinition", "RunwayDeploymentDefinition", "RunwayModuleDefinition", "RunwayTestDefinition", "RunwayVariablesDefinition", - "ScriptRunwayTestDefinition", - "YamlLintRunwayTestDefinition", ] diff --git a/runway/config/components/runway/_deployment_def.py b/runway/config/components/runway/_deployment_def.py index f0ffd84e2..027629e33 100644 --- a/runway/config/components/runway/_deployment_def.py +++ b/runway/config/components/runway/_deployment_def.py @@ -3,18 +3,17 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple, Union, overload +from typing import TYPE_CHECKING, Any, overload from ....exceptions import UnresolvedVariable from ....variables import Variable -from ...models.runway import ( - RunwayDeploymentDefinitionModel, - RunwayModuleDefinitionModel, -) +from ...models.runway import RunwayDeploymentDefinitionModel, RunwayModuleDefinitionModel from ._module_def import RunwayModuleDefinition from .base import ConfigComponentDefinition if TYPE_CHECKING: + from 
typing_extensions import Self + from ...models.base import ConfigProperty from ...models.runway import ( RunwayAssumeRoleDefinitionModel, @@ -25,29 +24,28 @@ LOGGER = logging.getLogger(__name__.replace("._", ".")) -class RunwayDeploymentDefinition(ConfigComponentDefinition): +class RunwayDeploymentDefinition(ConfigComponentDefinition[RunwayDeploymentDefinitionModel]): """Runway deployment definition.""" - account_alias: Optional[str] - account_id: Optional[str] + account_alias: str | None + account_id: str | None assume_role: RunwayAssumeRoleDefinitionModel environments: RunwayEnvironmentsType env_vars: RunwayEnvVarsType - module_options: Dict[str, Any] + module_options: dict[str, Any] name: str - parallel_regions: List[str] - parameters: Dict[str, Any] - regions: List[str] + parallel_regions: list[str] + parameters: dict[str, Any] + regions: list[str] - _data: RunwayDeploymentDefinitionModel - _pre_process_vars: Tuple[str, ...] = ( + _pre_process_vars: tuple[str, ...] = ( "account_alias", "account_id", "assume_role", "env_vars", "regions", ) - _supports_vars: Tuple[str, ...] = ( + _supports_vars: tuple[str, ...] = ( "account_alias", "account_id", "assume_role", @@ -81,12 +79,12 @@ def menu_entry(self) -> str: ) @property - def modules(self) -> List[RunwayModuleDefinition]: + def modules(self) -> list[RunwayModuleDefinition]: """List of Runway modules.""" return [RunwayModuleDefinition(module) for module in self._data.modules] @modules.setter - def modules(self, modules: List[RunwayModuleDefinition]) -> None: + def modules(self, modules: list[RunwayModuleDefinition]) -> None: """Set the value of the property. Args: @@ -97,12 +95,12 @@ def modules(self, modules: List[RunwayModuleDefinition]) -> None: """ if not all(isinstance(i, RunwayModuleDefinition) for i in modules): # type: ignore - raise TypeError("modules must be type List[RunwayModuleDefinition]") + raise TypeError("modules must be type list[RunwayModuleDefinition]") self._data.modules = [ - RunwayModuleDefinitionModel.parse_obj(mod.data) for mod in modules + RunwayModuleDefinitionModel.model_validate(mod.data) for mod in modules ] - def reverse(self): + def reverse(self) -> None: """Reverse the order of modules and regions.""" self._data.modules.reverse() for mod in self._data.modules: @@ -112,7 +110,7 @@ def reverse(self): prop.reverse() def set_modules( - self, modules: List[Union[RunwayModuleDefinition, RunwayModuleDefinitionModel]] + self, modules: list[RunwayModuleDefinition | RunwayModuleDefinitionModel] ) -> None: """Set the value of modules. @@ -124,13 +122,11 @@ def set_modules( """ if not isinstance(modules, list): # type: ignore - raise TypeError( - f"expected List[RunwayModuleDefinition]; got {type(modules)}" - ) - sanitized: List[RunwayModuleDefinitionModel] = [] + raise TypeError(f"expected list[RunwayModuleDefinition]; got {type(modules)}") + sanitized: list[RunwayModuleDefinitionModel] = [] for i, mod in enumerate(modules): if isinstance(mod, RunwayModuleDefinition): - sanitized.append(RunwayModuleDefinitionModel.parse_obj(mod.data)) + sanitized.append(RunwayModuleDefinitionModel.model_validate(mod.data)) elif isinstance(mod, RunwayModuleDefinitionModel): # type: ignore sanitized.append(mod) else: @@ -156,29 +152,23 @@ def _register_variable(self, var_name: str, var_value: Any) -> None: @overload @classmethod - def parse_obj( - cls, obj: List[Dict[str, Any]] - ) -> List[RunwayDeploymentDefinition]: ... + def parse_obj(cls: type[Self], obj: list[dict[str, Any]]) -> list[Self]: ... 
@overload
    @classmethod
    def parse_obj(
-        cls,
-        obj: Union[
-            List[ConfigProperty], Set[ConfigProperty], Tuple[ConfigProperty, ...]
-        ],
-    ) -> List[RunwayDeploymentDefinition]: ...
+        cls: type[Self],
+        obj: list[ConfigProperty] | set[ConfigProperty] | tuple[ConfigProperty, ...],
+    ) -> list[Self]: ...

    @overload
    @classmethod
-    def parse_obj(
-        cls, obj: Union[Dict[str, Any], ConfigProperty]
-    ) -> RunwayDeploymentDefinition: ...
+    def parse_obj(cls: type[Self], obj: dict[str, Any] | ConfigProperty) -> Self: ...

    @classmethod
-    def parse_obj(  # type: ignore
-        cls, obj: Any
-    ) -> Union[RunwayDeploymentDefinition, List[RunwayDeploymentDefinition]]:
+    def parse_obj(  # pyright: ignore[reportIncompatibleMethodOverride]
+        cls: type[Self], obj: Any
+    ) -> Self | list[Self]:
        """Parse a python object into this class.

        Args:
@@ -186,7 +176,5 @@ def parse_obj(  # type: ignore

        """
        if isinstance(obj, (list, set, tuple)):
-            return [
-                cls(RunwayDeploymentDefinitionModel.parse_obj(o)) for o in obj  # type: ignore
-            ]
-        return cls(RunwayDeploymentDefinitionModel.parse_obj(obj))
+            return [cls(RunwayDeploymentDefinitionModel.model_validate(o)) for o in obj]
+        return cls(RunwayDeploymentDefinitionModel.model_validate(obj))
diff --git a/runway/config/components/runway/_module_def.py b/runway/config/components/runway/_module_def.py
index cfb31c5ae..19bbd3046 100644
--- a/runway/config/components/runway/_module_def.py
+++ b/runway/config/components/runway/_module_def.py
@@ -2,36 +2,34 @@

 from __future__ import annotations

-from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any

 from ....variables import Variable
 from ...models.runway import RunwayModuleDefinitionModel
 from .base import ConfigComponentDefinition

 if TYPE_CHECKING:
-    from ...models.runway import (
-        RunwayEnvironmentsType,
-        RunwayEnvVarsType,
-        RunwayModuleTypeTypeDef,
-    )
+    from pathlib import Path
+
+    from typing_extensions import Self
+
+    from ...models.runway import RunwayEnvironmentsType, RunwayEnvVarsType, RunwayModuleTypeTypeDef

-class RunwayModuleDefinition(ConfigComponentDefinition):
+
+class RunwayModuleDefinition(ConfigComponentDefinition[RunwayModuleDefinitionModel]):
     """Runway module definition."""

-    class_path: Optional[str]
+    class_path: str | None
     environments: RunwayEnvironmentsType
     env_vars: RunwayEnvVarsType
     name: str
-    options: Dict[str, Any]
-    parameters: Dict[str, Any]
-    path: Optional[Union[str, Path]]
-    tags: List[str]
-    type: Optional[RunwayModuleTypeTypeDef]
-
-    _data: RunwayModuleDefinitionModel
-    _supports_vars: Tuple[str, ...] = (
+    options: dict[str, Any]
+    parameters: dict[str, Any]
+    path: str | Path | None
+    tags: list[str]
+    type: RunwayModuleTypeTypeDef | None
+
+    _supports_vars: tuple[str, ...] = (
        "class_path",
        "env_vars",
        "environments",
@@ -45,14 +43,14 @@ def __init__(self, data: RunwayModuleDefinitionModel) -> None:
        super().__init__(data)

    @property
-    def child_modules(self) -> List[RunwayModuleDefinition]:
+    def child_modules(self) -> list[RunwayModuleDefinition]:
        """List of child modules."""
        return [RunwayModuleDefinition(child) for child in self._data.parallel]

    @child_modules.setter
    def child_modules(
        self,
-        modules: List[Union[RunwayModuleDefinition, RunwayModuleDefinitionModel]],  # type: ignore
+        modules: list[RunwayModuleDefinition | RunwayModuleDefinitionModel],
    ) -> None:
        """Set the value of the property. 
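# Aside (illustrative sketch, not part of this changeset): why the overloads
# above annotate ``cls: type[Self]``. ``Self`` binds to whichever subclass
# ``parse_obj`` is called on, so subclasses get precisely-typed return values
# without re-implementing the method.
from __future__ import annotations

from typing import Any

from typing_extensions import Self


class ParsableSketch:
    def __init__(self, data: Any) -> None:
        self.data = data

    @classmethod
    def parse_obj(cls: type[Self], obj: Any) -> Self | list[Self]:
        if isinstance(obj, (list, set, tuple)):
            return [cls(o) for o in obj]
        return cls(obj)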
@@ -64,13 +62,11 @@ def child_modules( """ if not isinstance(modules, list): # type: ignore - raise TypeError( - f"expected List[RunwayModuleDefinition]; got {type(modules)}" - ) - sanitized: List[RunwayModuleDefinitionModel] = [] + raise TypeError(f"expected list[RunwayModuleDefinition]; got {type(modules)}") + sanitized: list[RunwayModuleDefinitionModel] = [] for i, mod in enumerate(modules): if isinstance(mod, RunwayModuleDefinition): - sanitized.append(RunwayModuleDefinitionModel.parse_obj(mod.data)) + sanitized.append(RunwayModuleDefinitionModel.model_validate(mod.data)) elif isinstance(mod, RunwayModuleDefinitionModel): # type: ignore sanitized.append(mod) else: @@ -89,12 +85,10 @@ def is_parent(self) -> bool: def menu_entry(self) -> str: """Return menu entry representation of this module.""" if self.is_parent: - return ( - f"{self.name} [{', '.join([c.menu_entry for c in self.child_modules])}]" - ) + return f"{self.name} [{', '.join([c.menu_entry for c in self.child_modules])}]" return self.name - def reverse(self): + def reverse(self) -> None: """Reverse the order of child/parallel modules.""" self._data.parallel.reverse() @@ -113,11 +107,11 @@ def _register_variable(self, var_name: str, var_value: Any) -> None: ) @classmethod - def parse_obj(cls, obj: Any) -> RunwayModuleDefinition: + def parse_obj(cls: type[Self], obj: object) -> Self: """Parse a python object into this class. Args: obj: The object to parse. """ - return cls(RunwayModuleDefinitionModel.parse_obj(obj)) + return cls(RunwayModuleDefinitionModel.model_validate(obj)) diff --git a/runway/config/components/runway/_test_def.py b/runway/config/components/runway/_test_def.py index 1b537d871..fb6e6cd18 100644 --- a/runway/config/components/runway/_test_def.py +++ b/runway/config/components/runway/_test_def.py @@ -2,75 +2,30 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, Generic, Tuple, TypeVar, Union +from typing import TYPE_CHECKING, Any, ClassVar from ....variables import Variable -from ...models.runway import ( - CfnLintRunwayTestArgs, - CfnLintRunwayTestDefinitionModel, - RunwayTestDefinitionModel, - ScriptRunwayTestArgs, - ScriptRunwayTestDefinitionModel, - ValidRunwayTestTypeValues, - YamlLintRunwayTestDefinitionModel, -) +from ...models.runway import RunwayTestDefinitionModel, ValidRunwayTestTypeValues from .base import ConfigComponentDefinition if TYPE_CHECKING: - from typing_extensions import Literal + from typing_extensions import Self - from ...models.base import ConfigProperty - -_DataModel = TypeVar( - "_DataModel", - CfnLintRunwayTestDefinitionModel, - RunwayTestDefinitionModel, - ScriptRunwayTestDefinitionModel, - YamlLintRunwayTestDefinitionModel, -) - - -class RunwayTestDefinition(Generic[_DataModel], ConfigComponentDefinition): +class RunwayTestDefinition(ConfigComponentDefinition[RunwayTestDefinitionModel]): """Runway test definition.""" - args: Union[Dict[str, Any], ConfigProperty] + args: dict[str, Any] name: str required: bool - type: ValidRunwayTestTypeValues + type: ClassVar[ValidRunwayTestTypeValues] - _data: RunwayTestDefinitionModel - _supports_vars: Tuple[str, ...] = ("args", "required") + _supports_vars: tuple[str, ...] 
= ("args", "required") - def __init__(self, data: _DataModel) -> None: + def __init__(self, data: RunwayTestDefinitionModel) -> None: """Instantiate class.""" super().__init__(data) - # error present on python3.7 - def __new__( # pylint: disable=arguments-differ - cls, - data: _DataModel, - ) -> RunwayTestDefinition[_DataModel]: - """Create a new instance of a class. - - Returns: - Correct subclass of RunwayTestDefinition for the given data. - - """ - if cls is not RunwayTestDefinition: - return super().__new__(cls) - if isinstance(data, CfnLintRunwayTestDefinitionModel): - return super().__new__(CfnLintRunwayTestDefinition) - if isinstance(data, ScriptRunwayTestDefinitionModel): - return super().__new__(ScriptRunwayTestDefinition) - if isinstance(data, YamlLintRunwayTestDefinitionModel): - return super().__new__(YamlLintRunwayTestDefinition) - raise TypeError( - "expected data of type CfnLintRunwayTestDefinitionModel, " - "ScriptRunwayTestDefinitionModel, or YamlLintRunwayTestDefinitionModel; " - f"got {type(data)}" - ) - def _register_variable(self, var_name: str, var_value: Any) -> None: """Register a variable. @@ -86,77 +41,11 @@ def _register_variable(self, var_name: str, var_value: Any) -> None: ) @classmethod - def parse_obj(cls, obj: Any) -> RunwayTestDefinition[_DataModel]: - """Parse a python object into this class. - - Args: - obj: The object to parse. - - """ - return cls(RunwayTestDefinitionModel.parse_obj(obj)) - - -class CfnLintRunwayTestDefinition( - RunwayTestDefinition[CfnLintRunwayTestDefinitionModel] -): - """Runway cfn-lint test definition.""" - - args: CfnLintRunwayTestArgs - type: Literal["cfn-lint"] = "cfn-lint" - - def __init__(self, data: CfnLintRunwayTestDefinitionModel) -> None: - """Instantiate class.""" - super().__init__(data) - - @classmethod - def parse_obj(cls, obj: Any) -> CfnLintRunwayTestDefinition: - """Parse a python object into this class. - - Args: - obj: The object to parse. - - """ - return cls(CfnLintRunwayTestDefinitionModel.parse_obj(obj)) - - -class ScriptRunwayTestDefinition(RunwayTestDefinition[ScriptRunwayTestDefinitionModel]): - """Runway script test definition.""" - - args: ScriptRunwayTestArgs - type: Literal["script"] = "script" - - def __init__(self, data: ScriptRunwayTestDefinitionModel) -> None: - """Instantiate class.""" - super().__init__(data) - - @classmethod - def parse_obj(cls, obj: Any) -> ScriptRunwayTestDefinition: - """Parse a python object into this class. - - Args: - obj: The object to parse. - - """ - return cls(ScriptRunwayTestDefinitionModel.parse_obj(obj)) - - -class YamlLintRunwayTestDefinition( - RunwayTestDefinition[YamlLintRunwayTestDefinitionModel] -): - """Runway yamllint test definition.""" - - type: Literal["yamllint"] = "yamllint" - - def __init__(self, data: YamlLintRunwayTestDefinitionModel) -> None: - """Instantiate class.""" - super().__init__(data) - - @classmethod - def parse_obj(cls, obj: Any) -> YamlLintRunwayTestDefinition: + def parse_obj(cls: type[Self], obj: object) -> Self: """Parse a python object into this class. Args: obj: The object to parse. 
""" - return cls(YamlLintRunwayTestDefinitionModel.parse_obj(obj)) + return cls(RunwayTestDefinitionModel.model_validate(obj)) diff --git a/runway/config/components/runway/_variables_def.py b/runway/config/components/runway/_variables_def.py index 8d02f2630..ec8b25589 100644 --- a/runway/config/components/runway/_variables_def.py +++ b/runway/config/components/runway/_variables_def.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, ClassVar, Dict, cast +from typing import TYPE_CHECKING, Any, ClassVar, cast import yaml @@ -12,6 +12,8 @@ from ...models.runway import RunwayVariablesDefinitionModel if TYPE_CHECKING: + from typing_extensions import Self + from ...._logging import RunwayLogger LOGGER = cast("RunwayLogger", logging.getLogger(__name__.replace("._", "."))) @@ -29,12 +31,11 @@ def __init__(self, data: RunwayVariablesDefinitionModel) -> None: """Instantiate class.""" self._file_path = data.file_path self._sys_path = data.sys_path - data = RunwayVariablesDefinitionModel(**{**data.dict(), **self.__load_file()}) - super().__init__(**data.dict(exclude={"file_path", "sys_path"})) + data = RunwayVariablesDefinitionModel(**{**data.model_dump(), **self.__load_file()}) + super().__init__(**data.model_dump(exclude={"file_path", "sys_path"})) - def __load_file(self) -> Dict[str, Any]: + def __load_file(self) -> dict[str, Any]: """Load a variables file.""" - # pylint: disable=protected-access if self._file_path: if self._file_path.is_file(): return yaml.safe_load(self._file_path.read_text()) @@ -52,15 +53,15 @@ def __load_file(self) -> Dict[str, Any]: "could not find %s in the current directory; continuing without a variables file", " or ".join(self.default_names), ) - self.__class__._has_notified_missing_file = True + self.__class__._has_notified_missing_file = True # noqa: SLF001 return {} @classmethod - def parse_obj(cls, obj: Any) -> RunwayVariablesDefinition: + def parse_obj(cls: type[Self], obj: Any) -> Self: """Parse a python object into this class. Args: obj: The object to parse. """ - return cls(RunwayVariablesDefinitionModel.parse_obj(obj)) + return cls(RunwayVariablesDefinitionModel.model_validate(obj)) diff --git a/runway/config/components/runway/base.py b/runway/config/components/runway/base.py index d0edec99a..faf61e477 100644 --- a/runway/config/components/runway/base.py +++ b/runway/config/components/runway/base.py @@ -4,13 +4,15 @@ import logging from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, cast +from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast from ...._logging import PrefixAdaptor from ....exceptions import UnresolvedVariable from ....variables import Variable if TYPE_CHECKING: + from typing_extensions import Self + from ...._logging import RunwayLogger from ....context import RunwayContext from ...models.base import ConfigProperty @@ -18,18 +20,20 @@ LOGGER = cast("RunwayLogger", logging.getLogger(__name__)) +_ConfigPropertyTypeVar = TypeVar("_ConfigPropertyTypeVar", bound="ConfigProperty") + -class ConfigComponentDefinition(ABC): +class ConfigComponentDefinition(ABC, Generic[_ConfigPropertyTypeVar]): """Base class for Runway config components.""" - _data: ConfigProperty - _pre_process_vars: Tuple[str, ...] = () - _supports_vars: Tuple[str, ...] = () - _vars: Dict[str, Variable] = {} + _data: _ConfigPropertyTypeVar + _pre_process_vars: tuple[str, ...] = () + _supports_vars: tuple[str, ...] 
= () + _vars: dict[str, Variable] = {} - def __init__(self, data: ConfigProperty) -> None: + def __init__(self, data: _ConfigPropertyTypeVar) -> None: """Instantiate class.""" - self._data = data.copy(deep=True) + self._data = data.model_copy(deep=True) self._vars = {} for var in self._supports_vars: @@ -37,9 +41,9 @@ def __init__(self, data: ConfigProperty) -> None: self._register_variable(var, self._data[var]) @property - def data(self) -> Dict[str, Any]: + def data(self) -> dict[str, Any]: """Return the underlying data as a dict.""" - return self._data.dict() + return self._data.model_dump() def get(self, name: str, default: Any = None) -> None: """Get a value or return default if it is not found. @@ -56,7 +60,7 @@ def resolve( context: RunwayContext, *, pre_process: bool = False, - variables: Optional[RunwayVariablesDefinition] = None, + variables: RunwayVariablesDefinition | None = None, ) -> None: """Resolve variables. @@ -96,13 +100,11 @@ def _register_variable(self, var_name: str, var_value: Any) -> None: as a variable if it contains a lookup. """ - self._vars[var_name] = Variable( - name=var_name, value=var_value, variable_type="runway" - ) + self._vars[var_name] = Variable(name=var_name, value=var_value, variable_type="runway") @classmethod @abstractmethod - def parse_obj(cls, obj: Any) -> ConfigComponentDefinition: + def parse_obj(cls: type[Self], obj: object) -> Self: """Parse a python object into this class. Args: @@ -117,7 +119,7 @@ def __contains__(self, name: str) -> bool: return name in self.__dict__ return self._data.__contains__(name) - def __getattr__(self, name: str): + def __getattr__(self, name: str) -> Any: """Implement evaluation of self.name. Args: @@ -133,12 +135,10 @@ def __getattr__(self, name: str): if name in self._vars and not self._vars[name].resolved: raise UnresolvedVariable(self._vars[name]) if name in super().__getattribute__("_data"): - return super().__getattribute__("_data").__getattribute__(name) - raise AttributeError( - f"{self.__class__.__name__} object has not attribute {name}" - ) + return super().__getattribute__("_data").__getitem__(name) + raise AttributeError(f"{self.__class__.__name__} object has no attribute {name}") - def __getitem__(self, name: str): + def __getitem__(self, name: str) -> Any: """Implement evaluation of self[name]. Args: @@ -150,8 +150,8 @@ def __getitem__(self, name: str): """ try: return self.__getattr__(name) - except AttributeError: - raise KeyError(name) from None + except AttributeError as exc: + raise KeyError(name) from exc def __setattr__(self, name: str, value: Any) -> None: """Implement evaluation of self.name = value. 
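# Aside (illustrative sketch, not part of this changeset): the delegation triad
# above in isolation. ``__getattr__`` only runs when normal lookup fails, so
# real attributes win; everything else is served from the wrapped data, and
# ``__getitem__`` reuses that path while translating ``AttributeError`` into
# ``KeyError`` (now chaining the original exception as the cause).
from typing import Any


class DelegatingSketch:
    def __init__(self, data: dict[str, Any]) -> None:
        self._data = data

    def __getattr__(self, name: str) -> Any:
        data = super().__getattribute__("_data")
        if name in data:
            return data[name]
        raise AttributeError(f"{self.__class__.__name__} object has no attribute {name}")

    def __getitem__(self, name: str) -> Any:
        try:
            return self.__getattr__(name)
        except AttributeError as exc:
            raise KeyError(name) from exc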
@@ -169,7 +169,7 @@ def __setattr__(self, name: str, value: Any) -> None: """ prop = getattr(self.__class__, name, None) if isinstance(prop, property) and prop.fset: - prop.fset(self, value) # type: ignore + prop.fset(self, value) elif isinstance(prop, property): raise AttributeError(f"setting {name} property is not supported") elif name.startswith("_") or name in dir(self): diff --git a/runway/config/models/base.py b/runway/config/models/base.py index 0a5d8e3d0..b7741eb35 100644 --- a/runway/config/models/base.py +++ b/runway/config/models/base.py @@ -2,7 +2,7 @@ from __future__ import annotations -import pydantic +from pydantic import ConfigDict from ...utils import BaseModel @@ -10,8 +10,8 @@ class ConfigProperty(BaseModel): """Base class for Runway configuration properties.""" - class Config(pydantic.BaseConfig): - """Model configuration.""" - - validate_all = True - validate_assignment = True + model_config = ConfigDict( + extra="ignore", + validate_default=True, + validate_assignment=True, + ) diff --git a/runway/config/models/cfngin/__init__.py b/runway/config/models/cfngin/__init__.py index e227ab563..77e08d6ee 100644 --- a/runway/config/models/cfngin/__init__.py +++ b/runway/config/models/cfngin/__init__.py @@ -1,26 +1,14 @@ """CFNgin config models.""" -# pylint: disable=no-self-argument from __future__ import annotations import copy import locale from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - List, - Optional, - Type, - TypeVar, - Union, - cast, -) +from typing import TYPE_CHECKING, Annotated, Any, cast import yaml -from pydantic import Extra, Field, Protocol, root_validator, validator +from pydantic import ConfigDict, Field, field_validator, model_validator from typing_extensions import Literal from .. import utils @@ -33,9 +21,8 @@ ) if TYPE_CHECKING: - from pydantic import BaseModel - - Model = TypeVar("Model", bound=BaseModel) + from pydantic.config import JsonDict + from typing_extensions import Self __all__ = [ "CfnginConfigDefinitionModel", @@ -51,154 +38,197 @@ class CfnginHookDefinitionModel(ConfigProperty): """Model for a CFNgin hook definition.""" - args: Dict[str, Any] = Field( - default={}, - title="Arguments", - description="Arguments that will be passed to the hook. (supports lookups)", - ) - data_key: Optional[str] = Field( - default=None, - description="Key to use when storing the returned result of the hook.", - ) - enabled: bool = Field(default=True, description="Whether the hook will be run.") - path: str = Field(..., description="Python importable path to the hook.") - required: bool = Field( - default=True, - description="Whether to continue execution if the hook results in an error.", - ) - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.forbid - schema_extra = { - "description": "Python classes or functions run before or after " - "deploy/destroy actions." - } - title = "CFNgin Hook Definition" + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ + "description": "Python classes or functions run before or after deploy/destroy actions." + }, + title="CFNgin Hook Definition", + validate_default=True, + validate_assignment=True, + ) + + args: Annotated[ + dict[str, Any], + Field( + title="Arguments", + description="Arguments that will be passed to the hook. 
(supports lookups)",
+        ),
+    ] = {}
+    data_key: Annotated[
+        str | None, Field(description="Key to use when storing the returned result of the hook.")
+    ] = None
+    enabled: Annotated[bool, Field(description="Whether the hook will be run.")] = True
+    path: Annotated[str, Field(description="Python importable path to the hook.")]
+    required: Annotated[
+        bool, Field(description="Whether to continue execution if the hook results in an error.")
+    ] = True
+
+
+def _stack_json_schema_extra(schema: JsonDict) -> None:
+    """Process the schema after it has been generated.
+
+    Schema is modified in place. Return value is ignored.
+
+    https://pydantic-docs.helpmanual.io/usage/schema/#schema-customization
+
+    """
+    schema["description"] = "Define CloudFormation stacks using a Blueprint or Template."
+
+    # prevents a false error when defining stacks as a dict
+    if "required" in schema and isinstance(schema["required"], list):
+        schema["required"].remove("name")
+
+    # fields that can be bool or lookup
+    if "properties" in schema and isinstance(schema["properties"], dict):
+        properties = schema["properties"]
+        for field_name in ["enabled", "locked", "protected", "termination_protection"]:
+            if field_name in properties and isinstance(properties[field_name], dict):
+                field_schema = cast("JsonDict", properties[field_name])
+                field_schema.pop("type")
+                field_schema["anyOf"] = [
+                    {"type": "boolean"},
+                    {"type": "string", "pattern": utils.CFNGIN_LOOKUP_STRING_REGEX},
+                ]
 
 
 class CfnginStackDefinitionModel(ConfigProperty):
     """Model for a CFNgin stack definition."""
 
-    class_path: Optional[str] = Field(
-        default=None,
-        title="Blueprint Class Path",
-        description="Python importable path to a blueprint class.",
-    )
-    description: Optional[str] = Field(
-        default=None,
-        title="Stack Description",
-        description="A description that will be applied to the stack in CloudFormation.",
-    )
-    enabled: bool = Field(
-        default=True, description="Whether the stack will be deployed."
-    )
-    in_progress_behavior: Optional[Literal["wait"]] = Field(
-        default=None,
-        title="Stack In Progress Behavior",
-        description="The action to take when a stack's status is "
-        "CREATE_IN_PROGRESS or UPDATE_IN_PROGRESS when trying to update it.",
-    )
-    locked: bool = Field(
-        default=False, description="Whether to limit updating of the stack."
-    )
-    name: str = Field(..., title="Stack Name", description="Name of the stack.")
-    protected: bool = Field(
-        default=False,
-        description="Whether to force all updates to the stack to be performed interactively.",
-    )
-    required_by: List[str] = Field(
-        default=[], description="Array of stacks (by name) that require this stack."
-    )
-    requires: List[str] = Field(
-        default=[], description="Array of stacks (by name) that this stack requires."
-    )
-    stack_name: Optional[str] = Field(
-        default=None,
-        title="Explicit Stack Name",
-        description="Explicit name of the stack (namespace will still be prepended).",
+    model_config = ConfigDict(
+        extra="forbid",
+        json_schema_extra=_stack_json_schema_extra,
+        title="CFNgin Stack Definition",
+        validate_default=True,
+        validate_assignment=True,
     )
-    stack_policy_path: Optional[Path] = Field(
-        default=None,
-        description="Path to a stack policy document that will be applied to the "
-        "CloudFormation stack.",
-    )
-    tags: Dict[str, Any] = Field(
-        default={}, description="Tags that will be applied to the CloudFormation stack."
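For context on the json_schema_extra callable introduced above: pydantic v2 invokes it with the generated schema so the function can edit it in place. A rough, self-contained sketch of the pattern (the model and function names here are hypothetical):

from typing import Any

from pydantic import BaseModel, ConfigDict


def _example_json_schema_extra(schema: dict[str, Any]) -> None:
    # Mutate the generated schema in place; the return value is ignored.
    schema["description"] = "Example model."
    if isinstance(schema.get("required"), list) and "name" in schema["required"]:
        schema["required"].remove("name")


class ExampleStack(BaseModel):
    model_config = ConfigDict(json_schema_extra=_example_json_schema_extra)

    name: str


print(ExampleStack.model_json_schema()["description"])  # -> "Example model."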
- ) - template_path: Optional[Path] = Field( - default=None, - description="Path to a JSON or YAML formatted CloudFormation Template.", - ) - termination_protection: bool = Field( - default=False, - description="Set the value of termination protection on the CloudFormation stack.", - ) - timeout: Optional[int] = Field( - default=None, - description="The amount of time (in minutes) that can pass before the " - "Stack status becomes CREATE_FAILED.", - ) - variables: Dict[str, Any] = Field( - default={}, - description="Parameter values that will be passed to the " - "Blueprint/CloudFormation stack. (supports lookups)", - ) - - class Config(ConfigProperty.Config): - """Model configuration options.""" - extra = Extra.forbid - title = "CFNgin Stack Definition" + class_path: Annotated[ + str | None, + Field( + title="Blueprint Class Path", description="Python importable path to a blueprint class." + ), + ] = None + """Python importable path to a blueprint class.""" + + description: Annotated[ + str | None, + Field( + title="Stack Description", + description="A description that will be applied to the stack in CloudFormation.", + ), + ] = None + """A description that will be applied to the stack in CloudFormation.""" + + enabled: Annotated[bool, Field(description="Whether the stack will be deployed.")] = True + """Whether the stack will be deployed.""" + + in_progress_behavior: Annotated[ + Literal["wait"] | None, + Field( + title="Stack In Progress Behavior", + description="The action to take when a stack's status is " + "CREATE_IN_PROGRESS or UPDATE_IN_PROGRESS when trying to update it.", + ), + ] = None + """The action to take when a Stack's status is ``CREATE_IN_PROGRESS`` or + ``UPDATE_IN_PROGRESS`` when trying to update it. - @staticmethod - def schema_extra(schema: Dict[str, Any]) -> None: # type: ignore - """Process the schema after it has been generated. + """ - Schema is modified in place. Return value is ignored. + locked: Annotated[bool, Field(description="Whether to limit updating of the stack.")] = False + """Whether to limit updating of the stack.""" - https://pydantic-docs.helpmanual.io/usage/schema/#schema-customization + name: Annotated[str, Field(title="Stack Name", description="Name of the stack.")] + """Name of the stack.""" - """ - schema["description"] = ( - "Define CloudFormation stacks using a Blueprint or Template." - ) - # prevents a false error when defining stacks as a dict - schema.get("required", ["name"]).remove("name") + protected: Annotated[ + bool, + Field( + description="Whether to force all updates to the stack to be performed interactively." 
+ ), + ] = False + """Whether to force all updates to the stack to be performed interactively.""" + + required_by: Annotated[ + list[str], Field(description="Array of stacks (by name) that require this stack.") + ] = [] + """Array of stacks (by name) that require this stack.""" + + requires: Annotated[ + list[str], Field(description="Array of stacks (by name) that this stack requires.") + ] = [] + """Array of stacks (by name) that this stack requires.""" + + stack_name: Annotated[ + str | None, + Field( + title="Explicit Stack Name", + description="Explicit name of the stack (namespace will still be prepended).", + ), + ] = None + """Explicit name of the stack (namespace will still be prepended).""" - # fields that can be bool or lookup - for prop in ["enabled", "locked", "protected", "termination_protection"]: - schema["properties"][prop].pop("type") - schema["properties"][prop]["anyOf"] = [ - {"type": "boolean"}, - {"type": "string", "pattern": utils.CFNGIN_LOOKUP_STRING_REGEX}, - ] + stack_policy_path: Annotated[ + Path | None, + Field( + description="Path to a stack policy document that will be applied to the CloudFormation stack." + ), + ] = None + """Path to a stack policy document that will be applied to the CloudFormation stack.""" + + tags: Annotated[ + dict[str, Any], Field(description="Tags that will be applied to the CloudFormation stack.") + ] = {} + """Tags that will be applied to the CloudFormation stack.""" + + template_path: Annotated[ + Path | None, Field(description="Path to a JSON or YAML formatted CloudFormation Template.") + ] = None + """Path to a JSON or YAML formatted CloudFormation Template.""" + + termination_protection: Annotated[ + bool, + Field(description="Set the value of termination protection on the CloudFormation stack."), + ] = False + """Set the value of termination protection on the CloudFormation stack.""" + + timeout: Annotated[ + int | None, + Field( + description="The amount of time (in minutes) that can pass before the Stack status becomes CREATE_FAILED." + ), + ] = None + """The amount of time (in minutes) that can pass before the Stack status becomes CREATE_FAILED.""" - _resolve_path_fields = cast( - "classmethod[Callable[..., Any]]", - validator("stack_policy_path", "template_path", allow_reuse=True)( - utils.resolve_path_field + variables: Annotated[ + dict[str, Any], + Field( + description="Parameter values that will be passed to the Blueprint/CloudFormation stack. (supports lookups)" ), + ] = {} + """Parameter values that will be passed to the Blueprint/CloudFormation stack. 
(supports lookups)""" + + _resolve_path_fields = field_validator("stack_policy_path", "template_path")( + utils.resolve_path_field ) - @root_validator(pre=True) - def _validate_class_and_template(cls, values: Dict[str, Any]) -> Dict[str, Any]: + @model_validator(mode="before") + @classmethod + def _validate_class_and_template(cls, values: dict[str, Any]) -> dict[str, Any]: """Validate class_path and template_path are not both provided.""" if values.get("class_path") and values.get("template_path"): raise ValueError("only one of class_path or template_path can be defined") return values - @root_validator(pre=True) - def _validate_class_or_template(cls, values: Dict[str, Any]) -> Dict[str, Any]: + @model_validator(mode="before") + @classmethod + def _validate_class_or_template(cls, values: dict[str, Any]) -> dict[str, Any]: """Ensure that either class_path or template_path is defined.""" - # if the stack is disabled or locked, it is ok that these are missing + # if the Stack is disabled or locked, it is ok that these are missing required = values.get("enabled", True) and not values.get("locked", False) - if ( - not values.get("class_path") - and not values.get("template_path") - and required - ): + if not values.get("class_path") and not values.get("template_path") and required: raise ValueError("either class_path or template_path must be defined") return values @@ -206,138 +236,161 @@ def _validate_class_or_template(cls, values: Dict[str, Any]) -> Dict[str, Any]: class CfnginConfigDefinitionModel(ConfigProperty): """Model for a CFNgin config definition.""" - cfngin_bucket: Optional[str] = Field( - default=None, - title="CFNgin Bucket", - description="Name of an AWS S3 bucket to use for caching CloudFormation templates. " - "Set as an empty string to disable caching.", - ) - cfngin_bucket_region: Optional[str] = Field( - default=None, - title="CFNgin Bucket Region", - description="AWS Region where the CFNgin Bucket is located. " - "If not provided, the current region is used.", - ) - cfngin_cache_dir: Optional[Path] = Field( - default=None, - title="CFNgin Cache Directory", - description="Path to a local directory that CFNgin will use for local caching.", - ) - log_formats: Dict[str, str] = Field( # TODO create model - default={}, description="Customize log message formatting by log level." - ) - lookups: Dict[str, str] = Field( - default={}, - description="Mapping of custom lookup names to a python importable path " - "for the class that will be used to resolve the lookups.", - ) - mappings: Dict[str, Dict[str, Dict[str, Any]]] = Field( - default={}, description="Mappings that will be appended to all stack templates." 
- ) - namespace: str = Field( - ..., - description="The namespace used to prefix stack names to create separation " - "within an AWS account.", - ) - namespace_delimiter: str = Field( - default="-", - description="Character used to separate the namespace and stack name " - "when the namespace is prepended.", - ) - package_sources: CfnginPackageSourcesDefinitionModel = Field( - default=CfnginPackageSourcesDefinitionModel(), - description=CfnginPackageSourcesDefinitionModel.Config.schema_extra[ - "description" - ], + model_config = ConfigDict( + extra="ignore", + json_schema_extra={"description": "Configuration file for Runway's CFNgin."}, + title="CFNgin Config File", + validate_default=True, + validate_assignment=True, ) - persistent_graph_key: Optional[str] = Field( - default=None, - description="Key for an AWS S3 object used to track a graph of stacks " - "between executions.", - ) - post_deploy: Union[ - List[CfnginHookDefinitionModel], # final type after parsing - Dict[str, CfnginHookDefinitionModel], # recommended when writing config - ] = Field(default=[], title="Post Deploy Hooks") - post_destroy: Union[ - List[CfnginHookDefinitionModel], # final type after parsing - Dict[str, CfnginHookDefinitionModel], # recommended when writing config - ] = Field(default=[], title="Pre Destroy Hooks") - pre_deploy: Union[ - List[CfnginHookDefinitionModel], # final type after parsing - Dict[str, CfnginHookDefinitionModel], # recommended when writing config - ] = Field(default=[], title="Pre Deploy Hooks") - pre_destroy: Union[ - List[CfnginHookDefinitionModel], # final type after parsing - Dict[str, CfnginHookDefinitionModel], # recommended when writing config - ] = Field(default=[], title="Pre Destroy Hooks") - service_role: Optional[str] = Field( - default=None, - title="Service Role ARN", - description="Specify an IAM Role for CloudFormation to use.", - ) - stacks: Union[ - List[CfnginStackDefinitionModel], # final type after parsing - Dict[str, CfnginStackDefinitionModel], # recommended when writing config - ] = Field( - default=[], - description="Define CloudFormation stacks using a Blueprint or Template.", - ) - sys_path: Optional[Path] = Field( - default=None, - title="sys.path", - description="Path to append to $PATH. This is also the root of relative paths.", - ) - tags: Optional[Dict[str, str]] = Field( - default=None, # None is significant here - description="Tags to try to apply to all resources created from this configuration file.", - ) - template_indent: int = Field( - default=4, - description="Number of spaces per indentation level to use when " - "rendering/outputting CloudFormation templates.", - ) - - class Config(ConfigProperty.Config): - """Model configuration.""" - schema_extra = {"description": "Configuration file for Runway's CFNgin."} - title = "CFNgin Config File" - - _resolve_path_fields = cast( - "classmethod[Callable[..., Any]]", - validator("cfngin_cache_dir", "sys_path", allow_reuse=True)( - utils.resolve_path_field + cfngin_bucket: Annotated[ + str | None, + Field( + title="CFNgin Bucket", + description="Name of an AWS S3 bucket to use for caching CloudFormation templates. " + "Set as an empty string to disable caching.", ), - ) + ] = None + cfngin_bucket_region: Annotated[ + str | None, + Field( + title="CFNgin Bucket Region", + description="AWS Region where the CFNgin Bucket is located. 
" + "If not provided, the current region is used.", + ), + ] = None + cfngin_cache_dir: Annotated[ + Path | None, + Field( + title="CFNgin Cache Directory", + description="Path to a local directory that CFNgin will use for local caching.", + ), + ] = None + log_formats: Annotated[ # TODO (kyle): create model + dict[str, str], Field(description="Customize log message formatting by log level.") + ] = {} + lookups: Annotated[ + dict[str, str], + Field( + description="Mapping of custom lookup names to a python importable path " + "for the class that will be used to resolve the lookups.", + ), + ] = {} + mappings: Annotated[ + dict[str, dict[str, dict[str, Any]]], + Field(description="Mappings that will be appended to all stack templates."), + ] = {} + namespace: Annotated[ + str, + Field( + description="The namespace used to prefix stack names to create separation " + "within an AWS account.", + ), + ] + namespace_delimiter: Annotated[ + str, + Field( + description="Character used to separate the namespace and stack name " + "when the namespace is prepended.", + ), + ] = "-" + package_sources: Annotated[ + CfnginPackageSourcesDefinitionModel, + Field( + description="Map of additional package sources to include when " + "processing this configuration file.", + ), + ] = CfnginPackageSourcesDefinitionModel() + persistent_graph_key: Annotated[ + str | None, + Field( + description="Key for an AWS S3 object used to track a graph of stacks " + "between executions.", + ), + ] = None + post_deploy: Annotated[ + list[CfnginHookDefinitionModel] | dict[str, CfnginHookDefinitionModel], + Field(title="Post Deploy Hooks"), + ] = [] + post_destroy: Annotated[ + list[CfnginHookDefinitionModel] | dict[str, CfnginHookDefinitionModel], + Field(title="Pre Destroy Hooks"), + ] = [] + pre_deploy: Annotated[ + list[CfnginHookDefinitionModel] | dict[str, CfnginHookDefinitionModel], + Field(title="Pre Deploy Hooks"), + ] = [] + pre_destroy: Annotated[ + list[CfnginHookDefinitionModel] | dict[str, CfnginHookDefinitionModel], + Field(title="Pre Destroy Hooks"), + ] = [] + service_role: Annotated[ + str | None, + Field( + title="Service Role ARN", + description="Specify an IAM Role for CloudFormation to use.", + ), + ] = None + stacks: Annotated[ + list[CfnginStackDefinitionModel] | dict[str, CfnginStackDefinitionModel], + Field( + description="Define CloudFormation stacks using a Blueprint or Template.", + ), + ] = [] + sys_path: Annotated[ + Path | None, + Field( + title="sys.path", + description="Path to append to $PATH. 
This is also the root of relative paths.", + ), + ] = None + tags: Annotated[ + dict[str, str] | None, + Field( + description="Tags to try to apply to all resources created from this configuration file.", + ), + ] = None # NOTE (kyle): `None` is significant here + template_indent: Annotated[ + int, + Field( + description="Number of spaces per indentation level to use when " + "rendering/outputting CloudFormation templates.", + ), + ] = 4 + + _resolve_path_fields = field_validator("cfngin_cache_dir", "sys_path")(utils.resolve_path_field) - @validator("post_deploy", "post_destroy", "pre_deploy", "pre_destroy", pre=True) + @field_validator("post_deploy", "post_destroy", "pre_deploy", "pre_destroy", mode="before") + @classmethod def _convert_hook_definitions( - cls, v: Union[Dict[str, Any], List[Dict[str, Any]]] - ) -> List[Dict[str, Any]]: + cls, v: dict[str, Any] | list[dict[str, Any]] + ) -> list[dict[str, Any]]: """Convert hooks defined as a dict to a list.""" if isinstance(v, list): return v return list(v.values()) - @validator("stacks", pre=True) + @field_validator("stacks", mode="before") + @classmethod def _convert_stack_definitions( - cls, v: Union[Dict[str, Any], List[Dict[str, Any]]] - ) -> List[Dict[str, Any]]: - """Convert stacks defined as a dict to a list.""" + cls, v: dict[str, Any] | list[dict[str, Any]] + ) -> list[dict[str, Any]]: + """Convert ``stacks`` defined as a dict to a list.""" if isinstance(v, list): return v - result: List[Dict[str, Any]] = [] + result: list[dict[str, Any]] = [] for name, stack in copy.deepcopy(v).items(): stack["name"] = name result.append(stack) return result - @validator("stacks") + @field_validator("stacks") + @classmethod def _validate_unique_stack_names( - cls, stacks: List[CfnginStackDefinitionModel] - ) -> List[CfnginStackDefinitionModel]: - """Validate that each stack has a unique name.""" + cls, stacks: list[CfnginStackDefinitionModel] + ) -> list[CfnginStackDefinitionModel]: + """Validate that each Stack has a unique name.""" stack_names = [stack.name for stack in stacks] if len(set(stack_names)) != len(stack_names): for i, name in enumerate(stack_names): @@ -346,38 +399,12 @@ def _validate_unique_stack_names( return stacks @classmethod - def parse_file( - cls: Type[Model], - path: Union[str, Path], - *, - content_type: Optional[str] = None, - encoding: str = "utf8", - proto: Optional[Protocol] = None, - allow_pickle: bool = False, - ) -> Model: + def parse_file( # pyright: ignore[reportIncompatibleMethodOverride] + cls: type[Self], path: str | Path + ) -> Self: """Parse a file.""" - return cast( - "Model", - cls.parse_raw( - Path(path).read_text( - encoding=locale.getpreferredencoding(do_setlocale=False) - ), - content_type=content_type, # type: ignore - encoding=encoding, - proto=proto, # type: ignore - allow_pickle=allow_pickle, - ), + return cls.model_validate( + yaml.safe_load( + Path(path).read_text(encoding=locale.getpreferredencoding(do_setlocale=False)) + ) ) - - @classmethod - def parse_raw( - cls: Type[Model], - b: Union[bytes, str], - *, - content_type: Optional[str] = None, # pylint: disable=unused-argument - encoding: str = "utf8", # pylint: disable=unused-argument - proto: Optional[Protocol] = None, # pylint: disable=unused-argument - allow_pickle: bool = False, # pylint: disable=unused-argument - ) -> Model: - """Parse raw data.""" - return cast("Model", cls.parse_obj(yaml.safe_load(b))) diff --git a/runway/config/models/cfngin/_package_sources.py b/runway/config/models/cfngin/_package_sources.py index 
b50309c8e..19f175dcd 100644 --- a/runway/config/models/cfngin/_package_sources.py +++ b/runway/config/models/cfngin/_package_sources.py @@ -1,11 +1,10 @@ """CFNgin package source models.""" -# pylint: disable=no-self-argument from __future__ import annotations -from typing import Any, Dict, List, Optional +from typing import Annotated, Any -from pydantic import Extra, Field, root_validator +from pydantic import ConfigDict, Field, model_validator from ..base import ConfigProperty @@ -25,40 +24,41 @@ class GitCfnginPackageSourceDefinitionModel(ConfigProperty): """ - branch: Optional[str] = Field( - default=None, title="Git Branch", examples=["ENV-dev", "ENV-prod", "master"] - ) - commit: Optional[str] = Field(default=None, title="Git Commit Hash") - configs: List[str] = Field( - default=[], - description="Array of paths relative to the root of the package source " - "for configuration that should be merged into the current configuration file.", - ) - paths: List[str] = Field( - default=[], - description="Array of paths relative to the root of the package source to add to $PATH.", - ) - tag: Optional[str] = Field( - default=None, title="Git Tag", examples=["1.0.0", "v1.0.0"] - ) - uri: str = Field( - ..., - title="Git Repository URI", - examples=["git@github.com:onicagroup/runway.git"], - ) - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.forbid - schema_extra: Dict[str, Any] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "description": "Information about git repositories that should be included " "in the processing of this configuration file." - } - title = "CFNgin Git Repository Package Source Definition" - - @root_validator - def _validate_one_ref(cls, values: Dict[str, Any]) -> Dict[str, Any]: + }, + title="CFNgin Git Repository Package Source Definition", + validate_default=True, + validate_assignment=True, + ) + branch: Annotated[ + str | None, Field(title="Git Branch", examples=["ENV-dev", "ENV-prod", "master"]) + ] = None + commit: Annotated[str | None, Field(title="Git Commit Hash")] = None + configs: Annotated[ + list[str], + Field( + description="Array of paths relative to the root of the package source " + "for configuration that should be merged into the current configuration file." + ), + ] = [] + paths: Annotated[ + list[str], + Field( + description="Array of paths relative to the root of the package source to add to $PATH." 
+ ), + ] = [] + tag: Annotated[str | None, Field(title="Git Tag", examples=["1.0.0", "v1.0.0"])] = None + uri: Annotated[ + str, Field(title="Git Repository URI", examples=["git@github.com:onicagroup/runway.git"]) + ] + + @model_validator(mode="before") + @classmethod + def _validate_one_ref(cls, values: dict[str, Any]) -> dict[str, Any]: """Ensure that only one ref is defined.""" ref_keys = ["branch", "commit", "tag"] count_ref_defs = sum(bool(values.get(i)) for i in ref_keys) @@ -79,30 +79,37 @@ class LocalCfnginPackageSourceDefinitionModel(ConfigProperty): """ - configs: List[str] = Field( - default=[], - description="Array of paths relative to the root of the package source " - "for configuration that should be merged into the current configuration file.", - ) - paths: List[str] = Field( - default=[], - description="Array of paths relative to the root of the package source to add to $PATH.", - ) - source: str = Field( - ..., - description="Path relative to the current configuration file that is the " - "root of the local package source.", - ) - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.forbid - schema_extra: Dict[str, Any] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "description": "Information about local directories that should be " "included in the processing of this configuration file." - } - title = "CFNgin Local Package Source Definition" + }, + title="CFNgin Local Package Source Definition", + validate_default=True, + validate_assignment=True, + ) + + configs: Annotated[ + list[str], + Field( + description="Array of paths relative to the root of the package source " + "for configuration that should be merged into the current configuration file.", + ), + ] = [] + paths: Annotated[ + list[str], + Field( + description="Array of paths relative to the root of the package source to add to $PATH." + ), + ] = [] + source: Annotated[ + str, + Field( + description="Path relative to the current configuration file that is the " + "root of the local package source." + ), + ] class S3CfnginPackageSourceDefinitionModel(ConfigProperty): @@ -120,37 +127,43 @@ class S3CfnginPackageSourceDefinitionModel(ConfigProperty): """ - bucket: str = Field(..., title="AWS S3 Bucket Name") - configs: List[str] = Field( - default=[], - description="Array of paths relative to the root of the package source " - "for configuration that should be merged into the current configuration file.", - ) - key: str = Field(..., title="AWS S3 Object Key") - paths: List[str] = Field( - default=[], - description="Array of paths relative to the root of the package source to add to $PATH.", - ) - requester_pays: bool = Field( - default=False, - description="Confirms that the requester knows that they will be charged " - "for the request.", - ) - use_latest: bool = Field( - default=True, - description="Update the local copy if the last modified date in AWS S3 changes.", - ) - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.forbid - schema_extra: Dict[str, Any] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "description": "Information about a AWS S3 objects that should be " "downloaded, unzipped, and included in the processing of " "this configuration file." 
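One behavioral note on the extra="forbid" setting these models now declare through ConfigDict: unknown keys fail validation instead of being silently dropped. A small sketch with a hypothetical model:

from pydantic import BaseModel, ConfigDict, ValidationError


class ExampleSource(BaseModel):
    model_config = ConfigDict(extra="forbid")

    uri: str


try:
    ExampleSource.model_validate({"uri": "git@example.com:org/repo.git", "brnch": "main"})
except ValidationError as exc:
    print(exc.errors()[0]["type"])  # -> "extra_forbidden"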
- } - title = "CFNgin S3 Package Source Definition" + }, + title="CFNgin S3 Package Source Definition", + validate_default=True, + validate_assignment=True, + ) + + bucket: Annotated[str, Field(title="AWS S3 Bucket Name")] + configs: Annotated[ + list[str], + Field( + description="Array of paths relative to the root of the package source " + "for configuration that should be merged into the current configuration file.", + ), + ] = [] + key: Annotated[str, Field(title="AWS S3 Object Key")] + paths: Annotated[ + list[str], + Field( + description="Array of paths relative to the root of the package source to add to $PATH." + ), + ] = [] + requester_pays: Annotated[ + bool, + Field( + description="Confirms that the requester knows that they will be charged for the request." + ), + ] = False + use_latest: Annotated[ + bool, + Field(description="Update the local copy if the last modified date in AWS S3 changes."), + ] = True class CfnginPackageSourcesDefinitionModel(ConfigProperty): @@ -163,34 +176,32 @@ class CfnginPackageSourcesDefinitionModel(ConfigProperty): """ - git: List[GitCfnginPackageSourceDefinitionModel] = Field( + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ + "description": "Map of additional package sources to include when " + "processing this configuration file." + }, + title="CFNgin Package Sources Definition", + validate_default=True, + validate_assignment=True, + ) + + git: list[GitCfnginPackageSourceDefinitionModel] = Field( default=[], title="CFNgin Git Repository Package Source Definitions", - description=GitCfnginPackageSourceDefinitionModel.Config.schema_extra[ - "description" - ], + description="Information about git repositories that should be included " + "in the processing of this configuration file.", ) - local: List[LocalCfnginPackageSourceDefinitionModel] = Field( + local: list[LocalCfnginPackageSourceDefinitionModel] = Field( default=[], title="CFNgin Local Package Source Definitions", - description=LocalCfnginPackageSourceDefinitionModel.Config.schema_extra[ - "description" - ], + description="Information about local directories that should be included " + "in the processing of this configuration file.", ) - s3: List[S3CfnginPackageSourceDefinitionModel] = Field( + s3: list[S3CfnginPackageSourceDefinitionModel] = Field( default=[], title="CFNgin S3 Package Source Definitions", - description=S3CfnginPackageSourceDefinitionModel.Config.schema_extra[ - "description" - ], + description="Information about a AWS S3 objects that should be " + "downloaded, unzipped, and included in the processing of this configuration file.", ) - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.forbid - schema_extra: Dict[str, Any] = { - "description": "Map of additional package sources to include when " - "processing this configuration file." 
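Because CfnginPackageSourcesDefinitionModel nests the three source models as lists, model_validate() coerces plain dicts recursively. A quick sketch of that behavior using hypothetical stand-ins:

from __future__ import annotations

from pydantic import BaseModel


class ExampleGitSource(BaseModel):
    uri: str
    branch: str | None = None


class ExamplePackageSources(BaseModel):
    git: list[ExampleGitSource] = []


sources = ExamplePackageSources.model_validate(
    {"git": [{"uri": "git@github.com:onicagroup/runway.git", "branch": "master"}]}
)
print(type(sources.git[0]).__name__)  # -> "ExampleGitSource"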
- } - title = "CFNgin Package Sources Definition" diff --git a/runway/config/models/runway/__init__.py b/runway/config/models/runway/__init__.py index 3d919d723..671f96b3c 100644 --- a/runway/config/models/runway/__init__.py +++ b/runway/config/models/runway/__init__.py @@ -1,311 +1,254 @@ """Runway config models.""" -# pylint: disable=no-self-argument from __future__ import annotations import locale import logging from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Generator, - List, - Optional, - Type, - TypeVar, - Union, - cast, -) +from typing import TYPE_CHECKING, Annotated, Any, TypeVar, cast import yaml from packaging.specifiers import InvalidSpecifier, SpecifierSet -from pydantic import Extra, Field, Protocol, root_validator, validator -from typing_extensions import Literal +from pydantic import ( + ConfigDict, + Field, + GetCoreSchemaHandler, + GetJsonSchemaHandler, + field_serializer, + field_validator, + model_validator, +) +from pydantic_core import CoreSchema, core_schema +from ....utils.pydantic_validators import LaxStr from .. import utils from ..base import ConfigProperty from ..utils import RUNWAY_LOOKUP_STRING_ERROR, RUNWAY_LOOKUP_STRING_REGEX -from ._builtin_tests import ( - CfnLintRunwayTestArgs, - CfnLintRunwayTestDefinitionModel, - RunwayTestDefinitionModel, - ScriptRunwayTestArgs, - ScriptRunwayTestDefinitionModel, - ValidRunwayTestTypeValues, - YamlLintRunwayTestDefinitionModel, +from ._assume_role import RunwayAssumeRoleDefinitionModel +from ._builtin_tests import RunwayTestDefinitionModel, ValidRunwayTestTypeValues +from ._future import RunwayFutureDefinitionModel +from ._module import RunwayModuleDefinitionModel +from ._region import RunwayDeploymentRegionDefinitionModel +from ._type_defs import ( + RunwayEnvironmentsType, + RunwayEnvironmentsUnresolvedType, + RunwayEnvVarsType, + RunwayEnvVarsUnresolvedType, + RunwayModuleTypeTypeDef, ) +from ._variables import RunwayVariablesDefinitionModel if TYPE_CHECKING: from pydantic import BaseModel + from pydantic.json_schema import JsonSchemaValue + from typing_extensions import Self Model = TypeVar("Model", bound=BaseModel) LOGGER = logging.getLogger(__name__) -RunwayEnvironmentsType = Dict[str, Union[bool, List[str], str]] -RunwayEnvironmentsUnresolvedType = Union[Dict[str, Union[bool, List[str], str]], str] -RunwayEnvVarsType = Dict[str, Union[List[str], str]] -RunwayEnvVarsUnresolvedType = Union[RunwayEnvVarsType, str] -RunwayModuleTypeTypeDef = Literal[ - "cdk", "cloudformation", "kubernetes", "serverless", "static", "terraform" -] __all__ = [ - "CfnLintRunwayTestArgs", - "CfnLintRunwayTestDefinitionModel", "RUNWAY_LOOKUP_STRING_ERROR", "RUNWAY_LOOKUP_STRING_REGEX", "RunwayAssumeRoleDefinitionModel", "RunwayConfigDefinitionModel", "RunwayDeploymentDefinitionModel", "RunwayDeploymentRegionDefinitionModel", - "RunwayEnvironmentsType", - "RunwayEnvironmentsUnresolvedType", "RunwayEnvVarsType", "RunwayEnvVarsUnresolvedType", + "RunwayEnvironmentsType", + "RunwayEnvironmentsUnresolvedType", "RunwayFutureDefinitionModel", "RunwayModuleDefinitionModel", "RunwayModuleTypeTypeDef", "RunwayTestDefinitionModel", "RunwayVariablesDefinitionModel", "RunwayVersionField", - "ScriptRunwayTestArgs", - "ScriptRunwayTestDefinitionModel", "ValidRunwayTestTypeValues", - "YamlLintRunwayTestDefinitionModel", ] -class RunwayAssumeRoleDefinitionModel(ConfigProperty): - """Model for a Runway assume role definition.""" +def _deployment_json_schema_extra(schema: dict[str, Any]) -> None: + """Process the 
schema after it has been generated. - arn: Optional[str] = Field( - default=None, - title="IAM Role ARN", - description="The ARN of the AWS IAM role to be assumed. (supports lookups)", - ) - duration: Union[int, str] = Field( - default=3600, - description="The duration, in seconds, of the role session. (supports lookups)", - ge=900, # applies to int json schema only - le=43_200, # applies to int json schema only - regex=RUNWAY_LOOKUP_STRING_REGEX, # applies to str json schema only - ) - post_deploy_env_revert: bool = Field( - default=False, - title="Post Deployment Environment Revert", - description="Revert the credentials stored in environment variables to " - "what they were prior to execution after the deployment finished processing. " - "(supports lookups)", - ) - session_name: str = Field( - default="runway", - description="An identifier for the assumed role session. (supports lookups)", - ) + Schema is modified in place. Return value is ignored. - class Config(ConfigProperty.Config): - """Model configuration.""" + https://pydantic-docs.helpmanual.io/usage/schema/#schema-customization - extra = Extra.forbid - schema_extra: Dict[str, Any] = { - "description": "Used to defined a role to assume while Runway is " - "processing each module.", - "examples": [ - {"arn": "arn:aws:iam::123456789012:role/name"}, - { - "arn": "${var role_arn.${env DEPLOY_ENVIRONMENT}}", - "duration": 9001, - "post_deploy_env_revert": True, - "session_name": "runway-example", - }, - ], - } - title = "Runway Deployment.assume_role Definition" - - @validator("arn") - def _convert_arn_null_value(cls, v: Optional[str]) -> Optional[str]: - """Convert a "nul" string into type(None).""" - null_strings = ["null", "none", "undefined"] - return None if isinstance(v, str) and v.lower() in null_strings else v - - @validator("duration", pre=True) - def _validate_duration(cls, v: Union[int, str]) -> Union[int, str]: - """Validate duration is within the range allowed by AWS.""" - if isinstance(v, str): - return v - if v < 900: - raise ValueError("duration must be greater than or equal to 900") - if v > 43_200: - raise ValueError("duration must be less than or equal to 43,200") - return v - - _validate_string_is_lookup = cast( - "classmethod[Callable[..., Any]]", - validator("duration", allow_reuse=True, pre=True)( - utils.validate_string_is_lookup - ), - ) + """ + schema["description"] = "A collection of modules, regions, and other configurations to deploy." + # modify schema to allow simple string or mapping definition for a module + module_ref = schema["properties"]["modules"]["items"].pop("$ref") + schema["properties"]["modules"]["items"]["anyOf"] = [ + {"$ref": module_ref}, + {"type": "string"}, + ] -class RunwayDeploymentRegionDefinitionModel(ConfigProperty): - """Model for a Runway deployment region definition.""" +class RunwayDeploymentDefinitionModel(ConfigProperty): + """Model for a Runway deployment definition.""" - parallel: Union[List[str], str] = Field( - ..., - title="Parallel Regions", - description="An array of AWS Regions to process asynchronously. 
(supports lookups)", + model_config = ConfigDict( + extra="forbid", + json_schema_extra=_deployment_json_schema_extra, + title="Runway Deployment Definition", + validate_default=True, + validate_assignment=True, ) - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.forbid - schema_extra: Dict[str, Any] = { - "description": "Only supports 'parallel' field.", - "examples": [ - {"parallel": ["us-east-1", "us-east-2"]}, - {"parallel": "${var regions.${env DEPLOY_ENVIRONMENT}}"}, + account_alias: Annotated[ + str | None, + Field( + description="Used to verify the currently assumed role or credentials. " + "(supports lookups)", + examples=["example-alias", "${var alias.${env DEPLOY_ENVIRONMENT}}"], + ), + ] = None + """Used to verify the currently assumed role or credentials. (supports lookups)""" + + account_id: Annotated[ + str | None, + LaxStr, + Field( + description="Used to verify the currently assumed role or credentials. " + "(supports lookups)", + examples=["123456789012", "${var id.${env DEPLOY_ENVIRONMENT}}"], + ), + ] = None + """Used to verify the currently assumed role or credentials. (supports lookups)""" + + assume_role: Annotated[ + str | RunwayAssumeRoleDefinitionModel | None, + Field( + description="Assume a role when processing the deployment. (supports lookups)", + examples=[ + "arn:aws:iam::123456789012:role/name", + *cast( + "dict[str, list[str]]", + RunwayAssumeRoleDefinitionModel.model_config.get("json_schema_extra", {}), + ).get("examples", []), ], - } - title = "Runway Deployment.regions Definition" - - _validate_string_is_lookup = cast( - "classmethod[Callable[..., Any]]", - validator("parallel", allow_reuse=True, pre=True)( - utils.validate_string_is_lookup ), - ) - - -class RunwayDeploymentDefinitionModel(ConfigProperty): - """Model for a Runway deployment definition.""" - - account_alias: Optional[str] = Field( - default=None, - description="Used to verify the currently assumed role or credentials. " - "(supports lookups)", - examples=["example-alias", "${var alias.${env DEPLOY_ENVIRONMENT}}"], - ) - account_id: Optional[str] = Field( - default=None, - description="Used to verify the currently assumed role or credentials. " - "(supports lookups)", - examples=["123456789012", "${var id.${env DEPLOY_ENVIRONMENT}}"], - ) - assume_role: Union[str, RunwayAssumeRoleDefinitionModel] = Field( - default={}, - description="Assume a role when processing the deployment. (supports lookups)", - examples=["arn:aws:iam::123456789012:role/name"] - + cast( - List[Any], RunwayAssumeRoleDefinitionModel.Config.schema_extra["examples"] + ] = RunwayAssumeRoleDefinitionModel() + """Assume a role when processing the deployment. (supports lookups)""" + + env_vars: Annotated[ + RunwayEnvVarsUnresolvedType, + Field( + title="Environment Variables", + description="Additional variables to add to the environment when " + "processing the deployment. (supports lookups)", + examples=[ + "${var env_vars.${env DEPLOY_ENVIRONMENT}}", + { + "EXAMPLE_VARIABLE": "value", + "KUBECONFIG": [".kube", "${env DEPLOY_ENVIRONMENT}", "config"], + }, + ], ), - ) - env_vars: RunwayEnvVarsUnresolvedType = Field( - default={}, - title="Environment Variables", - description="Additional variables to add to the environment when " - "processing the deployment. 
(supports lookups)", - examples=[ - "${var env_vars.${env DEPLOY_ENVIRONMENT}}", - { - "EXAMPLE_VARIABLE": "value", - "KUBECONFIG": [".kube", "${env DEPLOY_ENVIRONMENT}", "config"], - }, - ], - ) - environments: RunwayEnvironmentsUnresolvedType = Field( - default={}, - description="Explicitly enable/disable the deployment for a specific " - "deploy environment, AWS Account ID, and AWS Region combination. " - "Can also be set as a static boolean value. (supports lookups)", - examples=[ - "${var envs.${env DEPLOY_ENVIRONMENT}}", - {"dev": "123456789012", "prod": "us-east-1"}, - {"dev": True, "prod": False}, - {"dev": ["us-east-1"], "prod": ["us-west-2", "ca-central-1"]}, - { - "dev": ["123456789012/us-east-1", "123456789012/us-west-2"], - "prod": ["234567890123/us-east-1", "234567890123/us-west-2"], - }, - ], - ) - modules: List[RunwayModuleDefinitionModel] = Field( - ..., description="An array of modules to process as part of a deployment." - ) - module_options: Union[Dict[str, Any], str] = Field( - default={}, - description="Options that are passed directly to the modules within this deployment. " - "(supports lookups)", - examples=[ - "${var sampleapp.options.${env DEPLOY_ENVIRONMENT}}", - {"some_option": "value"}, - ], - ) - name: str = Field( - default="unnamed_deployment", - description="The name of the deployment to be displayed in logs and the " - "interactive selection menu.", - ) - parallel_regions: Union[List[str], str] = Field( - default=[], - description="An array of AWS Regions to process asynchronously. (supports lookups)", - examples=[ - ["us-east-1", "us-west-2"], - "${var regions.${dev DEPLOY_ENVIRONMENT}}", - ], - ) - parameters: Union[Dict[str, Any], str] = Field( - default={}, - description="Used to pass variable values to modules in place of an " - "environment configuration file. (supports lookups)", - examples=[ - {"namespace": "example-${env DEPLOY_ENVIRONMENT}"}, - "${var sampleapp.parameters.${env DEPLOY_ENVIRONMENT}}", - ], - ) - regions: Union[List[str], str] = Field( - default=[], - description="An array of AWS Regions to process this deployment in. (supports lookups)", - examples=[ - ["us-east-1", "us-west-2"], - "${var regions.${dev DEPLOY_ENVIRONMENT}}", - ] - + RunwayDeploymentRegionDefinitionModel.Config.schema_extra["examples"], - ) - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.forbid - title = "Runway Deployment Definition" - - @staticmethod - def schema_extra(schema: Dict[str, Any]) -> None: # type: ignore - """Process the schema after it has been generated. - - Schema is modified in place. Return value is ignored. - - https://pydantic-docs.helpmanual.io/usage/schema/#schema-customization + ] = {} + """Additional variables to add to the environment when processing the deployment. (supports lookups)""" + + environments: Annotated[ + RunwayEnvironmentsUnresolvedType, + Field( + description="Explicitly enable/disable the deployment for a specific " + "deploy environment, AWS Account ID, and AWS Region combination. " + "Can also be set as a static boolean value. 
(supports lookups)", + examples=[ + "${var envs.${env DEPLOY_ENVIRONMENT}}", + {"dev": "123456789012", "prod": "us-east-1"}, + {"dev": True, "prod": False}, + {"dev": ["us-east-1"], "prod": ["us-west-2", "ca-central-1"]}, + { + "dev": ["123456789012/us-east-1", "123456789012/us-west-2"], + "prod": ["234567890123/us-east-1", "234567890123/us-west-2"], + }, + ], + ), + ] = {} + """Explicitly enable/disable the deployment for a specific deploy environment, + AWS Account ID, and AWS Region combination. + Can also be set as a static boolean value. (supports lookups) + + """ + + modules: Annotated[ + list[RunwayModuleDefinitionModel], + Field(description="An array of modules to process as part of a deployment."), + ] + """An array of modules to process as part of a deployment.""" + + module_options: Annotated[ + dict[str, Any] | str, + Field( + description="Options that are passed directly to the modules within this deployment. " + "(supports lookups)", + examples=[ + "${var sampleapp.options.${env DEPLOY_ENVIRONMENT}}", + {"some_option": "value"}, + ], + ), + ] = {} + """Options that are passed directly to the modules within this deployment. (supports lookups)""" + + name: Annotated[ + str, + Field( + description="The name of the deployment to be displayed in logs and the " + "interactive selection menu.", + ), + ] = "unnamed_deployment" + """The name of the deployment to be displayed in logs and the interactive selection menu.""" + + parallel_regions: Annotated[ + list[str] | str, + Field( + description="An array of AWS Regions to process asynchronously. (supports lookups)", + examples=[ + ["us-east-1", "us-west-2"], + "${var regions.${dev DEPLOY_ENVIRONMENT}}", + ], + ), + ] = [] + """An array of AWS Regions to process asynchronously. (supports lookups)""" + + parameters: Annotated[ + dict[str, Any] | str, + Field( + description="Used to pass variable values to modules in place of an " + "environment configuration file. (supports lookups)", + examples=[ + {"namespace": "example-${env DEPLOY_ENVIRONMENT}"}, + "${var sampleapp.parameters.${env DEPLOY_ENVIRONMENT}}", + ], + ), + ] = {} + """Used to pass variable values to modules in place of an environment configuration file. (supports lookups)""" + + regions: Annotated[ + list[str] | str, + Field( + description="An array of AWS Regions to process this deployment in. (supports lookups)", + examples=[ + ["us-east-1", "us-west-2"], + "${var regions.${dev DEPLOY_ENVIRONMENT}}", + *cast( + "dict[str, list[str]]", + RunwayDeploymentRegionDefinitionModel.model_config.get("json_schema_extra", {}), + ).get("examples", []), + ], + ), + ] = [] + """An array of AWS Regions to process this deployment in. (supports lookups)""" - """ - schema["description"] = ( - "A collection of modules, regions, and other configurations to deploy." 
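The deployment fields above follow the v2 Annotated convention: Field() metadata lives inside Annotated[...], while the assignment keeps the default visible at a glance. A minimal sketch of the pattern (the model is hypothetical):

from __future__ import annotations

from typing import Annotated

from pydantic import BaseModel, Field


class ExampleDeployment(BaseModel):
    regions: Annotated[
        list[str] | str,
        Field(description="AWS Regions to process. (supports lookups)"),
    ] = []


print(ExampleDeployment().regions)  # -> []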
- ) - # modify schema to allow simple string or mapping definition for a module - module_ref = schema["properties"]["modules"]["items"].pop("$ref") - schema["properties"]["modules"]["items"]["anyOf"] = [ - {"$ref": module_ref}, - {"type": "string"}, - ] - - @root_validator(pre=True) - def _convert_simple_module(cls, values: Dict[str, Any]) -> Dict[str, Any]: + @model_validator(mode="before") + @classmethod + def _convert_simple_module(cls, values: dict[str, Any]) -> dict[str, Any]: """Convert simple modules to dicts.""" modules = values.get("modules", []) - result: List[Dict[str, Any]] = [] + result: list[dict[str, Any]] = [] for module in modules: if isinstance(module, str): result.append({"path": module}) @@ -314,10 +257,11 @@ def _convert_simple_module(cls, values: Dict[str, Any]) -> Dict[str, Any]: values["modules"] = result return values - @root_validator(pre=True) - def _validate_regions(cls, values: Dict[str, Any]) -> Dict[str, Any]: + @model_validator(mode="before") + @classmethod + def _validate_regions(cls, values: dict[str, Any]) -> dict[str, Any]: """Validate & simplify regions.""" - raw_regions = values.get("regions", []) + raw_regions: str | list[str] = values.get("regions", []) parallel_regions = values.get("parallel_regions", []) if all(isinstance(i, str) for i in [raw_regions, parallel_regions]): raise ValueError( @@ -328,7 +272,7 @@ def _validate_regions(cls, values: Dict[str, Any]) -> Dict[str, Any]: if isinstance(raw_regions, list): regions = raw_regions else: - regions = RunwayDeploymentRegionDefinitionModel.parse_obj(raw_regions) + regions = RunwayDeploymentRegionDefinitionModel.model_validate(raw_regions) if regions and parallel_regions: raise ValueError("only one of parallel_regions or regions can be defined") @@ -340,229 +284,54 @@ def _validate_regions(cls, values: Dict[str, Any]) -> Dict[str, Any]: values["parallel_regions"] = regions.parallel return values - _validate_string_is_lookup = cast( - "classmethod[Callable[..., Any]]", - validator( - "env_vars", - "environments", - "module_options", - "parallel_regions", - "parameters", - "regions", - allow_reuse=True, - pre=True, - )(utils.validate_string_is_lookup), - ) - - -class RunwayFutureDefinitionModel(ConfigProperty): - """Model for the Runway future definition.""" - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.forbid - schema_extra = { - "description": "Enable features/behaviors that will be become standard " - "ahead of their official release." - } - title = "Runway Future Definition" - - -class RunwayModuleDefinitionModel(ConfigProperty): - """Model for a Runway module definition.""" - - class_path: Optional[str] = Field( - default=None, - description="Import path to a custom Runway module class. (supports lookups)", - ) - env_vars: RunwayEnvVarsUnresolvedType = Field( - default={}, - title="Environment Variables", - description="Additional variables to add to the environment when " - "processing the deployment. (supports lookups)", - examples=[ - "${var env_vars.${env DEPLOY_ENVIRONMENT}}", - { - "EXAMPLE_VARIABLE": "value", - "KUBECONFIG": [".kube", "${env DEPLOY_ENVIRONMENT}", "config"], - }, - ], - ) - environments: RunwayEnvironmentsUnresolvedType = Field( - default={}, - description="Explicitly enable/disable the deployment for a specific " - "deploy environment, AWS Account ID, and AWS Region combination. " - "Can also be set as a static boolean value. 
(supports lookups)", - examples=[ - "${var envs.${env DEPLOY_ENVIRONMENT}}", - {"dev": "123456789012", "prod": "us-east-1"}, - {"dev": True, "prod": False}, - {"dev": ["us-east-1"], "prod": ["us-west-2", "ca-central-1"]}, - { - "dev": ["123456789012/us-east-1", "123456789012/us-west-2"], - "prod": ["234567890123/us-east-1", "234567890123/us-west-2"], - }, - ], - ) - name: str = Field( - default="undefined", - description="The name of the module to be displayed in logs and the " - "interactive selection menu.", - ) - options: Union[Dict[str, Any], str] = Field( - default={}, description="Module type specific options. (supports lookups)" - ) - parameters: Union[Dict[str, Any], str] = Field( - default={}, - description="Used to pass variable values to modules in place of an " - "environment configuration file. (supports lookups)", - examples=[ - {"namespace": "example-${env DEPLOY_ENVIRONMENT}"}, - "${var sampleapp.parameters.${env DEPLOY_ENVIRONMENT}}", - ], - ) - path: Optional[Union[str, Path]] = Field( - default=None, - description="Directory (relative to the Runway config file) containing IaC. " - "(supports lookups)", - examples=["./", "sampleapp-${env DEPLOY_ENVIRONMENT}.cfn", "sampleapp.sls"], - ) - tags: List[str] = Field( - default=[], - description="Array of values to categorize the module which can be used " - "with the CLI to quickly select a group of modules. " - "This field is only used by the `--tag` CLI option.", - examples=[["type:network", "app:sampleapp"]], - ) - type: Optional[RunwayModuleTypeTypeDef] = None - # needs to be last - parallel: List[RunwayModuleDefinitionModel] = Field( - default=[], - description="Array of module definitions that can be executed asynchronously. " - "Incompatible with class_path, path, and type.", - examples=[[{"path": "sampleapp-01.cfn"}, {"path": "sampleapp-02.cfn"}]], - ) - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.forbid - schema_extra = { - "description": "Defines a directory containing IaC, " - "the parameters to pass in during execution, " - "and any applicable options for the module type.", - } - title = "Runway Module Definition" - use_enum_values = True - - @root_validator(pre=True) - def _validate_name(cls, values: Dict[str, Any]) -> Dict[str, Any]: - """Validate module name.""" - if "name" in values: - return values - if "parallel" in values: - values["name"] = "parallel_parent" - return values - if "path" in values: - values["name"] = Path(values["path"]).resolve().name - return values - return values - - @root_validator(pre=True) - def _validate_path(cls, values: Dict[str, Any]) -> Dict[str, Any]: - """Validate path and sets a default value if needed.""" - if not values.get("path") and not values.get("parallel"): - values["path"] = Path.cwd() - return values - - @validator("parallel", pre=True) - def _validate_parallel( - cls, v: List[Union[Dict[str, Any], str]], values: Dict[str, Any] - ) -> List[Dict[str, Any]]: - """Validate parallel.""" - if v and values.get("path"): - raise ValueError("only one of parallel or path can be defined") - result: List[Dict[str, Any]] = [] - for mod in v: - if isinstance(mod, str): - result.append({"path": mod}) - else: - result.append(mod) - return result - - # TODO add regex to schema - _validate_string_is_lookup = cast( - "classmethod[Callable[..., Any]]", - validator( - "env_vars", - "environments", - "options", - "parameters", - allow_reuse=True, - pre=True, - )(utils.validate_string_is_lookup), - ) - - -# 
https://pydantic-docs.helpmanual.io/usage/postponed_annotations/#self-referencing-models
-RunwayModuleDefinitionModel.update_forward_refs()
-
-
-class RunwayVariablesDefinitionModel(ConfigProperty):
-    """Model for a Runway variable definition."""
-
-    file_path: Optional[Path] = Field(
-        default=None,
-        title="Variables File Path",
-        description="Explicit path to a variables file that will be loaded and "
-        "merged with the variables defined here.",
-    )
-    sys_path: Path = Field(
-        default="./",
-        description="Directory to use as the root of a relative 'file_path'. "
-        "If not provided, the current working directory is used.",
-    )
-
-    class Config(ConfigProperty.Config):
-        """Model configuration."""
-
-        extra = Extra.allow
-        schema_extra = {
-            "description": "A variable definitions for the Runway config file. "
-            "This is used to resolve the 'var' lookup.",
-        }
-        title = "Runway Variables Definition"
-
-    _convert_null_values = cast(
-        "classmethod[Callable[..., Any]]",
-        validator("*", allow_reuse=True)(utils.convert_null_values),
-    )
+    _validate_string_is_lookup = field_validator(
+        "env_vars",
+        "environments",
+        "module_options",
+        "parallel_regions",
+        "parameters",
+        "regions",
+        mode="before",
+    )(utils.validate_string_is_lookup)
 
 
 class RunwayVersionField(SpecifierSet):
     """Extends packaging.specifiers.SpecifierSet for use with pydantic."""
 
     @classmethod
-    def __get_validators__(cls) -> Generator[Callable[..., Any], None, None]:
+    def __get_pydantic_core_schema__(
+        cls, source_type: Any, handler: GetCoreSchemaHandler
+    ) -> CoreSchema:
         """Yield one or more validators that will be called to validate the input.
 
         Each validator will receive, as input, the value returned from the
         previous validator.
 
         """
-        yield cls._convert_value
+        assert source_type is RunwayVersionField  # noqa: S101
+        return core_schema.no_info_before_validator_function(
+            cls._convert_value,
+            core_schema.any_schema(),
+            serialization=core_schema.plain_serializer_function_ser_schema(
+                str, info_arg=False, return_schema=core_schema.str_schema()
+            ),
+        )
 
     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+    def __get_pydantic_json_schema__(
+        cls, schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
+    ) -> JsonSchemaValue:  # cov: ignore
         """Mutate the field schema in place.
 
         This is only called when outputting JSON schema from a model.
 
         """
-        field_schema.update(type="string")  # cov: ignore
+        json_schema = handler(schema)
+        json_schema = handler.resolve_ref_schema(json_schema)
+        json_schema["type"] = "string"
+        return json_schema
 
     @classmethod
-    def _convert_value(cls, v: Union[str, SpecifierSet]) -> RunwayVersionField:
+    def _convert_value(cls, v: str | SpecifierSet) -> RunwayVersionField:
         """Convert runway_version string into SpecifierSet with some value handling.
 
         Args:
@@ -589,40 +358,63 @@ def _convert_value(cls, v: Union[str, SpecifierSet]) -> RunwayVersionField:
 class RunwayConfigDefinitionModel(ConfigProperty):
     """Runway configuration definition model."""
 
-    deployments: List[RunwayDeploymentDefinitionModel] = Field(
-        default=[], description="Array of Runway deployments definitions."
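The __get_pydantic_core_schema__ hook above replaces v1's __get_validators__. A rough sketch of the same shape, a "before" validator feeding an inner schema, using a hypothetical type rather than the Runway implementation:

from typing import Any

from pydantic import BaseModel, GetCoreSchemaHandler
from pydantic_core import CoreSchema, core_schema


class ExampleVersion(str):
    @classmethod
    def __get_pydantic_core_schema__(
        cls, source_type: Any, handler: GetCoreSchemaHandler
    ) -> CoreSchema:
        # Run _convert on the raw input before the inner str schema validates it.
        return core_schema.no_info_before_validator_function(
            cls._convert, core_schema.str_schema()
        )

    @classmethod
    def _convert(cls, v: Any) -> str:
        # Normalize bare versions ("1.2.3") into specifiers ("==1.2.3").
        text = str(v)
        return text if text.startswith(("<", ">", "=", "!", "~")) else f"=={text}"


class ExampleModel(BaseModel):
    version: ExampleVersion


print(ExampleModel(version="1.2.3").version)  # -> "==1.2.3"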
- ) - future: RunwayFutureDefinitionModel = RunwayFutureDefinitionModel() - ignore_git_branch: bool = Field( - default=False, - description="Optionally exclude the git branch name when determining the " - "current deploy environment.", - ) - runway_version: Optional[RunwayVersionField] = Field( - default=">1.10", - description="Define the versions of Runway that can be used with this " - "configuration file.", - examples=['"<2.0.0"', '"==1.14.0"', '">=1.14.0,<2.0.0"'], - ) - tests: List[RunwayTestDefinitionModel] = Field( - default=[], - description="Array of Runway test definitions that are executed with the 'test' command.", - ) - variables: RunwayVariablesDefinitionModel = RunwayVariablesDefinitionModel() + model_config = ConfigDict( + arbitrary_types_allowed=True, + json_schema_extra={ + "description": "Configuration file for use with Runway.", + }, + extra="forbid", + title="Runway Configuration File", + validate_default=True, + validate_assignment=True, + ) + + deployments: Annotated[ + list[RunwayDeploymentDefinitionModel], + Field(description="Array of Runway deployments definitions."), + ] = [] + """Array of Runway deployments definitions.""" + + future: Annotated[ + RunwayFutureDefinitionModel, + Field(description="Enable future features before they become default behavior."), + ] = RunwayFutureDefinitionModel() + """Enable future features before they become default behavior.""" + + ignore_git_branch: Annotated[ + bool, + Field( + description="Optionally exclude the git branch name when determining the " + "current deploy environment.", + ), + ] = False + """Optionally exclude the git branch name when determining the current deploy environment.""" + + runway_version: Annotated[ + RunwayVersionField | None, + Field( + description="Define the versions of Runway that can be used with this " + "configuration file.", + examples=['"<2.0.0"', '"==1.14.0"', '">=1.14.0,<2.0.0"'], + ), + ] = None + """Define the versions of Runway that can be used with this configuration file.""" - class Config(ConfigProperty.Config): - """Model configuration.""" + tests: Annotated[ + list[RunwayTestDefinitionModel], + Field( + description="Array of Runway test definitions that are executed with the 'test' command." 
+ ), + ] = [] - extra = Extra.forbid - schema_extra = { - "description": "Configuration file for use with Runway.", - } - title = "Runway Configuration File" - validate_all = True - validate_assignment = True + variables: Annotated[RunwayVariablesDefinitionModel, Field(description="Runway variables.")] = ( + RunwayVariablesDefinitionModel() + ) + """Runway variables.""" - @root_validator(pre=True) - def _add_deployment_names(cls, values: Dict[str, Any]) -> Dict[str, Any]: + @model_validator(mode="before") + @classmethod + def _add_deployment_names(cls, values: dict[str, Any]) -> dict[str, Any]: """Add names to deployments that are missing them.""" deployments = values.get("deployments", []) for i, deployment in enumerate(deployments): @@ -631,43 +423,18 @@ def _add_deployment_names(cls, values: Dict[str, Any]) -> Dict[str, Any]: values["deployments"] = deployments return values + @field_serializer("runway_version", when_used="json-unless-none") + def _serialize_runway_version(self, runway_version: RunwayVersionField, _info: Any) -> str: + """Serialize ``runway_version`` field when dumping to JSON.""" + return str(runway_version) + @classmethod - def parse_file( - cls: Type[Model], - path: Union[str, Path], - *, - content_type: Optional[str] = None, - encoding: str = "utf8", - proto: Optional[Protocol] = None, - allow_pickle: bool = False, - ) -> Model: + def parse_file( # pyright: ignore[reportIncompatibleMethodOverride] + cls: type[Self], path: str | Path + ) -> Self: """Parse a file.""" - return cast( - "Model", - cls.parse_raw( - Path(path).read_text( - encoding=locale.getpreferredencoding(do_setlocale=False) - ), - content_type=content_type, # type: ignore - encoding=encoding, - proto=proto, # type: ignore - allow_pickle=allow_pickle, - ), + return cls.model_validate( + yaml.safe_load( + Path(path).read_text(encoding=locale.getpreferredencoding(do_setlocale=False)) + ) ) - - @classmethod - def parse_raw( - cls: Type[Model], - b: Union[bytes, str], - *, - content_type: Optional[str] = None, # pylint: disable=unused-argument - encoding: str = "utf8", # pylint: disable=unused-argument - proto: Optional[Protocol] = None, # pylint: disable=unused-argument - allow_pickle: bool = False, # pylint: disable=unused-argument - ) -> Model: - """Parse raw data.""" - return cast("Model", cls.parse_obj(yaml.safe_load(b))) - - -# https://pydantic-docs.helpmanual.io/usage/postponed_annotations/#self-referencing-models -RunwayDeploymentDefinitionModel.update_forward_refs() diff --git a/runway/config/models/runway/_assume_role.py b/runway/config/models/runway/_assume_role.py new file mode 100644 index 000000000..347db5ce0 --- /dev/null +++ b/runway/config/models/runway/_assume_role.py @@ -0,0 +1,97 @@ +"""Runway ``assume_role`` definition model.""" + +from __future__ import annotations + +from typing import Annotated + +from pydantic import ConfigDict, Field, field_validator + +from .. 
import utils
+from ..base import ConfigProperty
+from ..utils import RUNWAY_LOOKUP_STRING_REGEX
+
+
+class RunwayAssumeRoleDefinitionModel(ConfigProperty):
+    """Used to define a role to assume while Runway is processing each module."""
+
+    model_config = ConfigDict(
+        extra="forbid",
+        json_schema_extra={
+            "examples": [
+                {"arn": "arn:aws:iam::123456789012:role/name"},
+                {
+                    "arn": "${var role_arn.${env DEPLOY_ENVIRONMENT}}",
+                    "duration": 9001,
+                    "post_deploy_env_revert": True,
+                    "session_name": "runway-example",
+                },
+            ],
+        },
+        title="Runway Deployment.assume_role Definition",
+        validate_default=True,
+        validate_assignment=True,
+    )
+
+    arn: Annotated[
+        str | None,
+        Field(description="The ARN of the AWS IAM role to be assumed. (supports lookups)"),
+    ] = None
+    """The ARN of the AWS IAM role to be assumed. (supports lookups)"""
+
+    duration: (
+        Annotated[
+            int,
+            Field(
+                description="The duration, in seconds, of the role session. (supports lookups)",
+                ge=900,
+                le=43200,
+            ),
+        ]
+        | Annotated[str, Field(pattern=RUNWAY_LOOKUP_STRING_REGEX)]
+    ) = 3600
+    """The duration, in seconds, of the role session. (supports lookups)"""
+
+    post_deploy_env_revert: Annotated[
+        bool,
+        Field(
+            title="Post Deployment Environment Revert",
+            description="Revert the credentials stored in environment variables to "
+            "what they were prior to execution after the deployment finished processing. "
+            "(supports lookups)",
+        ),
+    ] = False
+    """Revert the credentials stored in environment variables to what they were
+    prior to execution after the deployment finished processing. (supports lookups)
+
+    """
+
+    session_name: Annotated[
+        str,
+        Field(
+            description="An identifier for the assumed role session. (supports lookups)",
+        ),
+    ] = "runway"
+    """An identifier for the assumed role session. (supports lookups)"""
+
+    @field_validator("arn")
+    @classmethod
+    def _convert_arn_null_value(cls, v: str | None) -> str | None:
+        """Convert a "null" string into type(None)."""
+        null_strings = ["null", "none", "undefined"]
+        return None if isinstance(v, str) and v.lower() in null_strings else v
+
+    @field_validator("duration", mode="before")
+    @classmethod
+    def _validate_duration(cls, v: int | str) -> int | str:
+        """Validate duration is within the range allowed by AWS."""
+        if isinstance(v, str):
+            return v
+        if v < 900:
+            raise ValueError("duration must be greater than or equal to 900")
+        if v > 43_200:
+            raise ValueError("duration must be less than or equal to 43,200")
+        return v
+
+    _validate_string_is_lookup = field_validator("duration", mode="before")(
+        utils.validate_string_is_lookup
+    )
diff --git a/runway/config/models/runway/_builtin_tests.py b/runway/config/models/runway/_builtin_tests.py
index db66e3160..cb4d8af0e 100644
--- a/runway/config/models/runway/_builtin_tests.py
+++ b/runway/config/models/runway/_builtin_tests.py
@@ -2,190 +2,45 @@

 from __future__ import annotations

-from typing import TYPE_CHECKING, Any, Dict, List, Union, cast
+from typing import Annotated, Any

-from pydantic import Extra, Field, validator
+from pydantic import ConfigDict, Field, field_validator
 from typing_extensions import Literal

 from .. 
import utils from ..base import ConfigProperty -if TYPE_CHECKING: - from typing import Callable - ValidRunwayTestTypeValues = Literal["cfn-lint", "script", "yamllint"] class RunwayTestDefinitionModel(ConfigProperty): """Model for a Runway test definition.""" - args: Union[Dict[str, Any], ConfigProperty, str] = Field( - default={}, - title="Arguments", - description="Arguments to be passed to the test. Support varies by test type.", - ) - name: str = Field(default="test-name", description="Name of the test.") - required: Union[bool, str] = Field( - default=False, - description="Whether the test must pass for subsequent tests to be run.", - ) - type: ValidRunwayTestTypeValues - - class Config(ConfigProperty.Config): - """Model configuration.""" - - schema_extra = { + model_config = ConfigDict( + extra="ignore", + json_schema_extra={ "description": "Tests that can be run via the 'test' command.", - } - title = "Runway Test Definition" - use_enum_values = True - - def __new__(cls, **kwargs: Any) -> RunwayTestDefinitionModel: - """Create a new instance of a class. - - Returns: - Correct subclass of RunwayTestDefinition for the given data. - - """ - test_type = kwargs.get("type") - if cls is RunwayTestDefinitionModel: - if test_type == "cfn-lint": - return super().__new__(CfnLintRunwayTestDefinitionModel) - if test_type == "script": - return super().__new__(ScriptRunwayTestDefinitionModel) - if test_type == "yamllint": - return super().__new__(YamlLintRunwayTestDefinitionModel) - return super().__new__(cls) - - # TODO add regex to schema - _validate_string_is_lookup = cast( - "classmethod[Callable[..., Any]]", - validator("args", "required", allow_reuse=True, pre=True)( - utils.validate_string_is_lookup - ), - ) - - -class CfnLintRunwayTestArgs(ConfigProperty): - """Model for the args of a cfn-lint test.""" - - cli_args: Union[List[str], str] = Field( - default=[], - title="CLI Arguments", - description="Array of arguments to pass to the cfn-lint CLI.", - ) - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.forbid - schema_extra = { - "description": "Arguments supported by the cfn-lint test.", - } - title = "cfn-lint Runway Test Arguments" - - # TODO add regex to schema - _validate_string_is_lookup = cast( - "classmethod[Callable[..., Any]]", - validator("cli_args", allow_reuse=True, pre=True)( - utils.validate_string_is_lookup + }, + title="Runway Test Definition", + use_enum_values=True, + validate_assignment=True, + validate_default=True, + ) + + args: Annotated[ + dict[str, Any] | str, + Field( + title="Arguments", + description="Arguments to be passed to the test. Support varies by test type.", ), - ) - - -class CfnLintRunwayTestDefinitionModel(RunwayTestDefinitionModel): - """Model for a cfn-lint test definition.""" - - args: CfnLintRunwayTestArgs = Field( - default=CfnLintRunwayTestArgs(), - title="Arguments", - description="Arguments to be passed to the test.", - ) - name: str = Field(default="cfn-lint", description="Name of the test.") - required: Union[bool, str] = Field( - default=False, - description="Whether the test must pass for subsequent tests to be run.", - ) - type: Literal["cfn-lint"] = Field( - default="cfn-lint", description="The type of test to run." 
-    )
-
-    class Config(RunwayTestDefinitionModel.Config):
-        """Model configuration."""
-
-        schema_extra = {
-            "description": "Test using cfn-lint.",
-        }
-        title = "cfn-lint Test"
-
-
-class ScriptRunwayTestArgs(ConfigProperty):
-    """Model for the args of a script test."""
-
-    commands: Union[List[str], str] = Field(
-        default=[], description="Array of commands that will be run for this test."
-    )
-
-    class Config(ConfigProperty.Config):
-        """Model configuration."""
-
-        extra = Extra.forbid
-        schema_extra = {
-            "description": "Arguments supported by the script test.",
-        }
-        title = "Script Runway Test Arguments"
-
-    # TODO add regex to schema
-    _validate_string_is_lookup = cast(
-        "classmethod[Callable[..., Any]]",
-        validator("commands", allow_reuse=True, pre=True)(
-            utils.validate_string_is_lookup
-        ),
-    )
-
-
-class ScriptRunwayTestDefinitionModel(RunwayTestDefinitionModel):
-    """Model for a script test definition."""
-
-    args: ScriptRunwayTestArgs = Field(
-        default=ScriptRunwayTestArgs(),
-        title="Arguments",
-        description="Arguments to be passed to the test.",
-    )
-    name: str = Field(default="script", description="Name of the test.")
-    required: Union[bool, str] = Field(
-        default=False,
-        description="Whether the test must pass for subsequent tests to be run.",
-    )
-    type: Literal["script"] = Field(
-        default="script", description="The type of test to run."
-    )
-
-    class Config(RunwayTestDefinitionModel.Config):
-        """Model configuration."""
-
-        schema_extra = {
-            "description": "Test using a custom script.",
-        }
-        title = "Script Test"
-
-
-class YamlLintRunwayTestDefinitionModel(RunwayTestDefinitionModel):
-    """Model for a yamllint test definition."""
+    ] = {}
+    name: Annotated[str, Field(description="Name of the test.")] = "test-name"
+    required: Annotated[
+        bool | str, Field(description="Whether the test must pass for subsequent tests to be run.")
+    ] = False
+    type: ValidRunwayTestTypeValues

-    name: str = Field(default="yamllint", description="Name of the test.")
-    required: Union[bool, str] = Field(
-        default=False,
-        description="Whether the test must pass for subsequent tests to be run.",
+    # TODO (kyle): add regex to schema
+    _validate_string_is_lookup = field_validator("args", "required", mode="before")(
+        utils.validate_string_is_lookup
     )
-    type: Literal["yamllint"] = Field(
-        default="yamllint", description="The type of test to run."
-    )
-
-    class Config(RunwayTestDefinitionModel.Config):
-        """Model configuration."""
-
-        schema_extra = {
-            "description": "Test using yamllint.",
-        }
-        title = "yamllint Test"
diff --git a/runway/config/models/runway/_future.py b/runway/config/models/runway/_future.py
new file mode 100644
index 000000000..813f2b62b
--- /dev/null
+++ b/runway/config/models/runway/_future.py
@@ -0,0 +1,20 @@
+"""Runway ``future`` definition model."""
+
+from pydantic import ConfigDict
+
+from ..base import ConfigProperty
+
+
+class RunwayFutureDefinitionModel(ConfigProperty):
+    """Enable features/behaviors that will become standard ahead of their official release."""
+
+    model_config = ConfigDict(
+        extra="forbid",
+        json_schema_extra={
+            "description": "Enable features/behaviors that will become standard "
+            "ahead of their official release."
+ }, + title="Runway Future Definition", + validate_default=True, + validate_assignment=True, + ) diff --git a/runway/config/models/runway/_module.py b/runway/config/models/runway/_module.py new file mode 100644 index 000000000..d384b0c5d --- /dev/null +++ b/runway/config/models/runway/_module.py @@ -0,0 +1,200 @@ +"""Runway Module definition model.""" + +from __future__ import annotations + +from pathlib import Path +from typing import Annotated, Any + +from pydantic import ConfigDict, Field, ValidationInfo, field_validator, model_validator + +from .. import utils +from ..base import ConfigProperty +from ._type_defs import ( + RunwayEnvironmentsUnresolvedType, + RunwayEnvVarsUnresolvedType, + RunwayModuleTypeTypeDef, +) + + +class RunwayModuleDefinitionModel(ConfigProperty): + """Defines a directory containing IaC, parameters, and options for the module type.""" + + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ + "description": "Defines a directory containing IaC, " + "the parameters to pass in during execution, " + "and any applicable options for the module type.", + }, + title="Runway Module Definition", + use_enum_values=True, + validate_default=True, + validate_assignment=True, + ) + + class_path: Annotated[ + str | None, + Field( + description="Import path to a custom Runway module class. (supports lookups)", + ), + ] = None + """Import path to a custom Runway module class. (supports lookups)""" + + env_vars: Annotated[ + RunwayEnvVarsUnresolvedType, + Field( + title="Environment Variables", + description="Additional variables to add to the environment when " + "processing the deployment. (supports lookups)", + examples=[ + "${var env_vars.${env DEPLOY_ENVIRONMENT}}", + { + "EXAMPLE_VARIABLE": "value", + "KUBECONFIG": [".kube", "${env DEPLOY_ENVIRONMENT}", "config"], + }, + ], + ), + ] = {} + """Additional variables to add to the environment when processing the deployment. (supports lookups)""" + + environments: Annotated[ + RunwayEnvironmentsUnresolvedType, + Field( + description="Explicitly enable/disable the deployment for a specific " + "deploy environment, AWS Account ID, and AWS Region combination. " + "Can also be set as a static boolean value. (supports lookups)", + examples=[ + "${var envs.${env DEPLOY_ENVIRONMENT}}", + {"dev": "123456789012", "prod": "us-east-1"}, + {"dev": True, "prod": False}, + {"dev": ["us-east-1"], "prod": ["us-west-2", "ca-central-1"]}, + { + "dev": ["123456789012/us-east-1", "123456789012/us-west-2"], + "prod": ["234567890123/us-east-1", "234567890123/us-west-2"], + }, + ], + ), + ] = {} + """Explicitly enable/disable the deployment for a specific deploy environment, + AWS Account ID, and AWS Region combination. + Can also be set as a static boolean value. (supports lookups) + + """ + + name: Annotated[ + str, + Field( + description="The name of the module to be displayed in logs and the " + "interactive selection menu." + ), + ] = "undefined" + """The name of the module to be displayed in logs and the interactive selection menu.""" + + options: Annotated[ + dict[str, Any] | str, Field(description="Module type specific options. (supports lookups)") + ] = {} + """Module type specific options. (supports lookups)""" + + parameters: Annotated[ + dict[str, Any] | str, + Field( + description="Used to pass variable values to modules in place of an " + "environment configuration file. 
(supports lookups)",
+            examples=[
+                {"namespace": "example-${env DEPLOY_ENVIRONMENT}"},
+                "${var sampleapp.parameters.${env DEPLOY_ENVIRONMENT}}",
+            ],
+        ),
+    ] = {}
+    """Used to pass variable values to modules in place of an environment configuration file. (supports lookups)"""
+
+    path: Annotated[
+        Path | str | None,
+        Field(
+            description="Directory (relative to the Runway config file) containing IaC. "
+            "(supports lookups)",
+            examples=["./", "sampleapp-${env DEPLOY_ENVIRONMENT}.cfn", "sampleapp.sls"],
+        ),
+    ] = None
+    """Directory (relative to the Runway config file) containing IaC. (supports lookups)"""
+
+    tags: Annotated[
+        list[str],
+        Field(
+            description="Array of values to categorize the module which can be used "
+            "with the CLI to quickly select a group of modules. "
+            "This field is only used by the `--tag` CLI option.",
+            examples=[["type:network", "app:sampleapp"]],
+        ),
+    ] = []
+    """Array of values to categorize the module which can be used with the CLI to
+    quickly select a group of modules.
+
+    This field is only used by the ``--tag`` CLI option.
+
+    """
+
+    type: Annotated[
+        RunwayModuleTypeTypeDef | None,
+        Field(
+            description="Explicitly define the module type. If not provided, this will be inferred."
+        ),
+    ] = None
+    """Explicitly define the module type. If not provided, this will be inferred."""
+
+    # needs to be last
+    parallel: Annotated[
+        list[RunwayModuleDefinitionModel],
+        Field(
+            description="Array of module definitions that can be executed asynchronously. "
+            "Incompatible with class_path, path, and type.",
+            examples=[[{"path": "sampleapp-01.cfn"}, {"path": "sampleapp-02.cfn"}]],
+        ),
+    ] = []
+    """List of module definitions that can be executed asynchronously.
+    Incompatible with class_path, path, and type.
+
+    """
+
+    @model_validator(mode="before")
+    @classmethod
+    def _validate_name(cls, values: dict[str, Any]) -> dict[str, Any]:
+        """Validate module name."""
+        if "name" in values:
+            return values
+        if "parallel" in values:
+            values["name"] = "parallel_parent"
+            return values
+        if "path" in values:
+            values["name"] = Path(values["path"]).resolve().name
+            return values
+        return values
+
+    @model_validator(mode="before")
+    @classmethod
+    def _validate_path(cls, values: dict[str, Any]) -> dict[str, Any]:
+        """Validate path and set a default value if needed."""
+        if not values.get("path") and not values.get("parallel"):
+            values["path"] = Path.cwd()
+        return values
+
+    @field_validator("parallel", mode="before")
+    @classmethod
+    def _validate_parallel(
+        cls, v: list[dict[str, Any] | str], info: ValidationInfo
+    ) -> list[dict[str, Any]]:
+        """Validate parallel."""
+        if v and info.data.get("path"):
+            raise ValueError("only one of parallel or path can be defined")
+        result: list[dict[str, Any]] = []
+        for mod in v:
+            if isinstance(mod, str):
+                result.append({"path": mod})
+            else:
+                result.append(mod)
+        return result
+
+    # TODO(kyle): add regex to schema
+    _validate_string_is_lookup = field_validator(
+        "env_vars", "environments", "options", "parameters", mode="before"
+    )(utils.validate_string_is_lookup)
diff --git a/runway/config/models/runway/_region.py b/runway/config/models/runway/_region.py
new file mode 100644
index 000000000..43bff06c8
--- /dev/null
+++ b/runway/config/models/runway/_region.py
@@ -0,0 +1,40 @@
+"""Runway deployment ``region`` definition model."""
+
+from __future__ import annotations
+
+from typing import Annotated
+
+from pydantic import ConfigDict, Field, field_validator
+
+from .. 
import utils
+from ..base import ConfigProperty
+
+
+class RunwayDeploymentRegionDefinitionModel(ConfigProperty):
+    """Only supports ``parallel`` field."""
+
+    model_config = ConfigDict(
+        extra="forbid",
+        json_schema_extra={
+            "examples": [
+                {"parallel": ["us-east-1", "us-east-2"]},
+                {"parallel": "${var regions.${env DEPLOY_ENVIRONMENT}}"},
+            ],
+        },
+        title="Runway Deployment.regions Definition",
+        validate_default=True,
+        validate_assignment=True,
+    )
+
+    parallel: Annotated[
+        list[str] | str,
+        Field(
+            title="Parallel Regions",
+            description="An array of AWS Regions to process asynchronously. (supports lookups)",
+        ),
+    ]
+    """A list of AWS Regions to process asynchronously. (supports lookups)"""
+
+    _validate_string_is_lookup = field_validator("parallel", mode="before")(
+        utils.validate_string_is_lookup
+    )
diff --git a/runway/config/models/runway/_type_defs.py b/runway/config/models/runway/_type_defs.py
new file mode 100644
index 000000000..45370bfea
--- /dev/null
+++ b/runway/config/models/runway/_type_defs.py
@@ -0,0 +1,16 @@
+"""Type definitions."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Literal, Union
+
+if TYPE_CHECKING:
+    from typing_extensions import TypeAlias
+
+RunwayEnvironmentsType: TypeAlias = dict[str, Union[bool, list[str], str]]
+RunwayEnvironmentsUnresolvedType: TypeAlias = Union[dict[str, Union[bool, list[str], str]], str]
+RunwayEnvVarsType: TypeAlias = dict[str, Union[list[str], str]]
+RunwayEnvVarsUnresolvedType: TypeAlias = Union[RunwayEnvVarsType, str]
+RunwayModuleTypeTypeDef: TypeAlias = Literal[
+    "cdk", "cloudformation", "kubernetes", "serverless", "static", "terraform"
+]
diff --git a/runway/config/models/runway/_variables.py b/runway/config/models/runway/_variables.py
new file mode 100644
index 000000000..bc461f087
--- /dev/null
+++ b/runway/config/models/runway/_variables.py
@@ -0,0 +1,54 @@
+"""Runway ``variables`` definition model."""
+
+from __future__ import annotations
+
+from pathlib import Path
+from typing import Annotated
+
+from pydantic import ConfigDict, Field, field_validator
+
+from .. import utils
+from ..base import ConfigProperty
+
+
+class RunwayVariablesDefinitionModel(ConfigProperty):
+    """Variable definitions for the Runway config file.
+
+    This is used to resolve the ``var`` lookup.
+
+    """
+
+    model_config = ConfigDict(
+        extra="allow",
+        json_schema_extra={
+            "description": "Variable definitions for the Runway config file. "
+            "This is used to resolve the 'var' lookup.",
+        },
+        title="Runway Variables Definition",
+        validate_default=True,
+        validate_assignment=True,
+    )
+
+    file_path: Annotated[
+        Path | None,
+        Field(
+            title="Variables File Path",
+            description="Explicit path to a variables file that will be loaded and "
+            "merged with the variables defined here.",
+        ),
+    ] = None
+    """Explicit path to a variables file that will be loaded and merged with the variables defined here."""
+
+    sys_path: Annotated[
+        Path,
+        Field(
+            description="Directory to use as the root of a relative 'file_path'. "
+            "If not provided, the current working directory is used.",
+        ),
+    ] = "./"  # pyright: ignore[reportAssignmentType]
+    """Directory to use as the root of a relative 'file_path'.
+    If not provided, the current working directory is used.
+ + """ + + _convert_null_values = field_validator("*")(utils.convert_null_values) diff --git a/runway/config/models/runway/options/cdk.py b/runway/config/models/runway/options/cdk.py index 182310460..0a04254ba 100644 --- a/runway/config/models/runway/options/cdk.py +++ b/runway/config/models/runway/options/cdk.py @@ -2,9 +2,7 @@ from __future__ import annotations -from typing import List - -from pydantic import Extra +from pydantic import ConfigDict from ...base import ConfigProperty @@ -12,11 +10,12 @@ class RunwayCdkModuleOptionsDataModel(ConfigProperty): """Model for Runway AWS Cloud Development Kit Module options.""" - build_steps: List[str] = [] - skip_npm_ci: bool = False - - class Config(ConfigProperty.Config): - """Model configuration.""" + model_config = ConfigDict( + extra="ignore", + title="Runway AWS Cloud Development Kit Module options", + validate_default=True, + validate_assignment=True, + ) - extra = Extra.ignore - title = "Runway AWS Cloud Development Kit Module options." + build_steps: list[str] = [] + skip_npm_ci: bool = False diff --git a/runway/config/models/runway/options/k8s.py b/runway/config/models/runway/options/k8s.py index e83128f20..6db7b792a 100644 --- a/runway/config/models/runway/options/k8s.py +++ b/runway/config/models/runway/options/k8s.py @@ -3,9 +3,8 @@ from __future__ import annotations from pathlib import Path -from typing import Optional -from pydantic import Extra +from pydantic import ConfigDict from ...base import ConfigProperty @@ -13,11 +12,12 @@ class RunwayK8sModuleOptionsDataModel(ConfigProperty): """Model for Runway Kubernetes Module options.""" - kubectl_version: Optional[str] = None - overlay_path: Optional[Path] = None + model_config = ConfigDict( + extra="ignore", + title="Runway Kubernetes Module options", + validate_default=True, + validate_assignment=True, + ) - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.ignore - title = "Runway Kubernetes Module options." 
+ kubectl_version: str | None = None + overlay_path: Path | None = None diff --git a/runway/config/models/runway/options/serverless.py b/runway/config/models/runway/options/serverless.py index 81f051916..c9b418e7e 100644 --- a/runway/config/models/runway/options/serverless.py +++ b/runway/config/models/runway/options/serverless.py @@ -2,9 +2,9 @@ from __future__ import annotations -from typing import Any, Dict, List, Optional +from typing import Any -from pydantic import Extra +from pydantic import ConfigDict from ...base import ConfigProperty @@ -12,31 +12,33 @@ class RunwayServerlessPromotezipOptionDataModel(ConfigProperty): """Model for Runway Serverless module promotezip option.""" - bucketname: Optional[str] = None - - class Config(ConfigProperty.Config): - """Model configuration.""" + model_config = ConfigDict( + extra="forbid", + title="Runway Serverless Framework Module promotezip option", + validate_default=True, + validate_assignment=True, + ) - extra = Extra.forbid - title = "Runway Serverless Framework Module promotezip option" + bucketname: str | None = None def __bool__(self) -> bool: """Evaluate the boolean value of the object instance.""" - return bool(self.dict(exclude_none=True)) + return bool(self.model_dump(exclude_none=True)) class RunwayServerlessModuleOptionsDataModel(ConfigProperty): """Model for Runway Serverless Framework Module options.""" - args: List[str] = [] - extend_serverless_yml: Dict[str, Any] = {} + model_config = ConfigDict( + extra="ignore", + title="Runway Serverless Framework Module options", + validate_default=True, + validate_assignment=True, + ) + + args: list[str] = [] + extend_serverless_yml: dict[str, Any] = {} promotezip: RunwayServerlessPromotezipOptionDataModel = ( RunwayServerlessPromotezipOptionDataModel() ) skip_npm_ci: bool = False - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.ignore - title = "Runway Serverless Framework Module options" diff --git a/runway/config/models/runway/options/terraform.py b/runway/config/models/runway/options/terraform.py index ade284365..97072c6e9 100644 --- a/runway/config/models/runway/options/terraform.py +++ b/runway/config/models/runway/options/terraform.py @@ -1,11 +1,10 @@ """Runway Terraform Module options.""" -# pylint: disable=no-self-argument from __future__ import annotations -from typing import Dict, List, Optional, Union +from typing import Annotated -from pydantic import Extra, Field, validator +from pydantic import ConfigDict, Field, field_validator from ...base import ConfigProperty @@ -13,59 +12,62 @@ class RunwayTerraformArgsDataModel(ConfigProperty): """Model for Runway Terraform Module args option.""" - apply: List[str] = [] - init: List[str] = [] - plan: List[str] = [] - - class Config(ConfigProperty.Config): - """Model configuration.""" + model_config = ConfigDict( + extra="forbid", + title="Runway Terraform Module args option", + validate_default=True, + validate_assignment=True, + ) - extra = Extra.forbid - title = "Runway Terraform Module args option" + apply: list[str] = [] + init: list[str] = [] + plan: list[str] = [] class RunwayTerraformBackendConfigDataModel(ConfigProperty): """Model for Runway Terraform Module terraform_backend_config option.""" - bucket: Optional[str] = None - dynamodb_table: Optional[str] = None - region: Optional[str] = None - workspace_key_prefix: Optional[str] = None - - class Config(ConfigProperty.Config): - """Model configuration.""" + model_config = ConfigDict( + extra="forbid", + title="Runway Terraform Module 
terraform_backend_config option", + validate_default=True, + validate_assignment=True, + ) - extra = Extra.forbid - title = "Runway Terraform Module terraform_backend_config option" + bucket: str | None = None + dynamodb_table: str | None = None + region: str | None = None + workspace_key_prefix: str | None = None def __bool__(self) -> bool: """Evaluate the boolean value of the object instance.""" - data = self.dict(exclude_none=True) + data = self.model_dump(exclude_none=True) return "bucket" in data or "dynamodb_table" in data class RunwayTerraformModuleOptionsDataModel(ConfigProperty): """Model for Runway Terraform Module options.""" + model_config = ConfigDict( + extra="ignore", + title="Runway Terraform Module options", + populate_by_name=True, + validate_default=True, + validate_assignment=True, + ) + args: RunwayTerraformArgsDataModel = RunwayTerraformArgsDataModel() backend_config: RunwayTerraformBackendConfigDataModel = Field( default=RunwayTerraformBackendConfigDataModel(), alias="terraform_backend_config", ) - version: Optional[str] = Field(default=None, alias="terraform_version") - workspace: Optional[str] = Field(default=None, alias="terraform_workspace") - write_auto_tfvars: bool = Field(default=False, alias="terraform_write_auto_tfvars") - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.ignore - title = "Runway Terraform Module options" + version: Annotated[str | None, Field(alias="terraform_version")] = None + workspace: Annotated[str | None, Field(alias="terraform_workspace")] = None + write_auto_tfvars: Annotated[bool, Field(alias="terraform_write_auto_tfvars")] = False - @validator("args", pre=True) - def _convert_args( - cls, v: Union[List[str], Dict[str, List[str]]] - ) -> Dict[str, List[str]]: + @field_validator("args", mode="before") + @classmethod + def _convert_args(cls, v: list[str] | dict[str, list[str]]) -> dict[str, list[str]]: """Convert args from list to dict.""" if isinstance(v, list): return {"apply": v} diff --git a/runway/config/models/utils.py b/runway/config/models/utils.py index 0a8f7fce8..a65fdd8b3 100644 --- a/runway/config/models/utils.py +++ b/runway/config/models/utils.py @@ -3,12 +3,14 @@ from __future__ import annotations import re -from pathlib import Path -from typing import Any, Optional +from typing import TYPE_CHECKING, Any -CFNGIN_LOOKUP_STRING_REGEX = r"^\${.*}$" +if TYPE_CHECKING: + from pathlib import Path + +CFNGIN_LOOKUP_STRING_REGEX = r"^\$\{.+\}$" RUNWAY_LOOKUP_STRING_ERROR = ValueError("field can only be a string if it's a lookup") -RUNWAY_LOOKUP_STRING_REGEX = r"^\${.*}$" +RUNWAY_LOOKUP_STRING_REGEX = r"^\$\{.+\}$" def convert_null_values(v: Any) -> Any: @@ -17,7 +19,7 @@ def convert_null_values(v: Any) -> Any: return None if isinstance(v, str) and v.lower() in null_strings else v -def resolve_path_field(v: Optional[Path]) -> Optional[Path]: +def resolve_path_field(v: Path | None) -> Path | None: """Resolve sys_path.""" return v.resolve() if v else v diff --git a/runway/constants.py b/runway/constants.py index 12c53db5d..a8a4c7693 100644 --- a/runway/constants.py +++ b/runway/constants.py @@ -1,8 +1,10 @@ """Runway constants.""" -from typing import Any, Dict +from __future__ import annotations -BOTO3_CREDENTIAL_CACHE: Dict[str, Any] = {} +from typing import Any + +BOTO3_CREDENTIAL_CACHE: dict[str, Any] = {} """A global credential cache that can be shared among boto3 sessions. This is inherently threadsafe thanks to the GIL. 
(https://docs.python.org/3/glossary.html#term-global-interpreter-lock) diff --git a/runway/context/_base.py b/runway/context/_base.py index 84d3ff2af..0327a6814 100644 --- a/runway/context/_base.py +++ b/runway/context/_base.py @@ -4,7 +4,7 @@ import logging from pathlib import Path -from typing import TYPE_CHECKING, Any, Optional, Union, cast +from typing import TYPE_CHECKING, Any, cast import boto3 import botocore.exceptions @@ -30,7 +30,7 @@ class BaseContext(DelCachedPropMixin): env: DeployEnvironment """Object containing information about the environment being deployed to.""" - logger: Union[PrefixAdaptor, RunwayLogger] + logger: PrefixAdaptor | RunwayLogger """Custom logger.""" sys_info: SystemInfo @@ -43,8 +43,8 @@ def __init__( self, *, deploy_environment: DeployEnvironment, - logger: Union[PrefixAdaptor, RunwayLogger] = LOGGER, - work_dir: Optional[Path] = None, + logger: PrefixAdaptor | RunwayLogger = LOGGER, + work_dir: Path | None = None, **_: Any, ) -> None: """Instantiate class. @@ -64,7 +64,9 @@ def __init__( def boto3_credentials(self) -> Boto3CredentialsTypeDef: """Return a dict of boto3 credentials.""" return Boto3CredentialsTypeDef( - **{key.lower(): value for key, value in self.current_aws_creds.items()} + **{ + key.lower(): value for key, value in self.current_aws_creds.items() + } # pyright: ignore[reportArgumentType] ) @property @@ -94,11 +96,11 @@ def is_noninteractive(self) -> bool: def get_session( self, *, - aws_access_key_id: Optional[str] = None, - aws_secret_access_key: Optional[str] = None, - aws_session_token: Optional[str] = None, - profile: Optional[str] = None, - region: Optional[str] = None, + aws_access_key_id: str | None = None, + aws_secret_access_key: str | None = None, + aws_session_token: str | None = None, + profile: str | None = None, + region: str | None = None, ) -> boto3.Session: """Create a thread-safe boto3 session. @@ -125,15 +127,11 @@ def get_session( region or "default", ) else: # use explicit values or grab values from env vars - aws_access_key_id = aws_access_key_id or self.env.vars.get( - "AWS_ACCESS_KEY_ID" - ) + aws_access_key_id = aws_access_key_id or self.env.vars.get("AWS_ACCESS_KEY_ID") aws_secret_access_key = aws_secret_access_key or self.env.vars.get( "AWS_SECRET_ACCESS_KEY" ) - aws_session_token = aws_session_token or self.env.vars.get( - "AWS_SESSION_TOKEN" - ) + aws_session_token = aws_session_token or self.env.vars.get("AWS_SESSION_TOKEN") if aws_access_key_id: self.logger.debug( 'building session with Access Key "%s" in region "%s"', @@ -151,10 +149,10 @@ def get_session( cred_provider = session._session.get_component("credential_provider") # type: ignore provider = cred_provider.get_provider("assume-role") # type: ignore provider.cache = BOTO3_CREDENTIAL_CACHE - provider._prompter = ui.getpass + provider._prompter = ui.getpass # noqa: SLF001 return session - # TODO remove after IaC tools support AWS SSO + # TODO (kyle): remove after IaC tools support AWS SSO def _inject_profile_credentials(self) -> None: # cov: ignore """Inject AWS credentials into self.env_vars if using an AWS profile. 
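The get_session() hunk above keeps the shared assume-role credential cache wired into every session it builds: the session's "assume-role" provider is pointed at the module-level BOTO3_CREDENTIAL_CACHE from runway/constants.py, so cached STS credentials survive across sessions. A minimal sketch of that pattern, using only the (private, botocore-version-dependent) hooks already referenced in the hunk:

from __future__ import annotations

from typing import Any

import boto3

BOTO3_CREDENTIAL_CACHE: dict[str, Any] = {}  # shared across all sessions


def build_session(region: str | None = None) -> boto3.Session:
    """Create a boto3 session whose assume-role provider reuses cached credentials."""
    session = boto3.Session(region_name=region)
    # Same private botocore components used by get_session() above; these are
    # internals and may change between botocore releases.
    cred_provider = session._session.get_component("credential_provider")
    provider = cred_provider.get_provider("assume-role")
    provider.cache = BOTO3_CREDENTIAL_CACHE
    return session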
diff --git a/runway/context/_cfngin.py b/runway/context/_cfngin.py index c8f386f2b..73745956e 100644 --- a/runway/context/_cfngin.py +++ b/runway/context/_cfngin.py @@ -3,10 +3,11 @@ from __future__ import annotations import collections.abc +import contextlib import json import logging from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, MutableMapping, Optional, Union, cast +from typing import TYPE_CHECKING, Any, cast from pydantic import BaseModel @@ -27,6 +28,8 @@ from ._base import BaseContext if TYPE_CHECKING: + from collections.abc import MutableMapping + from mypy_boto3_s3.client import S3Client from .type_defs import PersistentGraphLocation @@ -34,7 +37,7 @@ LOGGER = cast(RunwayLogger, logging.getLogger(__name__)) -def get_fqn(base_fqn: str, delimiter: str, name: Optional[str] = None) -> str: +def get_fqn(base_fqn: str, delimiter: str, name: str | None = None) -> str: """Return the fully qualified name of an object within this context. If the name passed already appears to be a fully qualified name, it @@ -70,32 +73,32 @@ class CfnginContext(BaseContext): """ - _persistent_graph_lock_code: Optional[str] + _persistent_graph_lock_code: str | None _persistent_graph_lock_tag: str = "cfngin_lock_code" - _persistent_graph: Optional[Graph] + _persistent_graph: Graph | None _s3_bucket_verified: bool bucket_region: str config: CfnginConfig config_path: Path env: DeployEnvironment - force_stacks: List[str] - hook_data: Dict[str, Any] - logger: Union[PrefixAdaptor, RunwayLogger] + force_stacks: list[str] + hook_data: dict[str, Any] + logger: PrefixAdaptor | RunwayLogger parameters: MutableMapping[str, Any] - stack_names: List[str] + stack_names: list[str] def __init__( self, *, - config: Optional[CfnginConfig] = None, - config_path: Optional[Path] = None, - deploy_environment: Optional[DeployEnvironment] = None, - force_stacks: Optional[List[str]] = None, - logger: Union[PrefixAdaptor, RunwayLogger] = LOGGER, - parameters: Optional[MutableMapping[str, Any]] = None, - stack_names: Optional[List[str]] = None, - work_dir: Optional[Path] = None, + config: CfnginConfig | None = None, + config_path: Path | None = None, + deploy_environment: DeployEnvironment | None = None, + force_stacks: list[str] | None = None, + logger: PrefixAdaptor | RunwayLogger = LOGGER, + parameters: MutableMapping[str, Any] | None = None, + stack_names: list[str] | None = None, + work_dir: Path | None = None, **_: Any, ) -> None: """Instantiate class. 
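The import shuffle at the top of this file illustrates the typing convention applied throughout the diff: with `from __future__ import annotations`, annotations are evaluated lazily, so names needed only by the type checker (here MutableMapping) can live under TYPE_CHECKING at zero runtime cost while still allowing PEP 604 `X | None` unions. A self-contained sketch of the convention (the function is illustrative only, not part of this module):

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:  # resolved by type checkers, skipped at runtime
    from collections.abc import MutableMapping


def copy_parameters(parameters: MutableMapping[str, str] | None = None) -> dict[str, str]:
    """Return a plain dict copy of ``parameters`` (empty when None)."""
    return dict(parameters or {})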
@@ -137,17 +140,14 @@ def base_fqn(self) -> str: return self.config.namespace.replace(".", "-").lower() @cached_property - def bucket_name(self) -> Optional[str]: + def bucket_name(self) -> str | None: """Return ``cfngin_bucket`` from config, calculated name, or None.""" if not self.upload_to_s3: return None - return ( - self.config.cfngin_bucket - or f"cfngin-{self.get_fqn()}-{self.env.aws_region}" - ) + return self.config.cfngin_bucket or f"cfngin-{self.get_fqn()}-{self.env.aws_region}" @cached_property - def mappings(self) -> Dict[str, Dict[str, Dict[str, Any]]]: + def mappings(self) -> dict[str, dict[str, dict[str, Any]]]: """Return ``mappings`` from config.""" return self.config.mappings or {} @@ -185,7 +185,7 @@ def persistent_graph_locked(self) -> bool: return bool(self.persistent_graph_lock_code) @property - def persistent_graph_lock_code(self) -> Optional[str]: + def persistent_graph_lock_code(self) -> str | None: """Code used to lock the persistent graph S3 object.""" if not self._persistent_graph_lock_code and self.persistent_graph_location: self._persistent_graph_lock_code = self.persistent_graph_tags.get( @@ -194,23 +194,21 @@ def persistent_graph_lock_code(self) -> Optional[str]: return self._persistent_graph_lock_code @property - def persistent_graph_tags(self) -> Dict[str, str]: + def persistent_graph_tags(self) -> dict[str, str]: """Cache of tags on the persistent graph object.""" try: return { t["Key"]: t["Value"] - for t in self.s3_client.get_object_tagging( - **self.persistent_graph_location - ).get("TagSet", []) + for t in self.s3_client.get_object_tagging(**self.persistent_graph_location).get( + "TagSet", [] + ) } except self.s3_client.exceptions.NoSuchKey: - self.logger.debug( - "persistent graph object does not exist in S3; could not get tags" - ) + self.logger.debug("persistent graph object does not exist in S3; could not get tags") return {} @property - def persistent_graph(self) -> Optional[Graph]: + def persistent_graph(self) -> Graph | None: """Graph if a persistent graph is being used. Will create an "empty" object in S3 if one is not found. @@ -238,8 +236,7 @@ def persistent_graph(self) -> Optional[Graph]: ) except self.s3_client.exceptions.NoSuchKey: self.logger.info( - "persistent graph object does not exist in s3; " - "creating one now..." + "persistent graph object does not exist in s3; creating one now..." 
) self.s3_client.put_object( Body=content.encode(), @@ -253,7 +250,7 @@ def persistent_graph(self) -> Optional[Graph]: return self._persistent_graph @persistent_graph.setter - def persistent_graph(self, graph: Optional[Graph]) -> None: + def persistent_graph(self, graph: Graph | None) -> None: """Load a persistent graph dict as a :class:`runway.cfngin.plan.Graph`.""" self._persistent_graph = graph @@ -281,12 +278,12 @@ def s3_client(self) -> S3Client: return self.get_session(region=self.bucket_region).client("s3") @cached_property - def stacks_dict(self) -> Dict[str, Stack]: + def stacks_dict(self) -> dict[str, Stack]: """Construct a dict of ``{stack.fqn: Stack}`` for easy access to stacks.""" return {stack.fqn: stack for stack in self.stacks} @cached_property - def stacks(self) -> List[Stack]: + def stacks(self) -> list[Stack]: """Stacks for the current action.""" return [ Stack( @@ -302,16 +299,12 @@ def stacks(self) -> List[Stack]: ] @cached_property - def tags(self) -> Dict[str, str]: + def tags(self) -> dict[str, str]: """Return ``tags`` from config.""" return ( self.config.tags if self.config.tags is not None - else ( - {"cfngin_namespace": self.config.namespace} - if self.config.namespace - else {} - ) + else ({"cfngin_namespace": self.config.namespace} if self.config.namespace else {}) ) @cached_property @@ -326,8 +319,7 @@ def upload_to_s3(self) -> bool: # explicitly set to an empty string. if self.config.cfngin_bucket == "": self.logger.debug( - "not uploading to s3; cfngin_bucket " - "is explicitly set to an empty string" + "not uploading to s3; cfngin_bucket is explicitly set to an empty string" ) return False @@ -336,9 +328,7 @@ def upload_to_s3(self) -> bool: # sense because we can't realistically auto generate a cfngin # bucket name in this case. if not self.config.namespace and not self.config.cfngin_bucket: - self.logger.debug( - "not uploading to s3; namespace & cfngin_bucket not provided" - ) + self.logger.debug("not uploading to s3; namespace & cfngin_bucket not provided") return False return True @@ -356,7 +346,7 @@ def copy(self) -> CfnginContext: work_dir=self.work_dir, ) - def get_fqn(self, name: Optional[str] = None) -> str: + def get_fqn(self, name: str | None = None) -> str: """Return the fully qualified name of an object within this context. If the name passed already appears to be a fully qualified name, it @@ -365,7 +355,7 @@ def get_fqn(self, name: Optional[str] = None) -> str: """ return get_fqn(self.base_fqn, self.config.namespace_delimiter, name) - def get_stack(self, name: str) -> Optional[Stack]: + def get_stack(self, name: str) -> Stack | None: """Get a stack by name. 
Args: @@ -400,19 +390,15 @@ def lock_persistent_graph(self, lock_code: str) -> None: try: self.s3_client.put_object_tagging( - Tagging={ - "TagSet": [ - {"Key": self._persistent_graph_lock_tag, "Value": lock_code} - ] - }, + Tagging={"TagSet": [{"Key": self._persistent_graph_lock_tag, "Value": lock_code}]}, **self.persistent_graph_location, ) self.logger.info( 'locked persistent graph "%s" with lock ID "%s"', "/".join( [ - self.persistent_graph_location["Bucket"], - self.persistent_graph_location["Key"], + self.persistent_graph_location.get("Bucket", "unknown"), + self.persistent_graph_location.get("Key", "unknown"), ] ), lock_code, @@ -445,9 +431,7 @@ def put_persistent_graph(self, lock_code: str) -> None: ) if self.persistent_graph_lock_code != lock_code: - raise PersistentGraphLockCodeMismatch( - lock_code, self.persistent_graph_lock_code - ) + raise PersistentGraphLockCodeMismatch(lock_code, self.persistent_graph_lock_code) self.s3_client.put_object( Body=self.persistent_graph.dumps(4).encode(), @@ -457,9 +441,7 @@ def put_persistent_graph(self, lock_code: str) -> None: Tagging=f"{self._persistent_graph_lock_tag}={lock_code}", **self.persistent_graph_location, ) - self.logger.debug( - "persistent graph updated:\n%s", self.persistent_graph.dumps(indent=4) - ) + self.logger.debug("persistent graph updated:\n%s", self.persistent_graph.dumps(indent=4)) def set_hook_data(self, key: str, data: Any) -> None: """Set hook data for the given key. @@ -477,8 +459,7 @@ def set_hook_data(self, key: str, data: Any) -> None: if key in self.hook_data: raise KeyError( - f"Hook data for key {key} already exists, each hook " - "must have a unique data_key." + f"Hook data for key {key} already exists, each hook must have a unique data_key." ) self.hook_data[key] = data @@ -503,14 +484,10 @@ def unlock_persistent_graph(self, lock_code: str) -> bool: **self.persistent_graph_location, ) except self.s3_client.exceptions.NoSuchKey: - self.logger.info( - "persistent graph deleted; does not need to be unlocked" - ) + self.logger.info("persistent graph deleted; does not need to be unlocked") return True - self.logger.verbose( - 'unlocking persistent graph "%s"...', self.persistent_graph_location - ) + self.logger.verbose('unlocking persistent graph "%s"...', self.persistent_graph_location) if not self.persistent_graph_locked: raise PersistentGraphCannotUnlock( @@ -520,10 +497,8 @@ def unlock_persistent_graph(self, lock_code: str) -> bool: ) if self.persistent_graph_lock_code == lock_code: - try: + with contextlib.suppress(self.s3_client.exceptions.NoSuchKey): self.s3_client.delete_object_tagging(**self.persistent_graph_location) - except self.s3_client.exceptions.NoSuchKey: - pass self._persistent_graph_lock_code = None self.logger.info( 'unlocked persistent graph "%s/%s"', diff --git a/runway/context/_runway.py b/runway/context/_runway.py index 426e4977b..8f360dd54 100644 --- a/runway/context/_runway.py +++ b/runway/context/_runway.py @@ -4,7 +4,7 @@ import logging import sys -from typing import TYPE_CHECKING, Any, Optional, Union, cast +from typing import TYPE_CHECKING, Any, cast from ..compat import cached_property from ..core.components import DeployEnvironment @@ -19,7 +19,7 @@ LOGGER = cast("RunwayLogger", logging.getLogger(__name__)) -def str2bool(v: str): +def str2bool(v: str) -> bool: """Return boolean value of string.""" return v.lower() in ("yes", "true", "t", "1", "on", "y") @@ -27,16 +27,16 @@ def str2bool(v: str): class RunwayContext(BaseContext): """Runway context object.""" - command: 
Optional[RunwayActionTypeDef] + command: RunwayActionTypeDef | None """Runway command/action being run.""" def __init__( self, *, - command: Optional[RunwayActionTypeDef] = None, - deploy_environment: Optional[DeployEnvironment] = None, - logger: Union[PrefixAdaptor, RunwayLogger] = LOGGER, - work_dir: Optional[Path] = None, + command: RunwayActionTypeDef | None = None, + deploy_environment: DeployEnvironment | None = None, + logger: PrefixAdaptor | RunwayLogger = LOGGER, + work_dir: Path | None = None, **_: Any, ) -> None: """Instantiate class. @@ -65,7 +65,7 @@ def no_color(self) -> bool: """ colorize = self.env.vars.get("RUNWAY_COLORIZE") # explicitly enable/disable try: - if isinstance(colorize, bool): # type: ignore + if isinstance(colorize, bool): # catch False return not colorize if colorize and isinstance(colorize, str): # type: ignore diff --git a/runway/context/sys_info.py b/runway/context/sys_info.py index a0c12d2b5..52dcc7999 100644 --- a/runway/context/sys_info.py +++ b/runway/context/sys_info.py @@ -5,15 +5,16 @@ import os import platform import sys -from typing import Any, ClassVar, Optional, cast +from typing import Any, ClassVar, cast, final from ..compat import cached_property +@final class OsInfo: """Information about the operating system running on the current system.""" - __instance: ClassVar[Optional[OsInfo]] = None + __instance: ClassVar[OsInfo | None] = None def __new__(cls, *args: Any, **kwargs: Any) -> OsInfo: """Create a new instance of class. @@ -69,10 +70,11 @@ def clear_singleton(cls) -> None: cls.__instance = None +@final class SystemInfo: """Information about the system running Runway.""" - __instance: ClassVar[Optional[SystemInfo]] = None + __instance: ClassVar[SystemInfo | None] = None def __new__(cls, *args: Any, **kwargs: Any) -> SystemInfo: """Create a new instance of class. @@ -87,9 +89,7 @@ def __new__(cls, *args: Any, **kwargs: Any) -> SystemInfo: @cached_property def is_frozen(self) -> bool: """Whether or not Runway is running from a frozen package (Pyinstaller).""" - if getattr(sys, "frozen", False): - return True - return False + return bool(getattr(sys, "frozen", False)) @cached_property def os(self) -> OsInfo: diff --git a/runway/core/__init__.py b/runway/core/__init__.py index 508b67976..e74eb7b99 100644 --- a/runway/core/__init__.py +++ b/runway/core/__init__.py @@ -5,7 +5,7 @@ import logging as _logging import sys as _sys import traceback as _traceback -from typing import TYPE_CHECKING, Any, Dict, List, Optional, cast +from typing import TYPE_CHECKING, Any, cast import yaml as _yaml @@ -48,9 +48,7 @@ def __init__(self, config: RunwayConfig, context: RunwayContext) -> None: self.__assert_config_version() self.ctx.env.log_name() - def deploy( - self, deployments: Optional[List[RunwayDeploymentDefinition]] = None - ) -> None: + def deploy(self, deployments: list[RunwayDeploymentDefinition] | None = None) -> None: """Deploy action. Args: @@ -58,13 +56,9 @@ def deploy( all deployments in the config will be run. """ - self.__run_action( - "deploy", deployments if deployments is not None else self.deployments - ) + self.__run_action("deploy", deployments if deployments is not None else self.deployments) - def destroy( - self, deployments: Optional[List[RunwayDeploymentDefinition]] = None - ) -> None: + def destroy(self, deployments: list[RunwayDeploymentDefinition] | None = None) -> None: """Destroy action. 
Args: @@ -85,8 +79,8 @@ def destroy( self.reverse_deployments(self.deployments) def get_env_vars( - self, deployments: Optional[List[RunwayDeploymentDefinition]] = None - ) -> Dict[str, Any]: + self, deployments: list[RunwayDeploymentDefinition] | None = None + ) -> dict[str, Any]: """Get env_vars defined in the config. Args: @@ -97,7 +91,7 @@ def get_env_vars( """ deployments = deployments or self.deployments - result: Dict[str, str] = {} + result: dict[str, str] = {} for deployment in deployments: obj = components.Deployment( context=self.ctx, definition=deployment, variables=self.variables @@ -105,9 +99,7 @@ def get_env_vars( result.update(obj.env_vars_config) return result - def init( - self, deployments: Optional[List[RunwayDeploymentDefinition]] = None - ) -> None: + def init(self, deployments: list[RunwayDeploymentDefinition] | None = None) -> None: """Init action. Args: @@ -115,13 +107,9 @@ def init( all deployments in the config will be run. """ - self.__run_action( - "init", deployments if deployments is not None else self.deployments - ) + self.__run_action("init", deployments if deployments is not None else self.deployments) - def plan( - self, deployments: Optional[List[RunwayDeploymentDefinition]] = None - ) -> None: + def plan(self, deployments: list[RunwayDeploymentDefinition] | None = None) -> None: """Plan action. Args: @@ -129,14 +117,12 @@ def plan( all deployments in the config will be run. """ - self.__run_action( - "plan", deployments if deployments is not None else self.deployments - ) + self.__run_action("plan", deployments if deployments is not None else self.deployments) @staticmethod def reverse_deployments( - deployments: List[RunwayDeploymentDefinition], - ) -> List[RunwayDeploymentDefinition]: + deployments: list[RunwayDeploymentDefinition], + ) -> list[RunwayDeploymentDefinition]: """Reverse deployments and the modules within them. Args: @@ -146,7 +132,7 @@ def reverse_deployments( Deployments and modules in reverse order. """ - result: List[RunwayDeploymentDefinition] = [] + result: list[RunwayDeploymentDefinition] = [] for deployment in deployments: deployment.reverse() result.insert(0, deployment) @@ -180,7 +166,7 @@ def test(self) -> None: _sys.exit(1) self.ctx.command = "test" - failed_tests: List[str] = [] + failed_tests: list[str] = [] LOGGER.info("found %i test(s)", len(self.tests)) for tst in self.tests: @@ -198,7 +184,7 @@ def test(self) -> None: try: handler.handle(tst.name, tst.args) logger.success("running test (pass)") - except (Exception, SystemExit) as err: # pylint: disable=broad-except + except (Exception, SystemExit) as err: # for lack of an easy, better way to do this atm, assume # SystemExits are due to a test failure and the failure reason # has already been properly logged by the handler or the @@ -217,7 +203,7 @@ def test(self) -> None: _sys.exit(1) LOGGER.success("all tests passed") - def __assert_config_version(self): + def __assert_config_version(self) -> None: """Assert the config supports this version of Runway.""" if not self.required_version: LOGGER.debug("required Runway version not specified") @@ -245,7 +231,7 @@ def __assert_config_version(self): def __run_action( self, action: type_defs.RunwayActionTypeDef, - deployments: Optional[List[RunwayDeploymentDefinition]], + deployments: list[RunwayDeploymentDefinition] | None, ) -> None: """Run an action on a list of deployments. 
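All four public actions above funnel into __run_action; only destroy differs, passing the deployments through reverse_deployments so that both the deployment list and each deployment's modules are processed in reverse order. A hypothetical usage sketch, assuming the enclosing class (Runway in this package) is constructed with the parsed RunwayConfig and RunwayContext shown in its __init__:

runway = Runway(config, context)  # config/context construction omitted
runway.deploy()   # every deployment, in config order
runway.plan()     # same traversal, plan action
runway.destroy()  # deployments and their modules processed in reverse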
diff --git a/runway/core/components/_deploy_environment.py b/runway/core/components/_deploy_environment.py index 261d4a071..af079f986 100644 --- a/runway/core/components/_deploy_environment.py +++ b/runway/core/components/_deploy_environment.py @@ -7,7 +7,7 @@ import os import sys from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Optional, cast +from typing import TYPE_CHECKING, Any, cast import click @@ -20,7 +20,7 @@ import git from git.exc import InvalidGitRepositoryError except ImportError: # cov: ignore - git = object # pylint: disable=invalid-name + git = object InvalidGitRepositoryError = AttributeError if TYPE_CHECKING: @@ -32,19 +32,19 @@ class DeployEnvironment(DelCachedPropMixin): """Runway deploy environment.""" - __name: Optional[str] + __name: str | None _ignore_git_branch: bool - name_derived_from: Optional[str] + name_derived_from: str | None root_dir: Path def __init__( self, *, - environ: Optional[Dict[str, str]] = None, - explicit_name: Optional[str] = None, + environ: dict[str, str] | None = None, + explicit_name: str | None = None, ignore_git_branch: bool = False, - root_dir: Optional[Path] = None, + root_dir: Path | None = None, ) -> None: """Instantiate class. @@ -70,7 +70,7 @@ def aws_credentials(self) -> EnvVarsAwsCredentialsTypeDef: ) @property - def aws_profile(self) -> Optional[str]: + def aws_profile(self) -> str | None: """Get AWS profile from environment variables.""" return self.vars.get("AWS_PROFILE") @@ -82,9 +82,7 @@ def aws_profile(self, profile_name: str) -> None: @property def aws_region(self) -> str: """Get AWS region from environment variables.""" - return self.vars.get( - "AWS_REGION", self.vars.get("AWS_DEFAULT_REGION", "us-east-1") - ) + return self.vars.get("AWS_REGION", self.vars.get("AWS_DEFAULT_REGION", "us-east-1")) @aws_region.setter def aws_region(self, region: str) -> None: @@ -92,7 +90,7 @@ def aws_region(self, region: str) -> None: self._update_vars({"AWS_DEFAULT_REGION": region, "AWS_REGION": region}) @cached_property - def branch_name(self) -> Optional[str]: + def branch_name(self) -> str | None: """Git branch name.""" if isinstance(git, type): LOGGER.debug( @@ -102,9 +100,7 @@ def branch_name(self) -> Optional[str]: return None try: LOGGER.debug("getting git branch name...") - return git.Repo( # type: ignore - str(self.root_dir), search_parent_directories=True - ).active_branch.name + return git.Repo(str(self.root_dir), search_parent_directories=True).active_branch.name except InvalidGitRepositoryError: return None except TypeError: @@ -261,9 +257,7 @@ def name(self) -> str: else: self.name_derived_from = "directory" if self.root_dir.name.startswith("ENV-"): - LOGGER.verbose( - 'stripped "ENV-" from the directory name "%s"', self.root_dir.name - ) + LOGGER.verbose('stripped "ENV-" from the directory name "%s"', self.root_dir.name) name = self.root_dir.name[4:] else: name = self.root_dir.name @@ -307,9 +301,7 @@ def log_name(self) -> None: """Output name to log.""" name = self.name # resolve if not already resolved if self.name_derived_from == "explicit": - LOGGER.info( - 'deploy environment "%s" is explicitly defined in the environment', name - ) + LOGGER.info('deploy environment "%s" is explicitly defined in the environment', name) LOGGER.info( "if not correct, update the value or unset it to fall back " "to the name of the current git branch or parent directory" @@ -333,13 +325,11 @@ def log_name(self) -> None: "override via the DEPLOY_ENVIRONMENT environment variable" ) - def _parse_branch_name(self) -> 
Optional[str]: + def _parse_branch_name(self) -> str | None: """Parse branch name for use as deploy environment name.""" if self.branch_name: if self.branch_name.startswith("ENV-"): - LOGGER.verbose( - 'stripped "ENV-" from the branch name "%s"', self.branch_name - ) + LOGGER.verbose('stripped "ENV-" from the branch name "%s"', self.branch_name) return self.branch_name[4:] if self.branch_name == "master": LOGGER.verbose('translated branch name "master" to "common"') @@ -354,11 +344,11 @@ def _parse_branch_name(self) -> Optional[str]: return result return self.branch_name - def _update_vars(self, env_vars: Dict[str, str]) -> None: + def _update_vars(self, env_vars: dict[str, str]) -> None: """Update vars and log the change. Args: - env_vars (Dict[str, str]): Dict to update self.vars with. + env_vars: Dict to update self.vars with. """ self.vars.update(env_vars) diff --git a/runway/core/components/_deployment.py b/runway/core/components/_deployment.py index 882e402a9..1bbf79669 100644 --- a/runway/core/components/_deployment.py +++ b/runway/core/components/_deployment.py @@ -6,7 +6,7 @@ import logging import multiprocessing import sys -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Any, TypedDict from ..._logging import PrefixAdaptor from ...compat import cached_property @@ -29,6 +29,15 @@ LOGGER = logging.getLogger(__name__.replace("._", ".")) +class _AssumeRoleConfigTypeDef(TypedDict, total=False): + """Return type for :attr:`Deployment.assume_role_config`.""" + + duration_seconds: int + revert_on_exit: bool + role_arn: str + session_name: str + + class Deployment: """Runway deployment.""" @@ -36,8 +45,8 @@ def __init__( self, context: RunwayContext, definition: RunwayDeploymentDefinition, - future: Optional[RunwayFutureDefinitionModel] = None, - variables: Optional[RunwayVariablesDefinition] = None, + future: RunwayFutureDefinitionModel | None = None, + variables: RunwayVariablesDefinition | None = None, ) -> None: """Instantiate class. 
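The new `_AssumeRoleConfigTypeDef` tightens the old `Dict[str, Union[bool, int, str]]` return type. A small sketch of why `total=False` is the right choice: it makes every key optional, so the early `return {}` paths in `assume_role_config` still type-check. This is a trimmed illustration with a made-up example ARN, not the full Runway property.

```python
from __future__ import annotations

from typing_extensions import TypedDict


class AssumeRoleConfig(TypedDict, total=False):
    """All keys optional, mirroring the diff's ``total=False`` TypedDict."""

    duration_seconds: int
    revert_on_exit: bool
    role_arn: str
    session_name: str


def assume_role_config(arn: str | None) -> AssumeRoleConfig:
    if arn is None:
        return {}  # OK: total=False makes every key optional
    return {"role_arn": arn, "revert_on_exit": True}


print(assume_role_config("arn:aws:iam::123456789012:role/example"))
```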
@@ -57,49 +66,44 @@ def __init__( self.__merge_env_vars() @property - def assume_role_config(self) -> Dict[str, Union[bool, int, str]]: + def assume_role_config(self) -> _AssumeRoleConfigTypeDef: """Parse the definition to get assume role arguments.""" assume_role = self.definition.assume_role if not assume_role: - self.logger.debug( - "assume_role not configured for deployment: %s", self.name - ) + self.logger.debug("assume_role not configured for deployment: %s", self.name) return {} - if isinstance(assume_role, str): # type: ignore + if isinstance(assume_role, str): self.logger.debug("role found: %s", assume_role) assume_role = RunwayAssumeRoleDefinitionModel(arn=assume_role) - elif isinstance(assume_role, dict): # type: ignore - assume_role = RunwayAssumeRoleDefinitionModel.parse_obj(assume_role) + elif isinstance(assume_role, dict): + assume_role = RunwayAssumeRoleDefinitionModel.model_validate(assume_role) if not assume_role.arn: - self.logger.debug( - "assume_role not configured for deployment: %s", self.name - ) + self.logger.debug("assume_role not configured for deployment: %s", self.name) return {} return { - "duration_seconds": assume_role.duration, + "duration_seconds": int(assume_role.duration), "revert_on_exit": assume_role.post_deploy_env_revert, "role_arn": assume_role.arn, "session_name": assume_role.session_name, } @property - def env_vars_config(self) -> Dict[str, str]: + def env_vars_config(self) -> dict[str, str]: """Parse the definition to get the correct env_vars configuration.""" try: if not self.definition.env_vars: return {} except UnresolvedVariable: - # pylint: disable=protected-access - if "env_vars" in self.definition._vars: - var = self.definition._vars["env_vars"] + if "env_vars" in self.definition._vars: # noqa: SLF001 + var = self.definition._vars["env_vars"] # noqa: SLF001 var.resolve(self.ctx, variables=self._variables) - self.definition._data["env_vars"] = var.value + self.definition._data["env_vars"] = var.value # noqa: SLF001 else: raise return flatten_path_lists(self.definition.env_vars, str(self.ctx.env.root_dir)) @cached_property - def regions(self) -> List[str]: + def regions(self) -> list[str]: """List of regions this deployment is associated with.""" return self.definition.parallel_regions or self.definition.regions @@ -114,9 +118,7 @@ def deploy(self) -> None: High level method for running a deployment. """ - self.logger.verbose( - "attempting to deploy to region(s): %s", ", ".join(self.regions) - ) + self.logger.verbose("attempting to deploy to region(s): %s", ", ".join(self.regions)) if self.use_async: return self.__async("deploy") return self.__sync("deploy") @@ -127,9 +129,7 @@ def destroy(self) -> None: High level method for running a deployment. """ - self.logger.verbose( - "attempting to destroy in region(s): %s", ", ".join(self.regions) - ) + self.logger.verbose("attempting to destroy in region(s): %s", ", ".join(self.regions)) if self.use_async: return self.__async("destroy") return self.__sync("destroy") @@ -140,9 +140,7 @@ def init(self) -> None: High level method for running a deployment. 
""" - self.logger.verbose( - "attempting to initialize region(s): %s", ", ".join(self.regions) - ) + self.logger.verbose("attempting to initialize region(s): %s", ", ".join(self.regions)) if self.use_async: return self.__async("init") return self.__sync("init") @@ -189,9 +187,7 @@ def run(self, action: RunwayActionTypeDef, region: str) -> None: variables=self._variables, ) - def validate_account_credentials( - self, context: Optional[RunwayContext] = None - ) -> None: + def validate_account_credentials(self, context: RunwayContext | None = None) -> None: """Exit if requested deployment account doesn't match credentials. Args: @@ -213,7 +209,7 @@ def validate_account_credentials( ) sys.exit(1) self.logger.info( - "verified current AWS account matches required " + 'account id "%s"', + 'verified current AWS account matches required account id "%s"', self.definition.account_id, ) if self.definition.account_alias: @@ -236,9 +232,7 @@ def __merge_env_vars(self) -> None: self.logger.verbose( "environment variable overrides are being applied to this deployment" ) - self.logger.debug( - "environment variable overrides: %s", self.env_vars_config - ) + self.logger.debug("environment variable overrides: %s", self.env_vars_config) self.ctx.env.vars = merge_dicts(self.ctx.env.vars, self.env_vars_config) def __async(self, action: RunwayActionTypeDef) -> None: @@ -248,16 +242,12 @@ def __async(self, action: RunwayActionTypeDef) -> None: action: Name of action to run. """ - self.logger.info( - "processing regions in parallel... (output will be interwoven)" - ) + self.logger.info("processing regions in parallel... (output will be interwoven)") with concurrent.futures.ProcessPoolExecutor( max_workers=self.ctx.env.max_concurrent_regions, mp_context=multiprocessing.get_context("fork"), ) as executor: - futures = [ - executor.submit(self.run, *[action, region]) for region in self.regions - ] + futures = [executor.submit(self.run, action, region) for region in self.regions] for job in futures: job.result() # raise exceptions / exit as needed @@ -278,7 +268,7 @@ def run_list( cls, action: RunwayActionTypeDef, context: RunwayContext, - deployments: List[RunwayDeploymentDefinition], + deployments: list[RunwayDeploymentDefinition], future: RunwayFutureDefinitionModel, variables: RunwayVariablesDefinition, ) -> None: diff --git a/runway/core/components/_module.py b/runway/core/components/_module.py index bc0e6cedc..7199ef02c 100644 --- a/runway/core/components/_module.py +++ b/runway/core/components/_module.py @@ -7,7 +7,7 @@ import logging import multiprocessing import sys -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast +from typing import TYPE_CHECKING, Any, cast import yaml @@ -66,9 +66,7 @@ def __init__( """ self.__deployment = deployment self.__future = future or RunwayFutureDefinitionModel() - self.__variables = variables or RunwayVariablesDefinition( - RunwayVariablesDefinitionModel() - ) + self.__variables = variables or RunwayVariablesDefinition(RunwayVariablesDefinitionModel()) self.ctx = context.copy() # each module has it's own instance of context definition.resolve(self.ctx, variables=variables) self.definition = definition @@ -76,7 +74,7 @@ def __init__( self.logger = PrefixAdaptor(self.fqn, LOGGER) @cached_property - def child_modules(self) -> List[Module]: + def child_modules(self) -> list[Module]: """Return child modules.""" return [ self.__class__( @@ -90,7 +88,7 @@ def child_modules(self) -> List[Module]: ] @cached_property - def environment_matches_defined(self) -> 
Optional[bool]: + def environment_matches_defined(self) -> bool | None: """Environment matches one of the defined environments. Will return None if there is nothing defined for the current environment. @@ -109,14 +107,14 @@ def environments(self) -> RunwayEnvironmentsType: return tmp @cached_property - def fqn(self): + def fqn(self) -> str: """Fully qualified name.""" if not self.__deployment: return self.name return f"{self.__deployment.name}.{self.name}" @cached_property - def opts_from_file(self) -> Dict[str, Any]: + def opts_from_file(self) -> dict[str, Any]: """Load module options from local file.""" opts_file = self.path.module_root / "runway.module.yml" if opts_file.is_file(): @@ -134,9 +132,9 @@ def path(self) -> ModulePath: # lazy load the path ) @cached_property - def payload(self) -> Dict[str, Any]: # lazy load the payload + def payload(self) -> dict[str, Any]: # lazy load the payload """Return payload to be passed to module class handler class.""" - payload: Dict[str, Any] = {} + payload: dict[str, Any] = {} if self.__deployment: payload.update( { @@ -216,9 +214,7 @@ def plan(self) -> None: if not self.child_modules: return self.run("plan") if self.use_async: - self.logger.info( - "processing of modules will be done in parallel during deploy/destroy" - ) + self.logger.info("processing of modules will be done in parallel during deploy/destroy") return self.__sync("plan") def run(self, action: RunwayActionTypeDef) -> None: @@ -231,9 +227,7 @@ def run(self, action: RunwayActionTypeDef) -> None: """ LOGGER.info("") - self.logger.notice( - "processing module in %s (in progress)", self.ctx.env.aws_region - ) + self.logger.notice("processing module in %s (in progress)", self.ctx.env.aws_region) self.logger.verbose("module payload: %s", json.dumps(self.payload)) if self.should_skip: return @@ -248,9 +242,7 @@ def run(self, action: RunwayActionTypeDef) -> None: else: self.logger.error('"%s" is missing method "%s"', inst, action) sys.exit(1) - self.logger.success( - "processing module in %s (complete)", self.ctx.env.aws_region - ) + self.logger.success("processing module in %s (complete)", self.ctx.env.aws_region) def __async(self, action: RunwayActionTypeDef) -> None: """Execute asynchronously. @@ -259,9 +251,7 @@ def __async(self, action: RunwayActionTypeDef) -> None: action: Name of action to run. """ - self.logger.info( - "processing modules in parallel... (output will be interwoven)" - ) + self.logger.info("processing modules in parallel... (output will be interwoven)") # Can't use threading or ThreadPoolExecutor here because # we need to be able to do things like `cd` which is not # thread safe. 
@@ -269,9 +259,7 @@ def __async(self, action: RunwayActionTypeDef) -> None: max_workers=self.ctx.env.max_concurrent_modules, mp_context=multiprocessing.get_context("fork"), ) as executor: - futures = [ - executor.submit(child.run, *[action]) for child in self.child_modules - ] + futures = [executor.submit(child.run, action) for child in self.child_modules] for job in futures: job.result() # raise exceptions / exit as needed @@ -294,9 +282,7 @@ def __merge_env_vars(self, env_vars: RunwayEnvVarsType) -> None: self.logger.verbose( "environment variable overrides are being applied to this module" ) - self.logger.debug( - "environment variable overrides: %s", resolved_env_vars - ) + self.logger.debug("environment variable overrides: %s", resolved_env_vars) self.ctx.env.vars = merge_dicts(self.ctx.env.vars, resolved_env_vars) @classmethod @@ -304,10 +290,10 @@ def run_list( cls, action: RunwayActionTypeDef, context: RunwayContext, - modules: List[RunwayModuleDefinition], + modules: list[RunwayModuleDefinition], variables: RunwayVariablesDefinition, deployment: RunwayDeploymentDefinition = None, - future: Optional[RunwayFutureDefinitionModel] = None, + future: RunwayFutureDefinitionModel | None = None, ) -> None: """Run a list of modules. @@ -341,9 +327,9 @@ def __getitem__(self, key: str) -> Any: def validate_environment( context: RunwayContext, - env_def: Optional[Union[bool, Dict[str, Any], int, str, List[str]]], - logger: Union[PrefixAdaptor, RunwayLogger] = LOGGER, -) -> Optional[bool]: + env_def: bool | dict[str, Any] | int | str | list[str] | None, + logger: PrefixAdaptor | RunwayLogger = LOGGER, +) -> bool | None: """Check if an environment should be deployed to. Args: @@ -363,14 +349,12 @@ def validate_environment( else: logger.verbose("environment not defined; module will determine deployment") env_def = None - return cast(Optional[bool], env_def) + return env_def if isinstance(env_def, dict): if context.env.name not in env_def: logger.info("skipped; environment not in definition") return False - return validate_environment( - context, cast(Any, env_def.get(context.env.name, False)), logger=logger - ) + return validate_environment(context, env_def.get(context.env.name, False), logger=logger) account = aws.AccountDetails(context) accepted_values = [ diff --git a/runway/core/components/_module_path.py b/runway/core/components/_module_path.py index 29b8d16b2..4eff846a1 100644 --- a/runway/core/components/_module_path.py +++ b/runway/core/components/_module_path.py @@ -5,7 +5,7 @@ import logging import re from pathlib import Path -from typing import TYPE_CHECKING, ClassVar, Dict, Optional, Type, Union +from typing import TYPE_CHECKING, ClassVar from urllib.parse import parse_qs from typing_extensions import TypedDict @@ -25,7 +25,7 @@ class ModulePathMetadataTypeDef(TypedDict): """Type definition for ModulePath.metadata.""" - arguments: Dict[str, str] + arguments: dict[str, str] cache_dir: Path location: str source: str @@ -36,16 +36,16 @@ class ModulePath: """Handler for the ``path`` field of a Runway module.""" ARGS_REGEX: ClassVar[str] = r"(\?)(?P.*)$" - REMOTE_SOURCE_HANDLERS: ClassVar[Dict[str, Type[Source]]] = {"git": Git} + REMOTE_SOURCE_HANDLERS: ClassVar[dict[str, type[Source]]] = {"git": Git} SOURCE_REGEX: ClassVar[str] = r"(?P[a-z]+)(\:\:)" URI_REGEX: ClassVar[str] = r"(?P[a-z]+://[a-zA-Z0-9\./-]+?(?=//|\?|$))" def __init__( self, - definition: Optional[Union[Path, str]] = None, + definition: Path | str | None = None, *, cache_dir: Path, - deploy_environment: 
Optional[DeployEnvironment] = None, + deploy_environment: DeployEnvironment | None = None, ) -> None: """Instantiate class. @@ -60,24 +60,19 @@ def __init__( self.env = deploy_environment or DeployEnvironment() @cached_property - def arguments(self) -> Dict[str, str]: + def arguments(self) -> dict[str, str]: """Remote source arguments.""" if isinstance(self.definition, str): match = re.match(rf"^.*{self.ARGS_REGEX}", self.definition) if match: - return { - k: ",".join(v) for k, v in parse_qs(match.group("args")).items() - } + return {k: ",".join(v) for k, v in parse_qs(match.group("args")).items()} return {} @cached_property def location(self) -> str: """Location of the module.""" if isinstance(self.definition, str): - if ( - re.match(r"^(/|//|\.|\./)", self.definition) - or "::" not in self.definition - ): + if re.match(r"^(/|//|\.|\./)", self.definition) or "::" not in self.definition: return re.sub(self.ARGS_REGEX, "", self.definition) no_src = re.sub(rf"^{self.SOURCE_REGEX}", "", self.definition) no_uri = re.sub(rf"^{self.URI_REGEX}", "", no_src) @@ -141,12 +136,10 @@ def _fetch_remote_source(self) -> Path: @classmethod def parse_obj( cls, - obj: Optional[ - Union[Path, RunwayModuleDefinition, RunwayModuleDefinitionModel, str] - ], + obj: Path | RunwayModuleDefinition | RunwayModuleDefinitionModel | str | None, *, cache_dir: Path, - deploy_environment: Optional[DeployEnvironment] = None, + deploy_environment: DeployEnvironment | None = None, ) -> ModulePath: """Parse object. diff --git a/runway/core/components/_module_type.py b/runway/core/components/_module_type.py index 31abc8c58..24e1a7311 100644 --- a/runway/core/components/_module_type.py +++ b/runway/core/components/_module_type.py @@ -5,14 +5,15 @@ import logging import os import sys -from pathlib import Path -from typing import TYPE_CHECKING, ClassVar, Dict, Optional, Type, cast +from typing import TYPE_CHECKING, Any, ClassVar, cast from typing_extensions import Literal from ...utils import load_object_from_string if TYPE_CHECKING: + from pathlib import Path + from ...config.models.runway import RunwayModuleTypeTypeDef from ...module.base import RunwayModule @@ -32,7 +33,7 @@ class RunwayModuleType: Runway determines the type of module you are trying to deploy in 3 different ways. First, it will check for the ``type`` property as described here, next it will look - for a suffix as described in :ref:`Module Definition`, + for a suffix as described in :ref:`Module Definition `, and finally it will attempt to autodetect your module type by scanning the files of the project. 
If none of those settings produces a valid result an error will @@ -51,7 +52,7 @@ class RunwayModuleType: +--------------------+-----------------------------------------------+ | ``kubernetes`` | Kubernetes | +--------------------+-----------------------------------------------+ - | ``static`` | :ref:`Static Site` | + | ``static`` | :ref:`index:Static Site` | +--------------------+-----------------------------------------------+ Even when specifying a module ``type`` the module structure @@ -60,7 +61,7 @@ class RunwayModuleType: """ - EXTENSION_MAP: ClassVar[Dict[str, str]] = { + EXTENSION_MAP: ClassVar[dict[str, str]] = { "cdk": "runway.module.cdk.CloudDevelopmentKit", "cfn": "runway.module.cloudformation.CloudFormation", "k8s": "runway.module.k8s.K8s", @@ -69,7 +70,7 @@ class RunwayModuleType: "web": "runway.module.staticsite.handler.StaticSite", } - TYPE_MAP: ClassVar[Dict[str, str]] = { + TYPE_MAP: ClassVar[dict[str, str]] = { "cdk": EXTENSION_MAP["cdk"], "cloudformation": EXTENSION_MAP["cfn"], "kubernetes": EXTENSION_MAP["k8s"], @@ -81,8 +82,8 @@ class RunwayModuleType: def __init__( self, path: Path, - class_path: Optional[str] = None, - type_str: Optional[RunwayModuleTypeTypeDef] = None, + class_path: str | None = None, + type_str: RunwayModuleTypeTypeDef | None = None, ) -> None: """Instantiate class. @@ -97,7 +98,7 @@ def __init__( self.type_str = type_str self.module_class = self._determine_module_class() - def _determine_module_class(self) -> Type[RunwayModule]: + def _determine_module_class(self) -> type[RunwayModule[Any]]: """Determine type of module and return deployment module class. Returns: @@ -113,16 +114,12 @@ def _determine_module_class(self) -> Type[RunwayModule]: if not self.class_path and self.type_str: self.class_path = self.TYPE_MAP.get(self.type_str, None) if self.class_path: - LOGGER.debug( - 'module class "%s" determined from explicit type', self.class_path - ) + LOGGER.debug('module class "%s" determined from explicit type', self.class_path) if not self.class_path: self._set_class_path_based_on_extension() if self.class_path: - LOGGER.debug( - 'module class "%s" determined from path extension', self.class_path - ) + LOGGER.debug('module class "%s" determined from path extension', self.class_path) if not self.class_path: self._set_class_path_based_on_autodetection() @@ -130,15 +127,15 @@ def _determine_module_class(self) -> Type[RunwayModule]: if not self.class_path: LOGGER.error( 'module class could not be determined from path "%s"', - os.path.basename(self.path), + self.path.name, ) sys.exit(1) - return cast(Type["RunwayModule"], load_object_from_string(self.class_path)) + return cast(type["RunwayModule[Any]"], load_object_from_string(self.class_path)) def _set_class_path_based_on_extension(self) -> None: """Based on the directory suffix set the class_path.""" - basename = os.path.basename(self.path) + basename = self.path.name basename_split = basename.split(".") extension = basename_split[len(basename_split) - 1] self.class_path = self.EXTENSION_MAP.get(extension, None) @@ -161,9 +158,7 @@ def _set_class_path_based_on_autodetection(self) -> None: self.class_path = self.TYPE_MAP.get("serverless", None) elif next(self.path.glob("*.tf"), None): self.class_path = self.TYPE_MAP.get("terraform", None) - elif (self.path / "cdk.json").is_file() and ( - self.path / "package.json" - ).is_file(): + elif (self.path / "cdk.json").is_file() and (self.path / "package.json").is_file(): self.class_path = self.TYPE_MAP.get("cdk", None) elif (self.path / "overlays").is_dir() 
and self._find_kustomize_files(): self.class_path = self.TYPE_MAP.get("kubernetes", None) @@ -174,9 +169,7 @@ def _set_class_path_based_on_autodetection(self) -> None: ): self.class_path = self.TYPE_MAP.get("cloudformation", None) if self.class_path: - LOGGER.debug( - 'module class "%s" determined from autodetection', self.class_path - ) + LOGGER.debug('module class "%s" determined from autodetection', self.class_path) def _find_kustomize_files(self) -> bool: """Return true if kustomize yaml file found. diff --git a/runway/core/providers/aws/_account.py b/runway/core/providers/aws/_account.py index 5d6c0cd4f..e75a8a3be 100644 --- a/runway/core/providers/aws/_account.py +++ b/runway/core/providers/aws/_account.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, List, Union +from typing import TYPE_CHECKING from ....compat import cached_property @@ -15,7 +15,7 @@ class AccountDetails: """AWS account details.""" - def __init__(self, context: Union[CfnginContext, RunwayContext]) -> None: + def __init__(self, context: CfnginContext | RunwayContext) -> None: """Instantiate class. Args: @@ -25,12 +25,12 @@ def __init__(self, context: Union[CfnginContext, RunwayContext]) -> None: self.__ctx = context @cached_property - def aliases(self) -> List[str]: + def aliases(self) -> list[str]: """Get the aliases of the AWS account.""" # Super overkill here using pagination when an account can only # have a single alias, but at least this implementation should be # future-proof. - aliases: List[str] = [] + aliases: list[str] = [] paginator = self.__session.client("iam").get_paginator("list_account_aliases") response_iterator = paginator.paginate() for page in response_iterator: diff --git a/runway/core/providers/aws/_assume_role.py b/runway/core/providers/aws/_assume_role.py index e49c5a998..ae03ba32d 100644 --- a/runway/core/providers/aws/_assume_role.py +++ b/runway/core/providers/aws/_assume_role.py @@ -3,8 +3,9 @@ from __future__ import annotations import logging +from contextlib import AbstractContextManager from datetime import datetime -from typing import TYPE_CHECKING, ContextManager, Optional, Type, cast +from typing import TYPE_CHECKING, cast from typing_extensions import TypedDict @@ -12,6 +13,7 @@ from types import TracebackType from mypy_boto3_sts.type_defs import AssumedRoleUserTypeDef, CredentialsTypeDef + from typing_extensions import Self from ...._logging import RunwayLogger from ....context import RunwayContext @@ -19,12 +21,14 @@ LOGGER = cast("RunwayLogger", logging.getLogger(__name__.replace("._", "."))) -_KwargsTypeDef = TypedDict( - "_KwargsTypeDef", DurationSeconds=int, RoleArn=str, RoleSessionName=str -) +class _KwargsTypeDef(TypedDict): + DurationSeconds: int + RoleArn: str + RoleSessionName: str -class AssumeRole(ContextManager["AssumeRole"]): + +class AssumeRole(AbstractContextManager["AssumeRole"]): """Context manager for assuming an AWS role.""" assumed_role_user: AssumedRoleUserTypeDef @@ -37,11 +41,11 @@ class AssumeRole(ContextManager["AssumeRole"]): def __init__( self, context: RunwayContext, - role_arn: Optional[str] = None, - duration_seconds: Optional[int] = None, + role_arn: str | None = None, + duration_seconds: int | None = None, revert_on_exit: bool = True, - session_name: Optional[str] = None, - ): + session_name: str | None = None, + ) -> None: """Instantiate class. 
Args: @@ -108,7 +112,7 @@ def restore_existing_iam_env_vars(self) -> None: if not self.role_arn: LOGGER.debug("no role was assumed; not reverting credentials") return - for k in self.ctx.current_aws_creds.keys(): + for k in self.ctx.current_aws_creds: old = "OLD_" + k if self.ctx.env.vars.get(old): self.ctx.env.vars[k] = self.ctx.env.vars.pop(old) @@ -124,7 +128,7 @@ def save_existing_iam_env_vars(self) -> None: LOGGER.debug('saving environment variable "%s" as "%s"', k, new) self.ctx.env.vars[new] = cast(str, v) - def __enter__(self) -> AssumeRole: + def __enter__(self) -> Self: """Enter the context manager.""" LOGGER.debug("entering aws.AssumeRole context manager...") self.assume() @@ -132,9 +136,9 @@ def __enter__(self) -> AssumeRole: def __exit__( self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, ) -> None: """Exit the context manager.""" if self.revert_on_exit: diff --git a/runway/core/providers/aws/_response.py b/runway/core/providers/aws/_response.py index 41778a31d..a06407e44 100644 --- a/runway/core/providers/aws/_response.py +++ b/runway/core/providers/aws/_response.py @@ -1,7 +1,7 @@ """Base class for AWS responses.""" from http import HTTPStatus -from typing import Any, Dict +from typing import Any from pydantic import Field @@ -45,7 +45,7 @@ class ResponseMetadata(BaseModel): host_id: str = Field(default="", alias="HostId") """Host ID data.""" - https_headers: Dict[str, Any] = Field(default={}, alias="HTTPHeaders") + https_headers: dict[str, Any] = Field(default={}, alias="HTTPHeaders") """A map of response header keys and their respective values.""" http_status_code: int = Field(default=200, alias="HTTPStatusCode") @@ -83,7 +83,5 @@ class BaseResponse(BaseModel): error: ResponseError = Field(default=ResponseError(), alias="Error") """Information about a service or networking error.""" - metadata: ResponseMetadata = Field( - default=ResponseMetadata(), alias="ResponseMetadata" - ) + metadata: ResponseMetadata = Field(default=ResponseMetadata(), alias="ResponseMetadata") """Information about the request.""" diff --git a/runway/core/providers/aws/s3/_bucket.py b/runway/core/providers/aws/s3/_bucket.py index 48a2da3d3..0586fa280 100644 --- a/runway/core/providers/aws/s3/_bucket.py +++ b/runway/core/providers/aws/s3/_bucket.py @@ -4,7 +4,7 @@ import json import logging -from typing import TYPE_CHECKING, Any, List, Optional, Union +from typing import TYPE_CHECKING, Any from botocore.exceptions import ClientError @@ -31,9 +31,9 @@ class Bucket(DelCachedPropMixin): def __init__( self, - context: Union[CfnginContext, RunwayContext], + context: CfnginContext | RunwayContext, name: str, - region: Optional[str] = None, + region: str | None = None, ) -> None: """Instantiate class. @@ -63,7 +63,7 @@ def forbidden(self) -> bool: return self.head.metadata.forbidden @cached_property - def head(self): + def head(self) -> BaseResponse: """Check if a bucket exists and you have permission to access it. 
To use this operation, the user must have permissions to perform the @@ -73,14 +73,17 @@ def head(self): """ try: - return BaseResponse(**self.client.head_bucket(Bucket=self.name) or {}) + return BaseResponse( + **self.client.head_bucket(Bucket=self.name) + or {} # pyright: ignore[reportArgumentType] + ) except ClientError as err: LOGGER.debug( 'received an error from AWS S3 when trying to head bucket "%s"', self.name, exc_info=True, ) - return BaseResponse.parse_obj(err.response) + return BaseResponse.model_validate(err.response) @property def not_found(self) -> bool: @@ -92,7 +95,7 @@ def session(self) -> boto3.Session: """Create cached boto3 session.""" return self.__ctx.get_session(region=self._region) - def create(self, **kwargs: Any) -> Optional[CreateBucketOutputTypeDef]: + def create(self, **kwargs: Any) -> CreateBucketOutputTypeDef | None: """Create an S3 Bucket if it does not already exist. Bucket creation will be skipped if it already exists or access is forbidden. @@ -139,9 +142,7 @@ def enable_versioning(self) -> None: ) LOGGER.debug('enabled versioning for bucket "%s"', self.name) - def format_bucket_path_uri( - self, *, key: Optional[str] = None, prefix: Optional[str] = None - ) -> str: + def format_bucket_path_uri(self, *, key: str | None = None, prefix: str | None = None) -> str: """Format bucket path URI. Args: @@ -176,10 +177,10 @@ def sync_from_local( src_directory: str, *, delete: bool = False, - exclude: Optional[List[str]] = None, + exclude: list[str] | None = None, follow_symlinks: bool = False, - include: Optional[List[str]] = None, - prefix: Optional[str] = None, + include: list[str] | None = None, + prefix: str | None = None, ) -> None: """Sync local directory to the S3 Bucket. @@ -209,10 +210,10 @@ def sync_to_local( dest_directory: str, *, delete: bool = False, - exclude: Optional[List[str]] = None, + exclude: list[str] | None = None, follow_symlinks: bool = False, - include: Optional[List[str]] = None, - prefix: Optional[str] = None, + include: list[str] | None = None, + prefix: str | None = None, ) -> None: """Sync S3 bucket to local directory. 
diff --git a/runway/core/providers/aws/s3/_helpers/action_architecture.py b/runway/core/providers/aws/s3/_helpers/action_architecture.py index 577fc0915..8f2cb1aaf 100644 --- a/runway/core/providers/aws/s3/_helpers/action_architecture.py +++ b/runway/core/providers/aws/s3/_helpers/action_architecture.py @@ -9,7 +9,7 @@ import logging from queue import Queue -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, cast from typing_extensions import Literal, TypedDict @@ -50,12 +50,12 @@ class _CommandDictTypeDef(TypedDict): """Type definition for command_dict.""" - comparator: List[Comparator] - file_generator: List[FileGenerator] - file_info_builder: List[FileInfoBuilder] - filters: List[Any] - s3_handler: List[S3TransferHandler] - setup: List[FormatPathResult] + comparator: list[Comparator] + file_generator: list[FileGenerator] + file_info_builder: list[FileInfoBuilder] + filters: list[Any] + s3_handler: list[S3TransferHandler] + setup: list[FormatPathResult] class ActionArchitecture: @@ -76,7 +76,7 @@ def __init__( botocore_session: Session, action: Literal["sync"], parameters: ParametersDataModel, - runtime_config: Optional[TransferConfigDict] = None, + runtime_config: TransferConfigDict | None = None, ) -> None: """Instantiate class.""" self.botocore_session = botocore_session @@ -92,7 +92,7 @@ def client(self) -> S3Client: return self.session.client("s3") @cached_property - def instructions(self) -> List[_InstructionTypeDef]: + def instructions(self) -> list[_InstructionTypeDef]: """Create the instructions based on the command name and parameters. Note that all commands must have an s3_handler instruction in the @@ -100,7 +100,7 @@ def instructions(self) -> List[_InstructionTypeDef]: sends the request to S3 and does not yield anything. """ - result: List[_InstructionTypeDef] = ["file_generator"] + result: list[_InstructionTypeDef] = ["file_generator"] if self.parameters.exclude or self.parameters.include: result.append("filters") if self.action == "sync": @@ -109,7 +109,7 @@ def instructions(self) -> List[_InstructionTypeDef]: result.append("s3_handler") return result - def choose_sync_strategies(self) -> Dict[str, BaseSync]: + def choose_sync_strategies(self) -> dict[str, BaseSync]: """Determine the sync strategy for the command. It defaults to the default sync strategies but a customizable sync @@ -117,27 +117,28 @@ def choose_sync_strategies(self) -> Dict[str, BaseSync]: of its self when the event is emitted. """ - sync_strategies: Dict[str, BaseSync] = { + sync_strategies: dict[str, BaseSync] = { "file_at_src_and_dest_sync_strategy": SizeAndLastModifiedSync(), "file_not_at_dest_sync_strategy": MissingFileSync(), "file_not_at_src_sync_strategy": NeverSync(), } # Determine what strategies to override if any. - responses: Optional[List[Tuple[Any, BaseSync]]] = self.botocore_session.emit( - "choosing-s3-sync-strategy", params=self.parameters + responses = cast( + "list[tuple[Any, BaseSync]] | None", + self.botocore_session.emit("choosing-s3-sync-strategy", params=self.parameters), ) if responses is not None: for response in responses: override_sync_strategy = response[1] - if override_sync_strategy is not None: + if override_sync_strategy: sync_type = override_sync_strategy.sync_type sync_type += "_sync_strategy" sync_strategies[sync_type] = override_sync_strategy return sync_strategies - def run(self): + def run(self) -> Literal[1, 2, 0]: """Wire together all of the generators and completes the action. 
First a dictionary is created that is indexed first by @@ -170,7 +171,7 @@ def run(self): "s3local": "download", "s3": "delete", } - result_queue: "Queue[Any]" = Queue() + result_queue: Queue[Any] = Queue() operation_name = action_translation[paths_type] file_generator = FileGenerator( @@ -189,9 +190,7 @@ def run(self): result_queue=result_queue, request_parameters=self._get_file_generator_request_parameters_skeleton(), ) - file_info_builder = FileInfoBuilder( - client=self.client, parameters=self.parameters - ) + file_info_builder = FileInfoBuilder(client=self.client, parameters=self.parameters) s3_transfer_handler = S3TransferHandlerFactory( config_params=self.parameters, runtime_config=self._runtime_config )(self.client, result_queue) @@ -243,5 +242,5 @@ def run(self): return return_code @staticmethod - def _get_file_generator_request_parameters_skeleton() -> Dict[str, Dict[str, Any]]: + def _get_file_generator_request_parameters_skeleton() -> dict[str, dict[str, Any]]: return {"HeadObject": {}, "ListObjects": {}, "ListObjectsV2": {}} diff --git a/runway/core/providers/aws/s3/_helpers/comparator.py b/runway/core/providers/aws/s3/_helpers/comparator.py index 06ee42dc0..51d581cca 100644 --- a/runway/core/providers/aws/s3/_helpers/comparator.py +++ b/runway/core/providers/aws/s3/_helpers/comparator.py @@ -8,9 +8,11 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Generator, Iterator, Optional, cast +from typing import TYPE_CHECKING, cast if TYPE_CHECKING: + from collections.abc import Generator, Iterator + from .file_generator import FileStats from .sync_strategy.base import BaseSync @@ -26,13 +28,13 @@ def __init__( file_at_src_and_dest_sync_strategy: BaseSync, file_not_at_dest_sync_strategy: BaseSync, file_not_at_src_sync_strategy: BaseSync, - ): + ) -> None: """Instantiate class.""" self._sync_strategy = file_at_src_and_dest_sync_strategy self._not_at_dest_sync_strategy = file_not_at_dest_sync_strategy self._not_at_src_sync_strategy = file_not_at_src_sync_strategy - def call( # pylint: disable=too-many-statements + def call( # noqa: C901, PLR0912, PLR0915 self, src_files: Iterator[FileStats], dest_files: Iterator[FileStats] ) -> Generator[FileStats, None, None]: """Preform the actual comparisons. @@ -70,10 +72,10 @@ def call( # pylint: disable=too-many-statements Yields the FilInfo objects of the files that need to be operated on. """ - dest_file: Optional[FileStats] = None + dest_file: FileStats | None = None dest_done = False # True if there are no more files form the dest left. dest_take = True # Take the next dest file from the generated files if true - src_file: Optional[FileStats] = None + src_file: FileStats | None = None src_done = False # True if there are no more files from the source left. 
src_take = True # Take the next source file from the generated files if true while True: @@ -97,9 +99,7 @@ def call( # pylint: disable=too-many-statements compare_keys = self.compare_comp_key(src_file, dest_file) if compare_keys == "equal": - should_sync = self._sync_strategy.determine_should_sync( - src_file, dest_file - ) + should_sync = self._sync_strategy.determine_should_sync(src_file, dest_file) if should_sync: yield cast("FileStats", src_file) elif compare_keys == "less_than": @@ -121,25 +121,19 @@ def call( # pylint: disable=too-many-statements elif (not src_done) and dest_done: src_take = True - should_sync = self._not_at_dest_sync_strategy.determine_should_sync( - src_file, None - ) + should_sync = self._not_at_dest_sync_strategy.determine_should_sync(src_file, None) if should_sync: yield cast("FileStats", src_file) elif src_done and (not dest_done): dest_take = True - should_sync = self._not_at_src_sync_strategy.determine_should_sync( - None, dest_file - ) + should_sync = self._not_at_src_sync_strategy.determine_should_sync(None, dest_file) if should_sync: yield cast("FileStats", dest_file) else: break # cov: ignore @staticmethod - def compare_comp_key( - src_file: Optional[FileStats], dest_file: Optional[FileStats] - ) -> str: + def compare_comp_key(src_file: FileStats | None, dest_file: FileStats | None) -> str: """Compare the source & destination compare_key.""" src_comp_key = (src_file.compare_key if src_file else None) or "" dest_comp_key = (dest_file.compare_key if dest_file else None) or "" diff --git a/runway/core/providers/aws/s3/_helpers/file_generator.py b/runway/core/providers/aws/s3/_helpers/file_generator.py index 9ba2503bb..8cfe149cb 100644 --- a/runway/core/providers/aws/s3/_helpers/file_generator.py +++ b/runway/core/providers/aws/s3/_helpers/file_generator.py @@ -7,7 +7,6 @@ from __future__ import annotations -import datetime import os import stat from copy import deepcopy @@ -17,12 +16,6 @@ from typing import ( TYPE_CHECKING, Any, - Dict, - Generator, - List, - Optional, - Tuple, - Union, cast, ) @@ -41,6 +34,9 @@ ) if TYPE_CHECKING: + import datetime + from collections.abc import Generator + from mypy_boto3_s3.client import S3Client from mypy_boto3_s3.type_defs import HeadObjectOutputTypeDef, ObjectTypeDef @@ -62,7 +58,7 @@ def is_readable(path: Path) -> bool: return False else: try: - with open(path, "r", encoding="utf-8"): + with open(path, encoding="utf-8"): # noqa: PTH123 pass except OSError: return False @@ -87,23 +83,19 @@ def is_special_file(path: Path) -> bool: if stat.S_ISFIFO(mode): return True # Socket. 
- if stat.S_ISSOCK(mode): - return True - return False - - -FileStatsDict = TypedDict( - "FileStatsDict", - src="AnyPath", - compare_key=Optional[str], - dest_type=Optional["SupportedPathType"], - dest=Optional[str], - last_update=datetime.datetime, - operation_name=Optional[str], - response_data=Optional[Union["HeadObjectOutputTypeDef", "ObjectTypeDef"]], - size=Optional[int], - src_type=Optional["SupportedPathType"], -) + return bool(stat.S_ISSOCK(mode)) + + +class FileStatsDict(TypedDict): + src: AnyPath + compare_key: str | None + dest_type: SupportedPathType | None + dest: str | None + last_update: datetime.datetime + operation_name: str | None + response_data: HeadObjectOutputTypeDef | ObjectTypeDef | None + size: int | None + src_type: SupportedPathType | None @dataclass @@ -126,23 +118,23 @@ class FileStats: """ src: AnyPath - compare_key: Optional[str] = None - dest: Optional[str] = None - dest_type: Optional[SupportedPathType] = None + compare_key: str | None = None + dest: str | None = None + dest_type: SupportedPathType | None = None last_update: datetime.datetime = EPOCH_TIME - operation_name: Optional[str] = None - response_data: Optional[Union[HeadObjectOutputTypeDef, ObjectTypeDef]] = None - size: Optional[int] = None - src_type: Optional[SupportedPathType] = None + operation_name: str | None = None + response_data: HeadObjectOutputTypeDef | ObjectTypeDef | None = None + size: int | None = None + src_type: SupportedPathType | None = None def dict(self) -> FileStatsDict: """Dump contents of object to a dict.""" return deepcopy(cast(FileStatsDict, self.__dict__)) -_LastModifiedAndSize = TypedDict( - "_LastModifiedAndSize", Size=int, LastModified=datetime.datetime -) +class _LastModifiedAndSize(TypedDict): + Size: int + LastModified: datetime.datetime class FileGenerator: @@ -153,17 +145,17 @@ class FileGenerator: """ - result_queue: "Queue[Any]" + result_queue: Queue[Any] def __init__( self, client: S3Client, operation_name: str, follow_symlinks: bool = True, - page_size: Optional[int] = None, - result_queue: Optional["Queue[Any]"] = None, + page_size: int | None = None, + result_queue: Queue[Any] | None = None, request_parameters: Any = None, - ): + ) -> None: """Instantiate class. Args: @@ -187,9 +179,7 @@ def __init__( def call(self, files: FormatPathResult) -> Generator[FileStats, None, None]: """Generalized function to yield the ``FileInfo`` objects.""" function_table = {"s3": self.list_objects, "local": self.list_files} - file_iterator = function_table[files["src"]["type"]]( - files["src"]["path"], files["dir_op"] - ) + file_iterator = function_table[files["src"]["type"]](files["src"]["path"], files["dir_op"]) for src_path, extra_information in file_iterator: dest_path, compare_key = find_dest_path_comp_key(files, src_path) file_stat_kwargs: FileStatsDict = { @@ -205,14 +195,14 @@ def call(self, files: FormatPathResult) -> Generator[FileStats, None, None]: } if files["src"]["type"] == "s3": file_stat_kwargs["response_data"] = cast( - Optional[Union["HeadObjectOutputTypeDef", "ObjectTypeDef"]], + "HeadObjectOutputTypeDef | ObjectTypeDef | None", extra_information, ) yield FileStats(**file_stat_kwargs) def list_files( self, path: AnyPath, dir_op: bool - ) -> Generator[Tuple[Path, _LastModifiedAndSize], None, None]: + ) -> Generator[tuple[Path, _LastModifiedAndSize], None, None]: """Yield the appropriate local file or local files under a directory. 
For directories a depth first search is implemented in order to @@ -232,24 +222,23 @@ def list_files( # using os.listdir instead of Path.iterdir so we can sort the list # but not load the entire tree into memory listdir_names = os.listdir(path) - names: List[str] = [] + names: list[str] = [] for name in listdir_names: if (path / name).is_dir(): - name = name + os.path.sep + name = name + os.path.sep # noqa: PLW2901 names.append(name) self.normalize_sort(names, os.sep, "/") for name in names: file_path = path / name if file_path.is_dir(): - for result in self.list_files(file_path, dir_op): - yield result + yield from self.list_files(file_path, dir_op) else: stats = self.safely_get_file_stats(file_path) if stats: yield stats @staticmethod - def normalize_sort(names: List[str], os_sep: str, character: str) -> None: + def normalize_sort(names: list[str], os_sep: str, character: str) -> None: """Ensure that the same path separator is used when sorting. On Windows, the path operator is a backslash as opposed to a forward slash @@ -263,9 +252,7 @@ def normalize_sort(names: List[str], os_sep: str, character: str) -> None: """ names.sort(key=lambda item: item.replace(os_sep, character)) - def safely_get_file_stats( - self, path: Path - ) -> Optional[Tuple[Path, _LastModifiedAndSize]]: + def safely_get_file_stats(self, path: Path) -> tuple[Path, _LastModifiedAndSize] | None: """Get file stats with handling for some common errors. Args: @@ -282,14 +269,13 @@ def safely_get_file_stats( return None def _validate_update_time( - self, update_time: Optional[datetime.datetime], path: Path + self, update_time: datetime.datetime | None, path: Path ) -> datetime.datetime: """Handle missing last modified time.""" if update_time is None: warning = create_warning( path=path, - error_message="File has an invalid timestamp. Passing epoch " - "time as timestamp.", + error_message="File has an invalid timestamp. Passing epoch time as timestamp.", skip_file=False, ) self.result_queue.put(warning) @@ -303,14 +289,11 @@ def should_ignore_file(self, path: Path) -> bool: warnings. """ - if not self.follow_symlinks: - if path.is_dir() and path.is_symlink(): - # is_symlink returns False if it does not exist - return True - warning_triggered = self.triggers_warning(path) - if warning_triggered: + if not self.follow_symlinks and path.is_dir() and path.is_symlink(): + # is_symlink returns False if it does not exist return True - return False + warning_triggered = self.triggers_warning(path) + return bool(warning_triggered) def triggers_warning(self, path: Path) -> bool: """Check the specific types and properties of a file. @@ -330,10 +313,7 @@ def triggers_warning(self, path: Path) -> bool: if is_special_file(path): warning = create_warning( path, - ( - "File is character special device, " - "block special device, FIFO, or socket." - ), + ("File is character special device, block special device, FIFO, or socket."), ) self.result_queue.put(warning) return True @@ -345,9 +325,7 @@ def triggers_warning(self, path: Path) -> bool: def list_objects( self, s3_path: str, dir_op: bool - ) -> Generator[ - Tuple[str, Union[HeadObjectOutputTypeDef, ObjectTypeDef]], None, None - ]: + ) -> Generator[tuple[str, HeadObjectOutputTypeDef | ObjectTypeDef], None, None]: """Yield the appropriate object or objects under a common prefix. It yields the file's source path, size, and last update. 
@@ -387,7 +365,7 @@ def list_objects( else: yield source_path, response_data - def _list_single_object(self, s3_path: str) -> Tuple[str, HeadObjectOutputTypeDef]: + def _list_single_object(self, s3_path: str) -> tuple[str, HeadObjectOutputTypeDef]: """List single object.""" # When we know we're dealing with a single object, we can avoid # a ListObjects operation (which causes concern for anyone setting @@ -401,7 +379,7 @@ def _list_single_object(self, s3_path: str) -> Tuple[str, HeadObjectOutputTypeDe return s3_path, {"Size": None, "LastModified": None} # type: ignore bucket, key = find_bucket_key(s3_path) try: - params: Dict[str, Any] = {"Bucket": bucket, "Key": key} + params: dict[str, Any] = {"Bucket": bucket, "Key": key} # params.update(self.request_parameters.get("HeadObject", {})) response = self._client.head_object(**params) except ClientError as exc: diff --git a/runway/core/providers/aws/s3/_helpers/file_info.py b/runway/core/providers/aws/s3/_helpers/file_info.py index a0823e07b..1d27d1da5 100644 --- a/runway/core/providers/aws/s3/_helpers/file_info.py +++ b/runway/core/providers/aws/s3/_helpers/file_info.py @@ -7,7 +7,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, Optional, Union +from typing import TYPE_CHECKING, Any from ......compat import cached_property from .utils import EPOCH_TIME @@ -35,18 +35,18 @@ def __init__( self, src: AnyPath, *, - client: Optional[S3Client] = None, - compare_key: Optional[str] = None, - dest_type: Optional[SupportedPathType] = None, - dest: Optional[AnyPath] = None, + client: S3Client | None = None, + compare_key: str | None = None, + dest_type: SupportedPathType | None = None, + dest: AnyPath | None = None, is_stream: bool = False, - last_update: Optional[datetime.datetime] = None, - operation_name: Optional[str] = None, - parameters: Optional[Dict[str, Any]] = None, - response_data: Optional[Union[HeadObjectOutputTypeDef, ObjectTypeDef]] = None, - size: Optional[int] = None, - source_client: Optional[S3Client] = None, - src_type: Optional[SupportedPathType] = None, + last_update: datetime.datetime | None = None, + operation_name: str | None = None, + parameters: dict[str, Any] | None = None, + response_data: HeadObjectOutputTypeDef | ObjectTypeDef | None = None, + size: int | None = None, + source_client: S3Client | None = None, + src_type: SupportedPathType | None = None, ) -> None: """Instantiate class. 
@@ -108,21 +108,18 @@ def is_glacier_compatible(self) -> bool: return True def _is_glacier_object( - self, response_data: Optional[Union[HeadObjectOutputTypeDef, ObjectTypeDef]] + self, response_data: HeadObjectOutputTypeDef | ObjectTypeDef | None ) -> bool: """Determine if a file info object is glacier compatible.""" glacier_storage_classes = ["GLACIER", "DEEP_ARCHIVE"] - if response_data: - if response_data.get( - "StorageClass" - ) in glacier_storage_classes and not self._is_restored(response_data): - return True - return False + return bool( + response_data + and response_data.get("StorageClass") in glacier_storage_classes + and not self._is_restored(response_data) + ) @staticmethod - def _is_restored( - response_data: Union[HeadObjectOutputTypeDef, ObjectTypeDef] - ) -> bool: + def _is_restored(response_data: HeadObjectOutputTypeDef | ObjectTypeDef) -> bool: """Return True is this is a glacier object that has been restored back to S3.""" # 'Restore' looks like: 'ongoing-request="false", expiry-date="..."' return 'ongoing-request="false"' in response_data.get("Restore", "") diff --git a/runway/core/providers/aws/s3/_helpers/file_info_builder.py b/runway/core/providers/aws/s3/_helpers/file_info_builder.py index e60907d00..279fe16b4 100644 --- a/runway/core/providers/aws/s3/_helpers/file_info_builder.py +++ b/runway/core/providers/aws/s3/_helpers/file_info_builder.py @@ -7,11 +7,13 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Generator, Iterable, Optional +from typing import TYPE_CHECKING, Any from .file_info import FileInfo if TYPE_CHECKING: + from collections.abc import Generator, Iterable + from mypy_boto3_s3.client import S3Client from .file_generator import FileStats @@ -26,8 +28,8 @@ def __init__( *, client: S3Client, is_stream: bool = False, - parameters: Optional[ParametersDataModel] = None, - source_client: Optional[Any] = None, + parameters: ParametersDataModel | None = None, + source_client: Any | None = None, ) -> None: """Instantiate class. 
@@ -61,7 +63,7 @@ def _inject_info(self, file_base: FileStats) -> FileInfo: """ delete_enabled = self._parameters.delete if self._parameters else False return FileInfo( - **file_base.dict(), + **file_base.dict(), # pyright: ignore[reportArgumentType] **( {"client": self._source_client, "source_client": self._client} if file_base.operation_name == "delete" and delete_enabled diff --git a/runway/core/providers/aws/s3/_helpers/filters.py b/runway/core/providers/aws/s3/_helpers/filters.py index 73e3cbe94..f26a60d66 100644 --- a/runway/core/providers/aws/s3/_helpers/filters.py +++ b/runway/core/providers/aws/s3/_helpers/filters.py @@ -13,15 +13,7 @@ from typing import ( TYPE_CHECKING, ClassVar, - Generator, - Iterable, - Iterator, - List, NamedTuple, - Optional, - Set, - Tuple, - cast, ) from typing_extensions import Literal @@ -29,6 +21,8 @@ from .utils import split_s3_bucket_key if TYPE_CHECKING: + from collections.abc import Generator, Iterable, Iterator + from .file_generator import FileStats from .parameters import ParametersDataModel @@ -36,14 +30,22 @@ _FilterType = Literal["exclude", "include"] -FileStatus = NamedTuple("FileStatus", [("file_stats", "FileStats"), ("include", bool)]) -FilterPattern = NamedTuple("FilterPattern", [("type", _FilterType), ("pattern", str)]) + + +class FileStatus(NamedTuple): + file_stats: FileStats + include: bool + + +class FilterPattern(NamedTuple): + type: _FilterType + pattern: str class Filter: """Universal exclude/include filter.""" - FILTER_TYPES: ClassVar[Tuple[_FilterType, ...]] = ( + FILTER_TYPES: ClassVar[tuple[_FilterType, ...]] = ( "exclude", "include", ) @@ -51,8 +53,8 @@ class Filter: def __init__( self, patterns: Iterable[FilterPattern], - src_rootdir: Optional[str], - dest_rootdir: Optional[str], + src_rootdir: str | None, + dest_rootdir: str | None, ) -> None: """Instantiate class. @@ -70,8 +72,8 @@ def __init__( @staticmethod def _full_path_patterns( - patterns: Iterable[FilterPattern], rootdir: Optional[str] - ) -> List[FilterPattern]: + patterns: Iterable[FilterPattern], rootdir: str | None + ) -> list[FilterPattern]: """Prefix each pattern with the root directory. Args: @@ -82,7 +84,9 @@ def _full_path_patterns( if rootdir: return sorted( # sort for consistency [ - FilterPattern(type=f.type, pattern=os.path.join(rootdir, f.pattern)) + FilterPattern( + type=f.type, pattern=os.path.join(rootdir, f.pattern) # noqa: PTH118 + ) for f in patterns ] ) @@ -119,9 +123,7 @@ def call(self, files: Iterator[FileStats]) -> Generator[FileStats, None, None]: yield file_stats @staticmethod - def _match_pattern( - filter_pattern: FilterPattern, file_stats: FileStats - ) -> Optional[FileStatus]: + def _match_pattern(filter_pattern: FilterPattern, file_stats: FileStats) -> FileStatus | None: """Match file to pattern. 
Args: @@ -155,12 +157,10 @@ def parse_params(cls, parameters: ParametersDataModel) -> Filter: """Parse parameters to create a Filter instance.""" if not (parameters.exclude or parameters.include): return Filter([], None, None) - filter_patterns: Set[FilterPattern] = set() + filter_patterns: set[FilterPattern] = set() for filter_type in cls.FILTER_TYPES: for pat in parameters[filter_type]: - filter_patterns.add( - FilterPattern(type=cast(_FilterType, filter_type), pattern=pat) - ) + filter_patterns.add(FilterPattern(type=filter_type, pattern=pat)) return Filter( filter_patterns, cls.parse_rootdir(parameters.src), @@ -190,8 +190,8 @@ def _parse_rootdir_local(path: str, dir_op: bool = True) -> str: """ if dir_op: - return os.path.abspath(path) - return os.path.abspath(os.path.dirname(path)) + return os.path.abspath(path) # noqa: PTH100 + return os.path.abspath(os.path.dirname(path)) # noqa: PTH100, PTH120 @staticmethod def _parse_rootdir_s3(path: str, dir_op: bool = True) -> str: @@ -205,4 +205,4 @@ def _parse_rootdir_s3(path: str, dir_op: bool = True) -> str: bucket, key = split_s3_bucket_key(path) if not (dir_op or key.endswith("/")): key = "/".join(key.split("/")[:-1]) - return "/".join([bucket, key]) + return f"{bucket}/{key}" diff --git a/runway/core/providers/aws/s3/_helpers/format_path.py b/runway/core/providers/aws/s3/_helpers/format_path.py index 818000849..cacdd021f 100644 --- a/runway/core/providers/aws/s3/_helpers/format_path.py +++ b/runway/core/providers/aws/s3/_helpers/format_path.py @@ -9,21 +9,22 @@ import os from pathlib import Path -from typing import Tuple from typing_extensions import Literal, TypedDict SupportedPathType = Literal["local", "s3"] -FormattedPathDetails = TypedDict( - "FormattedPathDetails", path=str, type=SupportedPathType -) -FormatPathResult = TypedDict( - "FormattedPaths", - dest=FormattedPathDetails, - dir_op=bool, - src=FormattedPathDetails, - use_src_name=bool, -) + + +class FormattedPathDetails(TypedDict): + path: str + type: SupportedPathType + + +class FormatPathResult(TypedDict): + dest: FormattedPathDetails + dir_op: bool + src: FormattedPathDetails + use_src_name: bool class FormatPath: @@ -47,7 +48,7 @@ def format(cls, src: str, dest: str) -> FormatPathResult: } @staticmethod - def format_local_path(path: str, dir_op: bool = True) -> Tuple[str, bool]: + def format_local_path(path: str, dir_op: bool = True) -> tuple[str, bool]: """Format the path of local files. Returns whether the destination will keep its own name or take the @@ -80,7 +81,7 @@ def format_local_path(path: str, dir_op: bool = True) -> Tuple[str, bool]: return str(full_path), False @staticmethod - def format_s3_path(path: str, dir_op: bool = True) -> Tuple[str, bool]: + def format_s3_path(path: str, dir_op: bool = True) -> tuple[str, bool]: """Format the path of S3 files. Returns whether the destination will keep its own name or take the @@ -110,7 +111,7 @@ def format_s3_path(path: str, dir_op: bool = True) -> Tuple[str, bool]: return path, False @staticmethod - def identify_path_type(path: str) -> Tuple[SupportedPathType, str]: + def identify_path_type(path: str) -> tuple[SupportedPathType, str]: """Parse path. 
Args: diff --git a/runway/core/providers/aws/s3/_helpers/parameters.py b/runway/core/providers/aws/s3/_helpers/parameters.py index 027d0b397..637fe139f 100644 --- a/runway/core/providers/aws/s3/_helpers/parameters.py +++ b/runway/core/providers/aws/s3/_helpers/parameters.py @@ -3,14 +3,17 @@ from __future__ import annotations from pathlib import Path -from typing import Any, Dict, List, Optional, Union, cast +from typing import TYPE_CHECKING, Any, cast -from pydantic import validator +from pydantic import field_validator, model_validator from typing_extensions import Literal from ......utils import BaseModel from .utils import find_bucket_key +if TYPE_CHECKING: + from typing_extensions import Self + PathsType = Literal["local", "locallocal", "locals3", "s3", "s3local", "s3s3"] @@ -49,46 +52,39 @@ class ParametersDataModel(BaseModel): dest: str src: str # these need to be set after dest & src so their validators can access the value if needed - content_type: Optional[str] = None + content_type: str | None = None delete: bool = False dir_op: bool = False dryrun: bool = False exact_timestamps: bool = False - exclude: List[str] = [] - expected_size: Optional[int] = None + exclude: list[str] = [] + expected_size: int | None = None follow_symlinks: bool = False force_glacier_transfer: bool = False guess_mime_type: bool = True ignore_glacier_warnings: bool = False - include: List[str] = [] + include: list[str] = [] is_move: bool = False is_stream: bool = False no_progress: bool = False only_show_errors: bool = False - page_size: Optional[int] = None + page_size: int | None = None paths_type: PathsType = "local" # will be overwritten quiet: bool = False size_only: bool = False - sse_c: Optional[str] = None - sse_c_key: Optional[str] = None - storage_class: Optional[str] = None + sse_c: str | None = None + sse_c_key: str | None = None + storage_class: str | None = None - @validator("paths_type", always=True, pre=True) - @classmethod - def _determine_paths_type( - cls, - v: Optional[str], # pylint: disable=unused-argument - values: Dict[str, Any], - ) -> PathsType: + @model_validator(mode="after") + def _determine_paths_type(self: Self) -> Self: """Determine paths type for the given src and dest.""" - # these have already been validated so it's "safe" to cast them - dest = cast(str, values.get("dest", "")) - src = cast(str, values.get("src", "")) - src_type = "s3" if src.startswith("s3://") else "local" - dest_type = "s3" if dest.startswith("s3://") else "local" - return cast(PathsType, f"{src_type}{dest_type}") - - @validator("dest", "src", pre=True) + src_type = "s3" if self.src.startswith("s3://") else "local" + dest_type = "s3" if self.dest.startswith("s3://") else "local" + self.paths_type = cast(PathsType, f"{src_type}{dest_type}") + return self + + @field_validator("dest", "src", mode="before") @classmethod def _normalize_s3_trailing_slash(cls, v: str) -> str: """Add a trailing "/" if the root of an S3 bucket was provided.""" @@ -102,9 +98,7 @@ def _normalize_s3_trailing_slash(cls, v: str) -> str: class Parameters: """Initial error based on the parameters and arguments passed to sync.""" - def __init__( - self, action: str, parameters: Union[Dict[str, Any], ParametersDataModel] - ): + def __init__(self, action: str, parameters: dict[str, Any] | ParametersDataModel) -> None: """Instantiate class. 
Args: @@ -113,7 +107,7 @@ def __init__( """ self.action = action - self.data = ParametersDataModel.parse_obj(parameters) + self.data = ParametersDataModel.model_validate(parameters) if self.action in ["sync", "mb", "rb"]: self.data.dir_op = True if self.action == "mv": @@ -139,8 +133,6 @@ def _validate_path_args(self) -> None: def _same_path(self) -> bool: """Evaluate if the src and dest are the same path.""" - if not self.data.paths_type == "s3s3": + if self.data.paths_type != "s3s3": return False - if self.data.src == self.data.dest: - return True - return False + return self.data.src == self.data.dest diff --git a/runway/core/providers/aws/s3/_helpers/results.py b/runway/core/providers/aws/s3/_helpers/results.py index 8ee70af9d..7f667ccc5 100644 --- a/runway/core/providers/aws/s3/_helpers/results.py +++ b/runway/core/providers/aws/s3/_helpers/results.py @@ -19,19 +19,13 @@ Any, Callable, ClassVar, - Dict, - List, NamedTuple, - Optional, TextIO, - Tuple, - Type, - Union, + TypedDict, cast, ) from s3transfer.exceptions import FatalError -from typing_extensions import Literal from ......utils import ensure_string from .utils import ( @@ -46,6 +40,7 @@ from types import TracebackType from s3transfer.futures import TransferFuture + from typing_extensions import Literal, Self, TypeAlias from ......_logging import RunwayLogger from ......type_defs import AnyPath @@ -59,53 +54,53 @@ class CommandResult(NamedTuple): num_tasks_failed: int num_tasks_warned: int - dest: Optional[str] = None - src: Optional[str] = None - transfer_type: Optional[str] = None + dest: str | None = None + src: str | None = None + transfer_type: str | None = None class CtrlCResult(NamedTuple): """Keyboard exit.""" exception: Exception - dest: Optional[str] = None - src: Optional[str] = None - transfer_type: Optional[str] = None + dest: str | None = None + src: str | None = None + transfer_type: str | None = None class DryRunResult(NamedTuple): """Dry run result.""" - dest: Optional[str] = None - src: Optional[str] = None - transfer_type: Optional[str] = None + dest: str | None = None + src: str | None = None + transfer_type: str | None = None class ErrorResult(NamedTuple): """Error.""" exception: BaseException - dest: Optional[str] = None - src: Optional[str] = None - transfer_type: Optional[str] = None + dest: str | None = None + src: str | None = None + transfer_type: str | None = None class FailureResult(NamedTuple): """Failure.""" exception: Exception - dest: Optional[str] = None - src: Optional[str] = None - transfer_type: Optional[str] = None + dest: str | None = None + src: str | None = None + transfer_type: str | None = None class FinalTotalSubmissionsResult(NamedTuple): """Final total submissions.""" total_submissions: int - dest: Optional[str] = None - src: Optional[str] = None - transfer_type: Optional[str] = None + dest: str | None = None + src: str | None = None + transfer_type: str | None = None class ProgressResult(NamedTuple): @@ -114,26 +109,26 @@ class ProgressResult(NamedTuple): bytes_transferred: int timestamp: float total_transfer_size: int - dest: Optional[str] = None - src: Optional[str] = None - transfer_type: Optional[str] = None + dest: str | None = None + src: str | None = None + transfer_type: str | None = None class SuccessResult(NamedTuple): """Success.""" - dest: Optional[str] = None - src: Optional[str] = None - transfer_type: Optional[str] = None + dest: str | None = None + src: str | None = None + transfer_type: str | None = None class QueuedResult(NamedTuple): """Queued.""" 
total_transfer_size: int - dest: Optional[str] = None - src: Optional[str] = None - transfer_type: Optional[str] = None + dest: str | None = None + src: str | None = None + transfer_type: str | None = None AllResultTypes = ( @@ -147,17 +142,23 @@ class QueuedResult(NamedTuple): SuccessResult, QueuedResult, ) -AnyResult = Union[ - CommandResult, - CtrlCResult, - DryRunResult, - ErrorResult, - FailureResult, - FinalTotalSubmissionsResult, - ProgressResult, - QueuedResult, - SuccessResult, -] +AnyResultType: TypeAlias = ( + "CommandResult | CtrlCResult | DryRunResult | ErrorResult | FailureResult | FinalTotalSubmissionsResult | ProgressResult | QueuedResult | SuccessResult" # noqa: E501 +) + + +class _ResultHandlerMappingTypedDict(TypedDict, total=False): + + CommandResult: Callable[..., None] + CtrlCResult: Callable[..., None] + DryRunResult: Callable[..., None] + ErrorResult: Callable[..., None] + FailureResult: Callable[..., None] + FinalTotalSubmissionsResult: Callable[..., None] + PrintTask: Callable[..., None] + ProgressResult: Callable[..., None] + QueuedResult: Callable[..., None] + SuccessResult: Callable[..., None] class ShutdownThreadRequest: @@ -167,11 +168,9 @@ class ShutdownThreadRequest: class BaseResultSubscriber(OnDoneFilteredSubscriber): """Base result subscriber.""" - TRANSFER_TYPE: ClassVar[Optional[str]] = None + TRANSFER_TYPE: ClassVar = None - def __init__( - self, result_queue: "queue.Queue[Any]", transfer_type: Optional[str] = None - ): + def __init__(self, result_queue: queue.Queue[Any], transfer_type: str | None = None) -> None: """Send result notifications during transfer process. Args: @@ -181,7 +180,7 @@ def __init__( """ self._result_queue = result_queue - self._result_kwargs_cache: Dict[str, Any] = {} + self._result_kwargs_cache: dict[str, Any] = {} self._transfer_type = transfer_type if transfer_type is None: self._transfer_type = self.TRANSFER_TYPE @@ -193,12 +192,10 @@ def on_queued(self, future: TransferFuture, **_: Any) -> None: queued_result = QueuedResult(**result_kwargs) self._result_queue.put(queued_result) - def on_progress( - self, future: TransferFuture, bytes_transferred: int, **_: Any - ) -> None: + def on_progress(self, future: TransferFuture, bytes_transferred: int, **_: Any) -> None: """On progress.""" - result_kwargs: Dict[str, Any] = self._result_kwargs_cache.get( - cast(str, future.meta.transfer_id), cast(Dict[str, Any], {}) + result_kwargs: dict[str, Any] = self._result_kwargs_cache.get( + cast(str, future.meta.transfer_id), cast("dict[str, Any]", {}) ) progress_result = ProgressResult( bytes_transferred=bytes_transferred, timestamp=time.time(), **result_kwargs @@ -232,17 +229,15 @@ def _add_to_result_kwargs_cache(self, future: TransferFuture) -> None: } self._result_kwargs_cache[cast(str, future.meta.transfer_id)] = result_kwargs - def _on_done_pop_from_result_kwargs_cache( - self, future: TransferFuture - ) -> Dict[str, Any]: + def _on_done_pop_from_result_kwargs_cache(self, future: TransferFuture) -> dict[str, Any]: """On done, pop from results cache.""" - result_kwargs: Dict[str, Any] = self._result_kwargs_cache.pop( + result_kwargs: dict[str, Any] = self._result_kwargs_cache.pop( cast(str, future.meta.transfer_id) ) result_kwargs.pop("total_transfer_size") return result_kwargs - def _get_src_dest(self, future: TransferFuture) -> Tuple[str, str]: + def _get_src_dest(self, future: TransferFuture) -> tuple[str, str]: """Get source destination.""" raise NotImplementedError("_get_src_dest()") @@ -252,7 +247,7 @@ class 
UploadResultSubscriber(BaseResultSubscriber): TRANSFER_TYPE: ClassVar[Literal["upload"]] = "upload" - def _get_src_dest(self, future: TransferFuture) -> Tuple[str, str]: + def _get_src_dest(self, future: TransferFuture) -> tuple[str, str]: call_args = future.meta.call_args src = self._get_src(call_args.fileobj) dest = "s3://" + call_args.bucket + "/" + call_args.key @@ -265,7 +260,7 @@ def _get_src(self, fileobj: AnyPath) -> str: class UploadStreamResultSubscriber(UploadResultSubscriber): """Upload stream result subscriber.""" - def _get_src(self, fileobj: AnyPath) -> str: + def _get_src(self, fileobj: AnyPath) -> str: # noqa: ARG002 return "-" @@ -274,7 +269,7 @@ class DownloadResultSubscriber(BaseResultSubscriber): TRANSFER_TYPE: ClassVar[Literal["download"]] = "download" - def _get_src_dest(self, future: TransferFuture) -> Tuple[str, str]: + def _get_src_dest(self, future: TransferFuture) -> tuple[str, str]: call_args = future.meta.call_args src = "s3://" + call_args.bucket + "/" + call_args.key dest = self._get_dest(call_args.fileobj) @@ -287,7 +282,7 @@ def _get_dest(self, fileobj: AnyPath) -> str: class DownloadStreamResultSubscriber(DownloadResultSubscriber): """Download stream result subscriber.""" - def _get_dest(self, fileobj: AnyPath) -> str: + def _get_dest(self, fileobj: AnyPath) -> str: # noqa: ARG002 return "-" @@ -296,7 +291,7 @@ class CopyResultSubscriber(BaseResultSubscriber): TRANSFER_TYPE: ClassVar[Literal["copy"]] = "copy" - def _get_src_dest(self, future: TransferFuture) -> Tuple[str, str]: + def _get_src_dest(self, future: TransferFuture) -> tuple[str, str]: call_args = future.meta.call_args copy_source = call_args.copy_source src = "s3://" + copy_source["Bucket"] + "/" + copy_source["Key"] @@ -309,7 +304,7 @@ class DeleteResultSubscriber(BaseResultSubscriber): TRANSFER_TYPE: ClassVar[Literal["delete"]] = "delete" - def _get_src_dest(self, future: TransferFuture) -> Tuple[str, None]: # type: ignore + def _get_src_dest(self, future: TransferFuture) -> tuple[str, None]: # type: ignore call_args = future.meta.call_args src = "s3://" + call_args.bucket + "/" + call_args.key return src, None @@ -326,7 +321,7 @@ def __call__(self, result: Any) -> None: class ResultRecorder(BaseResultHandler): """Record and track transfer statistics based on results received.""" - def __init__(self): + def __init__(self) -> None: """Instantiate class.""" self.bytes_transferred = 0 self.bytes_failed_to_transfer = 0 @@ -342,44 +337,42 @@ def __init__(self): self.bytes_transfer_speed = 0 self._ongoing_progress = defaultdict(int) - self._ongoing_total_sizes: Dict[str, int] = {} - - self._result_handler_map = { - QueuedResult: self._record_queued_result, - ProgressResult: self._record_progress_result, - SuccessResult: self._record_success_result, - FailureResult: self._record_failure_result, - PrintTask: self._record_warning_result, - ErrorResult: self._record_error_result, - CtrlCResult: self._record_error_result, - FinalTotalSubmissionsResult: self._record_final_expected_files, + self._ongoing_total_sizes: dict[str, int] = {} + + self._result_handler_map: _ResultHandlerMappingTypedDict = { + "CtrlCResult": self._record_error_result, + "ErrorResult": self._record_error_result, + "FailureResult": self._record_failure_result, + "FinalTotalSubmissionsResult": self._record_final_expected_files, + "PrintTask": self._record_warning_result, + "ProgressResult": self._record_progress_result, + "QueuedResult": self._record_queued_result, + "SuccessResult": self._record_success_result, } def 
expected_totals_are_final(self) -> bool: """Assess if expected totals are final.""" return self.final_expected_files_transferred == self.expected_files_transferred - def __call__(self, result: Any) -> None: + def __call__(self, result: AnyResultType | PrintTask) -> None: """Record the result of an individual Result object.""" - self._result_handler_map.get(type(result), self._record_noop)(result=result) + self._result_handler_map.get(type(result).__name__, self._record_noop)(result=result) @staticmethod - def _get_ongoing_dict_key(result: Union[AnyResult, object]) -> str: + def _get_ongoing_dict_key(result: AnyResultType | object) -> str: if not isinstance(result, AllResultTypes): raise TypeError( "Any result using _get_ongoing_dict_key must be one of " f"{', '.join(str(i) for i in AllResultTypes)}. " f"Provided result is of type: {type(result)}" ) - key_parts: List[str] = [] + key_parts: list[str] = [] for result_property in [result.transfer_type, result.src, result.dest]: if result_property is not None: - key_parts.append(ensure_string(result_property)) + key_parts.append(ensure_string(result_property)) # noqa: PERF401 return ":".join(key_parts) - def _pop_result_from_ongoing_dicts( - self, result: AnyResult - ) -> Tuple[int, Optional[int]]: + def _pop_result_from_ongoing_dicts(self, result: AnyResultType) -> tuple[int, int | None]: ongoing_key = self._get_ongoing_dict_key(result) total_progress = self._ongoing_progress.pop(ongoing_key, 0) total_file_size = self._ongoing_total_sizes.pop(ongoing_key, None) @@ -392,9 +385,7 @@ def _record_queued_result(self, result: QueuedResult, **_: Any) -> None: if self.start_time is None: self.start_time = time.time() total_transfer_size = result.total_transfer_size - self._ongoing_total_sizes[self._get_ongoing_dict_key(result)] = ( - total_transfer_size - ) + self._ongoing_total_sizes[self._get_ongoing_dict_key(result)] = total_transfer_size # The total transfer size can be None if we do not know the size # immediately so do not add to the total right away. 
if total_transfer_size: @@ -428,7 +419,9 @@ def _update_ongoing_transfer_size_if_unknown(self, result: ProgressResult) -> No # If the total size is no longer None that means we just learned # of the size so let's update the appropriate places with this # knowledge - if result.total_transfer_size is not None: + if ( + result.total_transfer_size is not None + ): # pyright: ignore[reportUnnecessaryComparison] self._ongoing_total_sizes[ongoing_key] = total_transfer_size # Figure out how many bytes have been unaccounted for as # the recorder has been keeping track of how many bytes @@ -442,11 +435,11 @@ def _update_ongoing_transfer_size_if_unknown(self, result: ProgressResult) -> No else: self.expected_bytes_transferred += result.bytes_transferred - def _record_success_result(self, result: AnyResult, **_: Any) -> None: + def _record_success_result(self, result: AnyResultType, **_: Any) -> None: self._pop_result_from_ongoing_dicts(result) self.files_transferred += 1 - def _record_failure_result(self, result: AnyResult, **_: Any) -> None: + def _record_failure_result(self, result: AnyResultType, **_: Any) -> None: """On failure, account for the failure in count for bytes transferred.""" total_progress, total_file_size = self._pop_result_from_ongoing_dicts(result) if total_file_size is not None: @@ -462,9 +455,7 @@ def _record_warning_result(self, **_: Any) -> None: def _record_error_result(self, **_: Any) -> None: self.errors += 1 - def _record_final_expected_files( - self, result: FinalTotalSubmissionsResult, **_: Any - ) -> None: + def _record_final_expected_files(self, result: FinalTotalSubmissionsResult, **_: Any) -> None: self.final_expected_files_transferred = result.total_submissions @@ -483,9 +474,7 @@ class ResultPrinter(BaseResultHandler): ) SUCCESS_FORMAT: ClassVar[str] = "{transfer_type}: {transfer_location}" DRY_RUN_FORMAT: ClassVar[str] = "(dryrun) " + SUCCESS_FORMAT - FAILURE_FORMAT: ClassVar[str] = ( - "{transfer_type} failed: {transfer_location} {exception}" - ) + FAILURE_FORMAT: ClassVar[str] = "{transfer_type} failed: {transfer_location} {exception}" WARNING_FORMAT: ClassVar[str] = "{message}" ERROR_FORMAT: ClassVar[str] = "fatal error: {exception}" CTRL_C_MSG: ClassVar[str] = "cancelled: ctrl-c received" @@ -497,9 +486,9 @@ def __init__( self, result_recorder: ResultRecorder, *, - out_file: Optional[TextIO] = None, - error_file: Optional[TextIO] = None, - ): + out_file: TextIO | None = None, + error_file: TextIO | None = None, + ) -> None: """Instantiate class. 
Args: @@ -518,20 +507,20 @@ def __init__( if self._error_file is None: self._error_file = sys.stderr self._progress_length = 0 - self._result_handler_map = { - ProgressResult: self._print_progress, - SuccessResult: self._print_success, - FailureResult: self._print_failure, - PrintTask: self._print_warning, - ErrorResult: self._print_error, - CtrlCResult: self._print_ctrl_c, - DryRunResult: self._print_dry_run, - FinalTotalSubmissionsResult: self._clear_progress_if_no_more_expected_transfers, + self._result_handler_map: _ResultHandlerMappingTypedDict = { + "CtrlCResult": self._print_ctrl_c, + "DryRunResult": self._print_dry_run, + "ErrorResult": self._print_error, + "FailureResult": self._print_failure, + "FinalTotalSubmissionsResult": self._clear_progress_if_no_more_expected_transfers, + "PrintTask": self._print_warning, + "ProgressResult": self._print_progress, + "SuccessResult": self._print_success, } - def __call__(self, result: Any) -> None: + def __call__(self, result: AnyResultType | PrintTask) -> None: """Print the progress of the ongoing transfer based on a result.""" - self._result_handler_map.get(type(result), self._print_noop)(result=result) + self._result_handler_map.get(type(result).__name__, self._print_noop)(result=result) def _print_noop(self, **_: Any) -> None: """If result does not have a handler, then do nothing with it.""" @@ -566,19 +555,15 @@ def _print_warning(self, result: Any, **_: Any) -> None: self._redisplay_progress() def _print_error(self, result: ErrorResult, **_: Any) -> None: - # pylint: disable=logging-format-interpolation LOGGER.error(self.ERROR_FORMAT.format(exception=result.exception)) - # pylint: disable=unused-argument - def _print_ctrl_c(self, result: CtrlCResult, **_: Any) -> None: + def _print_ctrl_c(self, result: CtrlCResult, **_: Any) -> None: # noqa: ARG002 LOGGER.warning(self.CTRL_C_MSG) - def _get_transfer_location(self, result: AnyResult) -> str: + def _get_transfer_location(self, result: AnyResultType) -> str: if result.dest is None: return self.SRC_TRANSFER_LOCATION_FORMAT.format(src=result.src) - return self.SRC_DEST_TRANSFER_LOCATION_FORMAT.format( - src=result.src, dest=result.dest - ) + return self.SRC_DEST_TRANSFER_LOCATION_FORMAT.format(src=result.src, dest=result.dest) def _redisplay_progress(self) -> None: # Reset to zero because done statements are printed with new lines @@ -611,8 +596,7 @@ def _print_progress(self, **_: Any) -> None: ) transfer_speed = ( - human_readable_size(self._result_recorder.bytes_transfer_speed) - or "0 Bytes" + human_readable_size(self._result_recorder.bytes_transfer_speed) or "0 Bytes" ) + "/s" progress_statement = self.BYTE_PROGRESS_FORMAT.format( bytes_completed=bytes_completed, @@ -632,9 +616,7 @@ def _print_progress(self, **_: Any) -> None: progress_statement += self._STILL_CALCULATING_TOTALS # Make sure that it overrides any previous progress bar. - progress_statement = self._adjust_statement_padding( - progress_statement, ending_char="\r" - ) + progress_statement = self._adjust_statement_padding(progress_statement, ending_char="\r") # We do not want to include the carriage return in this calculation # as progress length is used for determining whitespace padding. # So we subtract one off of the length. @@ -643,14 +625,12 @@ def _print_progress(self, **_: Any) -> None: # Print the progress out. 
self._print_to_out_file(progress_statement) - def _get_expected_total(self, expected_total: Optional[str]) -> Optional[str]: + def _get_expected_total(self, expected_total: str | None) -> str | None: if not self._result_recorder.expected_totals_are_final(): return self._ESTIMATED_EXPECTED_TOTAL.format(expected_total=expected_total) return expected_total - def _adjust_statement_padding( - self, print_statement: str, ending_char: str = "\n" - ) -> str: + def _adjust_statement_padding(self, print_statement: str, ending_char: str = "\n") -> str: print_statement = print_statement.ljust(self._progress_length, " ") return print_statement + ending_char @@ -695,8 +675,8 @@ class ResultProcessor(threading.Thread): def __init__( self, - result_queue: "queue.Queue[Any]", - result_handlers: Optional[List[Callable[..., Any]]] = None, + result_queue: queue.Queue[Any], + result_handlers: list[Callable[..., Any]] | None = None, ) -> None: """Instantiate class. @@ -734,11 +714,11 @@ def run(self) -> None: except queue.Empty: # cov: ignore pass - def _process_result(self, result: AnyResult) -> None: + def _process_result(self, result: AnyResultType) -> None: for result_handler in self._result_handlers: try: result_handler(result) - except Exception as exc: # pylint: disable=broad-except + except Exception as exc: # noqa: BLE001 LOGGER.debug( "Error processing result %s with handler %s: %s", result, @@ -758,7 +738,7 @@ class CommandResultRecorder: def __init__( self, - result_queue: "queue.Queue[Any]", + result_queue: queue.Queue[Any], result_recorder: ResultRecorder, result_processor: ResultProcessor, ) -> None: @@ -789,8 +769,7 @@ def shutdown(self) -> None: def get_command_result(self) -> CommandResult: """Get the CommandResult representing the result of a command.""" return CommandResult( - num_tasks_failed=self._result_recorder.files_failed - + self._result_recorder.errors, + num_tasks_failed=self._result_recorder.files_failed + self._result_recorder.errors, num_tasks_warned=self._result_recorder.files_warned, ) @@ -798,7 +777,7 @@ def notify_total_submissions(self, total: int) -> None: """Notify total submissions.""" self.result_queue.put(FinalTotalSubmissionsResult(total_submissions=total)) - def __enter__(self) -> CommandResultRecorder: + def __enter__(self) -> Self: """Enter the context manager. 
Returns: @@ -810,10 +789,10 @@ def __enter__(self) -> CommandResultRecorder: def __exit__( self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> Optional[bool]: + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> bool | None: """Exit the context manager.""" if exc_type: LOGGER.debug( diff --git a/runway/core/providers/aws/s3/_helpers/s3handler.py b/runway/core/providers/aws/s3/_helpers/s3handler.py index 8666b6605..40b359c36 100644 --- a/runway/core/providers/aws/s3/_helpers/s3handler.py +++ b/runway/core/providers/aws/s3/_helpers/s3handler.py @@ -10,17 +10,12 @@ import logging import os import sys +from pathlib import Path from typing import ( TYPE_CHECKING, Any, Callable, ClassVar, - Dict, - Iterator, - List, - Optional, - Tuple, - Type, cast, ) @@ -41,7 +36,6 @@ ResultProcessor, ResultRecorder, SuccessResult, - Union, UploadResultSubscriber, UploadStreamResultSubscriber, ) @@ -66,6 +60,7 @@ ) if TYPE_CHECKING: + from collections.abc import Iterator from queue import Queue from mypy_boto3_s3.client import S3Client @@ -109,9 +104,7 @@ def __init__( self._config_params = config_params self._runtime_config = runtime_config - def __call__( - self, client: S3Client, result_queue: "Queue[Any]" - ) -> S3TransferHandler: + def __call__(self, client: S3Client, result_queue: Queue[Any]) -> S3TransferHandler: """Create a S3TransferHandler instance. Args: @@ -120,9 +113,7 @@ def __call__( for the S3TransferHandler. """ - transfer_config = create_transfer_config_from_runtime_config( - self._runtime_config - ) + transfer_config = create_transfer_config_from_runtime_config(self._runtime_config) transfer_config.max_in_memory_upload_chunks = self.MAX_IN_MEMORY_CHUNKS transfer_config.max_in_memory_download_chunks = self.MAX_IN_MEMORY_CHUNKS @@ -134,7 +125,7 @@ def __call__( transfer_config.multipart_chunksize, ) result_recorder = ResultRecorder() - result_processor_handlers: List[Any] = [result_recorder] + result_processor_handlers: list[Any] = [result_recorder] self._add_result_printer(result_recorder, result_processor_handlers) result_processor = ResultProcessor( result_queue=result_queue, result_handlers=result_processor_handlers @@ -154,20 +145,13 @@ def __call__( def _add_result_printer( self, result_recorder: ResultRecorder, - result_processor_handlers: List[ - Union[ - NoProgressResultPrinter, - OnlyShowErrorsResultPrinter, - ResultPrinter, - ResultRecorder, - ] + result_processor_handlers: list[ + NoProgressResultPrinter | OnlyShowErrorsResultPrinter | ResultPrinter | ResultRecorder ], ) -> None: if self._config_params.quiet: return - if self._config_params.only_show_errors: - result_printer = OnlyShowErrorsResultPrinter(result_recorder) - elif self._config_params.is_stream: + if self._config_params.only_show_errors or self._config_params.is_stream: result_printer = OnlyShowErrorsResultPrinter(result_recorder) elif self._config_params.no_progress: result_printer = NoProgressResultPrinter(result_recorder) @@ -225,18 +209,15 @@ def call(self, fileinfos: Iterator[FileInfo]) -> CommandResult: failures and warnings encountered. 
""" - with self._result_command_recorder: - with self._transfer_manager: - total_submissions = 0 - for fileinfo in fileinfos: - for submitter in self._submitters: - if submitter.can_submit(fileinfo): - if submitter.submit(fileinfo): - total_submissions += 1 - break - self._result_command_recorder.notify_total_submissions( - total_submissions - ) + with self._result_command_recorder, self._transfer_manager: + total_submissions = 0 + for fileinfo in fileinfos: + for submitter in self._submitters: + if submitter.can_submit(fileinfo): + if submitter.submit(fileinfo): + total_submissions += 1 + break + self._result_command_recorder.notify_total_submissions(total_submissions) return self._result_command_recorder.get_command_result() @@ -249,17 +230,15 @@ class BaseTransferRequestSubmitter: """ - REQUEST_MAPPER_METHOD: ClassVar[ - Optional[Callable[[Dict[Any, Any], Dict[Any, Any]], Any]] - ] = None - RESULT_SUBSCRIBER_CLASS: ClassVar[Optional[Type[BaseSubscriber]]] = None + REQUEST_MAPPER_METHOD: ClassVar[Callable[[dict[Any, Any], dict[Any, Any]], Any] | None] = None + RESULT_SUBSCRIBER_CLASS: ClassVar[type[BaseSubscriber] | None] = None def __init__( self, transfer_manager: TransferManager, - result_queue: "Queue[Any]", + result_queue: Queue[Any], config_params: ParametersDataModel, - ): + ) -> None: """Instantiate class. Args: @@ -273,7 +252,7 @@ def __init__( self._result_queue = result_queue self._config_params = config_params - def submit(self, fileinfo: FileInfo) -> Optional[TransferFuture]: + def submit(self, fileinfo: FileInfo) -> TransferFuture | None: """Submit a transfer request based on the FileInfo provided. There is no guarantee that the transfer request will be made on @@ -309,23 +288,21 @@ def can_submit(self, fileinfo: FileInfo) -> bool: """ raise NotImplementedError("can_submit()") - def _do_submit(self, fileinfo: FileInfo) -> Optional[TransferFuture]: + def _do_submit(self, fileinfo: FileInfo) -> TransferFuture | None: """Do submit.""" - extra_args: Dict[Any, Any] = {} + extra_args: dict[Any, Any] = {} if self.REQUEST_MAPPER_METHOD: - # pylint: disable=not-callable - # TODO revisit in future releases of pyright - not seeing second arg + # TODO (kyle): revisit in future releases of pyright - not seeing second arg self.REQUEST_MAPPER_METHOD(extra_args, self._config_params.dict()) # type: ignore - subscribers: List[BaseSubscriber] = [] + subscribers: list[BaseSubscriber] = [] self._add_additional_subscribers(subscribers, fileinfo) # The result subscriber class should always be the last registered # subscriber to ensure it is not missing any information that # may have been added in a different subscriber such as size. 
if self.RESULT_SUBSCRIBER_CLASS: - result_kwargs: Dict[str, Any] = {"result_queue": self._result_queue} + result_kwargs: dict[str, Any] = {"result_queue": self._result_queue} if self._config_params.is_move: result_kwargs["transfer_type"] = "move" - # pylint: disable=not-callable subscribers.append(self.RESULT_SUBSCRIBER_CLASS(**result_kwargs)) if not self._config_params.dryrun: @@ -338,36 +315,27 @@ def _submit_dryrun(self, fileinfo: FileInfo) -> None: if self._config_params.is_move: transfer_type = "move" src, dest = self._format_src_dest(fileinfo) - self._result_queue.put( - DryRunResult(transfer_type=transfer_type, src=src, dest=dest) - ) + self._result_queue.put(DryRunResult(transfer_type=transfer_type, src=src, dest=dest)) def _add_additional_subscribers( - self, subscribers: List[BaseSubscriber], fileinfo: FileInfo + self, subscribers: list[BaseSubscriber], fileinfo: FileInfo ) -> None: """Add additional subscribers.""" def _submit_transfer_request( self, fileinfo: FileInfo, - extra_args: Dict[str, Any], - subscribers: List[BaseSubscriber], + extra_args: dict[str, Any], + subscribers: list[BaseSubscriber], ) -> TransferFuture: """Submit transfer request.""" raise NotImplementedError("_submit_transfer_request()") def _warn_and_signal_if_skip(self, fileinfo: FileInfo) -> bool: """Warn and signal if skip.""" - for warning_handler in self._get_warning_handlers(): - if warning_handler(fileinfo): - # On the first warning handler that returns a signal to skip - # immediately propagate this signal and no longer check - # the other warning handlers as no matter what the file will - # be skipped. - return True - return False + return any(warning_handler(fileinfo) for warning_handler in self._get_warning_handlers()) - def _get_warning_handlers(self) -> List[Callable[[FileInfo], Any]]: + def _get_warning_handlers(self) -> list[Callable[[FileInfo], Any]]: """Return a list of warning handlers, which are callables. Handlers take in a single parameter representing a FileInfo. @@ -379,32 +347,28 @@ def _get_warning_handlers(self) -> List[Callable[[FileInfo], Any]]: def _should_inject_content_type(self) -> bool: """If should inject content type.""" - return bool( - self._config_params.guess_mime_type and not self._config_params.content_type - ) + return bool(self._config_params.guess_mime_type and not self._config_params.content_type) def _warn_glacier(self, fileinfo: FileInfo) -> bool: """Warn glacier.""" - if not self._config_params.force_glacier_transfer: - if not fileinfo.is_glacier_compatible: - LOGGER.debug( - "Encountered glacier object s3://%s. Not performing " - "%s on object.", - fileinfo.src, - fileinfo.operation_name, + if not self._config_params.force_glacier_transfer and not fileinfo.is_glacier_compatible: + LOGGER.debug( + "Encountered glacier object s3://%s. Not performing %s on object.", + fileinfo.src, + fileinfo.operation_name, + ) + if not self._config_params.ignore_glacier_warnings: + warning = create_warning( + f"s3://{fileinfo.src}", + "Object is of storage class GLACIER. Unable to " + f"perform {fileinfo.operation_name} operations on GLACIER objects. " + "You must restore the object to be able to perform the " + f"operation. See aws s3 {fileinfo.operation_name} help " + "for additional parameter options to ignore or force these " + "transfers.", ) - if not self._config_params.ignore_glacier_warnings: - warning = create_warning( - f"s3://{fileinfo.src}", - "Object is of storage class GLACIER. Unable to " - f"perform {fileinfo.operation_name} operations on GLACIER objects. 
" - "You must restore the object to be able to perform the " - f"operation. See aws s3 {fileinfo.operation_name} help " - "for additional parameter options to ignore or force these " - "transfers.", - ) - self._result_queue.put(warning) - return True + self._result_queue.put(warning) + return True return False def _warn_parent_reference(self, fileinfo: FileInfo) -> bool: @@ -418,24 +382,20 @@ def _warn_parent_reference(self, fileinfo: FileInfo) -> bool: else False ) if escapes_cwd: - warning = create_warning( - fileinfo.compare_key, "File references a parent directory." - ) + warning = create_warning(fileinfo.compare_key, "File references a parent directory.") self._result_queue.put(warning) return True return False - def _format_src_dest( - self, fileinfo: FileInfo - ) -> Tuple[Optional[str], Optional[str]]: + def _format_src_dest(self, fileinfo: FileInfo) -> tuple[str | None, str | None]: """Return formatted versions of a fileinfos source and destination.""" raise NotImplementedError("_format_src_dest()") - def _format_local_path(self, path: Optional[AnyPath]) -> Optional[str]: + def _format_local_path(self, path: AnyPath | None) -> str | None: """Format local path.""" return relative_path(path) - def _format_s3_path(self, path: Optional[AnyPath]) -> Optional[str]: + def _format_s3_path(self, path: AnyPath | None) -> str | None: """Format s3 path.""" if not path: return None @@ -448,12 +408,10 @@ def _format_s3_path(self, path: Optional[AnyPath]) -> Optional[str]: class UploadRequestSubmitter(BaseTransferRequestSubmitter): """Upload request submitter.""" - REQUEST_MAPPER_METHOD: ClassVar[Callable[[Dict[Any, Any], Dict[Any, Any]], Any]] = ( + REQUEST_MAPPER_METHOD: ClassVar[Callable[[dict[Any, Any], dict[Any, Any]], Any] | None] = ( RequestParamsMapper.map_put_object_params ) - RESULT_SUBSCRIBER_CLASS: ClassVar[Type[UploadResultSubscriber]] = ( - UploadResultSubscriber - ) + RESULT_SUBSCRIBER_CLASS: ClassVar[type[BaseSubscriber] | None] = UploadResultSubscriber def can_submit(self, fileinfo: FileInfo) -> bool: """Check whether it can submit a particular FileInfo. 
@@ -470,7 +428,7 @@ def can_submit(self, fileinfo: FileInfo) -> bool: return fileinfo.operation_name == "upload" def _add_additional_subscribers( - self, subscribers: List[BaseSubscriber], fileinfo: FileInfo + self, subscribers: list[BaseSubscriber], fileinfo: FileInfo ) -> None: """Add additional subscribers.""" subscribers.append(ProvideSizeSubscriber(fileinfo.size)) @@ -482,8 +440,8 @@ def _add_additional_subscribers( def _submit_transfer_request( self, fileinfo: FileInfo, - extra_args: Dict[str, Any], - subscribers: List[BaseSubscriber], + extra_args: dict[str, Any], + subscribers: list[BaseSubscriber], ) -> TransferFuture: """Submit transfer request.""" bucket, key = find_bucket_key(str(fileinfo.dest)) @@ -501,7 +459,7 @@ def _get_filein(fileinfo: FileInfo) -> str: """Get file in.""" return str(fileinfo.src) - def _get_warning_handlers(self) -> List[Callable[[FileInfo], Any]]: + def _get_warning_handlers(self) -> list[Callable[[FileInfo], Any]]: """Get warning handlers.""" return [self._warn_if_too_large] @@ -516,9 +474,7 @@ def _warn_if_too_large(self, fileinfo: FileInfo) -> None: warning = create_warning(file_path, warning_message, skip_file=False) self._result_queue.put(warning) - def _format_src_dest( - self, fileinfo: FileInfo - ) -> Tuple[Optional[str], Optional[str]]: + def _format_src_dest(self, fileinfo: FileInfo) -> tuple[str | None, str | None]: """Return formatted versions of a fileinfos source and destination.""" src = self._format_local_path(fileinfo.src) dest = self._format_s3_path(fileinfo.dest) @@ -528,12 +484,10 @@ def _format_src_dest( class DownloadRequestSubmitter(BaseTransferRequestSubmitter): """Download request submitter.""" - REQUEST_MAPPER_METHOD: ClassVar[Callable[[Dict[Any, Any], Dict[Any, Any]], Any]] = ( + REQUEST_MAPPER_METHOD: ClassVar[Callable[[dict[Any, Any], dict[Any, Any]], Any] | None] = ( RequestParamsMapper.map_get_object_params ) - RESULT_SUBSCRIBER_CLASS: ClassVar[Type[DownloadResultSubscriber]] = ( - DownloadResultSubscriber - ) + RESULT_SUBSCRIBER_CLASS: ClassVar[type[BaseSubscriber] | None] = DownloadResultSubscriber def can_submit(self, fileinfo: FileInfo) -> bool: """Check whether it can submit a particular FileInfo. 
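
Both the upload and download submitters split an S3 path into bucket and key with find_bucket_key() from the sibling utils module. A simplified sketch of what that split looks like (illustration only; the real helper also recognizes access-point and outpost ARNs via the regexes in utils.py further down):

    def find_bucket_key_simplified(s3_path: str) -> tuple[str, str]:
        """Split "bucket/some/key" into ("bucket", "some/key").

        Simplified for illustration; the real find_bucket_key() first
        matches access-point and outpost ARNs before falling back to
        a plain split like this one.
        """
        bucket, _, key = s3_path.partition("/")
        return bucket, key


    assert find_bucket_key_simplified("my-bucket/a/b.txt") == ("my-bucket", "a/b.txt")
    assert find_bucket_key_simplified("my-bucket") == ("my-bucket", "")
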
@@ -550,7 +504,7 @@ def can_submit(self, fileinfo: FileInfo) -> bool: return fileinfo.operation_name == "download" def _add_additional_subscribers( - self, subscribers: List[BaseSubscriber], fileinfo: FileInfo + self, subscribers: list[BaseSubscriber], fileinfo: FileInfo ) -> None: """Add additional subscribers.""" subscribers.append(ProvideSizeSubscriber(fileinfo.size)) @@ -559,15 +513,13 @@ def _add_additional_subscribers( ProvideLastModifiedTimeSubscriber(fileinfo.last_update, self._result_queue) ) if self._config_params.is_move: - subscribers.append( - DeleteSourceObjectSubscriber(fileinfo.source_client) # type: ignore - ) + subscribers.append(DeleteSourceObjectSubscriber(fileinfo.source_client)) # type: ignore def _submit_transfer_request( self, fileinfo: FileInfo, - extra_args: Dict[str, Any], - subscribers: List[BaseSubscriber], + extra_args: dict[str, Any], + subscribers: list[BaseSubscriber], ) -> TransferFuture: """Submit transfer request.""" bucket, key = find_bucket_key(str(fileinfo.src)) @@ -584,13 +536,11 @@ def _get_fileout(fileinfo: FileInfo) -> str: """Get file out.""" return str(fileinfo.dest) - def _get_warning_handlers(self) -> List[Callable[[FileInfo], Any]]: + def _get_warning_handlers(self) -> list[Callable[[FileInfo], Any]]: """Get warning handlers.""" return [self._warn_glacier, self._warn_parent_reference] - def _format_src_dest( - self, fileinfo: FileInfo - ) -> Tuple[Optional[str], Optional[str]]: + def _format_src_dest(self, fileinfo: FileInfo) -> tuple[str | None, str | None]: """Return formatted versions of a fileinfos source and destination.""" src = self._format_s3_path(fileinfo.src) dest = self._format_local_path(fileinfo.dest) @@ -600,10 +550,10 @@ def _format_src_dest( class CopyRequestSubmitter(BaseTransferRequestSubmitter): """Copy request submitter.""" - REQUEST_MAPPER_METHOD: ClassVar[Callable[[Dict[Any, Any], Dict[Any, Any]], Any]] = ( + REQUEST_MAPPER_METHOD: ClassVar[Callable[[dict[Any, Any], dict[Any, Any]], Any] | None] = ( RequestParamsMapper.map_copy_object_params ) - RESULT_SUBSCRIBER_CLASS: ClassVar[Type[CopyResultSubscriber]] = CopyResultSubscriber + RESULT_SUBSCRIBER_CLASS: ClassVar[type[BaseSubscriber] | None] = CopyResultSubscriber def can_submit(self, fileinfo: FileInfo) -> bool: """Check whether it can submit a particular FileInfo. 
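
The _add_additional_subscribers() overrides above all build on s3transfer's subscriber protocol: a subscriber subclasses s3transfer.subscribers.BaseSubscriber and overrides only the lifecycle hooks it needs. A toy example, unrelated to any class in this diff:

    from typing import Any

    from s3transfer.subscribers import BaseSubscriber


    class PrintingSubscriber(BaseSubscriber):
        """Toy subscriber that prints transfer lifecycle events."""

        def on_queued(self, future: Any, **kwargs: Any) -> None:
            print("queued:", future.meta.call_args.fileobj)

        def on_progress(self, future: Any, bytes_transferred: int, **kwargs: Any) -> None:
            print("progress:", bytes_transferred)

        def on_done(self, future: Any, **kwargs: Any) -> None:
            print("done")

Instances of such a class go into the subscribers=[...] list passed to the transfer manager's upload()/download() calls, which is exactly where the submitters above append ProvideSizeSubscriber and friends.
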
@@ -620,7 +570,7 @@ def can_submit(self, fileinfo: FileInfo) -> bool: return fileinfo.operation_name == "copy" def _add_additional_subscribers( - self, subscribers: List[BaseSubscriber], fileinfo: FileInfo + self, subscribers: list[BaseSubscriber], fileinfo: FileInfo ) -> None: """Add additional subscribers.""" subscribers.append(ProvideSizeSubscriber(fileinfo.size)) @@ -634,8 +584,8 @@ def _add_additional_subscribers( def _submit_transfer_request( self, fileinfo: FileInfo, - extra_args: Dict[str, Any], - subscribers: List[BaseSubscriber], + extra_args: dict[str, Any], + subscribers: list[BaseSubscriber], ) -> TransferFuture: """Submit transfer request.""" bucket, key = find_bucket_key(str(fileinfo.dest)) @@ -650,13 +600,11 @@ def _submit_transfer_request( source_client=cast("S3Client", fileinfo.source_client), ) - def _get_warning_handlers(self) -> List[Callable[[FileInfo], Any]]: + def _get_warning_handlers(self) -> list[Callable[[FileInfo], Any]]: """Get warning handlers.""" return [self._warn_glacier] - def _format_src_dest( - self, fileinfo: FileInfo - ) -> Tuple[Optional[str], Optional[str]]: + def _format_src_dest(self, fileinfo: FileInfo) -> tuple[str | None, str | None]: """Return formatted versions of a fileinfos source and destination.""" src = self._format_s3_path(fileinfo.src) dest = self._format_s3_path(fileinfo.dest) @@ -666,9 +614,7 @@ def _format_src_dest( class UploadStreamRequestSubmitter(UploadRequestSubmitter): """Upload stream request submitter.""" - RESULT_SUBSCRIBER_CLASS: ClassVar[Type[UploadStreamResultSubscriber]] = ( - UploadStreamResultSubscriber - ) + RESULT_SUBSCRIBER_CLASS: ClassVar[type[BaseSubscriber] | None] = UploadStreamResultSubscriber def can_submit(self, fileinfo: FileInfo) -> bool: """Check whether it can submit a particular FileInfo. @@ -682,12 +628,10 @@ def can_submit(self, fileinfo: FileInfo) -> bool: request to the underlying transfer manager. False, otherwise. """ - return bool( - fileinfo.operation_name == "upload" and self._config_params.is_stream - ) + return bool(fileinfo.operation_name == "upload" and self._config_params.is_stream) def _add_additional_subscribers( - self, subscribers: List[BaseSubscriber], fileinfo: FileInfo + self, subscribers: list[BaseSubscriber], fileinfo: FileInfo # noqa: ARG002 ) -> None: """Add additional subscribers.""" expected_size = self._config_params.expected_size @@ -695,13 +639,13 @@ def _add_additional_subscribers( subscribers.append(ProvideSizeSubscriber(int(expected_size))) @staticmethod - def _get_filein(fileinfo: FileInfo) -> NonSeekableStream: # type: ignore + def _get_filein(fileinfo: FileInfo) -> NonSeekableStream: # type: ignore # noqa: ARG004 """Get file in.""" if sys.stdin is None: - raise StdinMissingError() + raise StdinMissingError return NonSeekableStream(sys.stdin.buffer) - def _format_local_path(self, path: Optional[AnyPath]) -> str: + def _format_local_path(self, path: AnyPath | None) -> str: # noqa: ARG002 """Format local path.""" return "-" @@ -709,9 +653,7 @@ def _format_local_path(self, path: Optional[AnyPath]) -> str: class DownloadStreamRequestSubmitter(DownloadRequestSubmitter): """Download stream result subscriber.""" - RESULT_SUBSCRIBER_CLASS: ClassVar[Type[DownloadStreamResultSubscriber]] = ( - DownloadStreamResultSubscriber - ) + RESULT_SUBSCRIBER_CLASS: ClassVar[type[BaseSubscriber] | None] = DownloadStreamResultSubscriber def can_submit(self, fileinfo: FileInfo) -> bool: """Check whether it can submit a particular FileInfo. 
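
The stream submitters report "-" as the local path and wrap sys.stdin.buffer in NonSeekableStream (defined in utils.py below) so the transfer manager reads sequentially instead of seeking. A rough sketch of that wrapper's read semantics, using an in-memory stream and a hypothetical stand-in class:

    from __future__ import annotations

    import io


    class NonSeekableReader:
        """Illustrative stand-in for utils.NonSeekableStream."""

        def __init__(self, fileobj: io.BufferedIOBase) -> None:
            self._fileobj = fileobj

        def read(self, amt: int | None = None) -> bytes:
            # No size hint: drain the stream. Otherwise read one chunk.
            if amt is None:
                return self._fileobj.read()
            return self._fileobj.read(amt)


    stream = NonSeekableReader(io.BytesIO(b"hello world"))
    assert stream.read(5) == b"hello"
    assert stream.read() == b" world"
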
@@ -725,21 +667,19 @@ def can_submit(self, fileinfo: FileInfo) -> bool: request to the underlying transfer manager. False, otherwise. """ - return bool( - fileinfo.operation_name == "download" and self._config_params.is_stream - ) + return bool(fileinfo.operation_name == "download" and self._config_params.is_stream) def _add_additional_subscribers( - self, subscribers: List[BaseSubscriber], fileinfo: FileInfo + self, subscribers: list[BaseSubscriber], fileinfo: FileInfo ) -> None: """Add additional subscribers.""" @staticmethod - def _get_fileout(fileinfo: FileInfo) -> StdoutBytesWriter: # type: ignore + def _get_fileout(fileinfo: FileInfo) -> StdoutBytesWriter: # type: ignore # noqa: ARG004 """Get file out.""" return StdoutBytesWriter() - def _format_local_path(self, path: Optional[AnyPath]) -> str: + def _format_local_path(self, path: AnyPath | None) -> str: # noqa: ARG002 """Format local path.""" return "-" @@ -747,12 +687,10 @@ def _format_local_path(self, path: Optional[AnyPath]) -> str: class DeleteRequestSubmitter(BaseTransferRequestSubmitter): """Delete request submitter.""" - REQUEST_MAPPER_METHOD: ClassVar[Callable[[Dict[Any, Any], Dict[Any, Any]], Any]] = ( + REQUEST_MAPPER_METHOD: ClassVar[Callable[[dict[Any, Any], dict[Any, Any]], Any] | None] = ( RequestParamsMapper.map_delete_object_params ) - RESULT_SUBSCRIBER_CLASS: ClassVar[Type[DeleteResultSubscriber]] = ( - DeleteResultSubscriber - ) + RESULT_SUBSCRIBER_CLASS: ClassVar[type[BaseSubscriber] | None] = DeleteResultSubscriber def can_submit(self, fileinfo: FileInfo) -> bool: """Check whether it can submit a particular FileInfo. @@ -771,8 +709,8 @@ def can_submit(self, fileinfo: FileInfo) -> bool: def _submit_transfer_request( self, fileinfo: FileInfo, - extra_args: Dict[str, Any], - subscribers: List[BaseSubscriber], + extra_args: dict[str, Any], + subscribers: list[BaseSubscriber], ) -> TransferFuture: """Submit transfer request.""" bucket, key = find_bucket_key(str(fileinfo.src)) @@ -780,9 +718,7 @@ def _submit_transfer_request( bucket=bucket, key=key, extra_args=extra_args, subscribers=subscribers ) - def _format_src_dest( - self, fileinfo: FileInfo - ) -> Tuple[Optional[str], Optional[str]]: + def _format_src_dest(self, fileinfo: FileInfo) -> tuple[str | None, str | None]: """Return formatted versions of a fileinfos source and destination.""" return self._format_s3_path(fileinfo.src), None @@ -790,10 +726,8 @@ def _format_src_dest( class LocalDeleteRequestSubmitter(BaseTransferRequestSubmitter): """Local delete request submitter.""" - REQUEST_MAPPER_METHOD: ClassVar[ - Optional[Callable[[Dict[Any, Any], Dict[Any, Any]], Any]] - ] = None - RESULT_SUBSCRIBER_CLASS: ClassVar[Optional[Type[BaseSubscriber]]] = None + REQUEST_MAPPER_METHOD: ClassVar[Callable[[dict[Any, Any], dict[Any, Any]], Any] | None] = None + RESULT_SUBSCRIBER_CLASS: ClassVar[type[BaseSubscriber] | None] = None def can_submit(self, fileinfo: FileInfo) -> bool: """Check whether it can submit a particular FileInfo. @@ -812,8 +746,8 @@ def can_submit(self, fileinfo: FileInfo) -> bool: def _submit_transfer_request( # type: ignore self, fileinfo: FileInfo, - extra_args: Dict[str, Any], - subscribers: List[BaseSubscriber], + extra_args: dict[str, Any], # noqa: ARG002 + subscribers: list[BaseSubscriber], # noqa: ARG002 ) -> bool: """Submit transfer request. 
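
The LocalDeleteRequestSubmitter hunk below also swaps os.remove() for Path.unlink(), consistent with the pathlib (PTH) cleanups seen elsewhere in this diff (e.g. the noqa: PTH100/PTH120 markers earlier). The two spellings are equivalent for regular files:

    import os
    import tempfile
    from pathlib import Path

    with tempfile.NamedTemporaryFile(delete=False) as tmp:
        name = tmp.name

    # Path.unlink() is the pathlib spelling of os.remove().
    Path(name).unlink()
    assert not os.path.exists(name)
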
@@ -835,13 +769,11 @@ def _submit_transfer_request( # type: ignore result_kwargs = {"transfer_type": "delete", "src": src, "dest": dest} try: self._result_queue.put(QueuedResult(total_transfer_size=0, **result_kwargs)) - os.remove(fileinfo.src) + Path(fileinfo.src).unlink() self._result_queue.put(SuccessResult(**result_kwargs)) - except Exception as exc: # pylint: disable=broad-except + except Exception as exc: # noqa: BLE001 self._result_queue.put(FailureResult(exception=exc, **result_kwargs)) return True - def _format_src_dest( - self, fileinfo: FileInfo - ) -> Tuple[Optional[str], Optional[str]]: + def _format_src_dest(self, fileinfo: FileInfo) -> tuple[str | None, str | None]: return self._format_local_path(fileinfo.src), None diff --git a/runway/core/providers/aws/s3/_helpers/sync_strategy/base.py b/runway/core/providers/aws/s3/_helpers/sync_strategy/base.py index 9cfcff080..580091e1c 100644 --- a/runway/core/providers/aws/s3/_helpers/sync_strategy/base.py +++ b/runway/core/providers/aws/s3/_helpers/sync_strategy/base.py @@ -8,9 +8,9 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, ClassVar, List, Optional +from typing import TYPE_CHECKING, Any, ClassVar -from typing_extensions import Literal +from typing_extensions import Literal, Self if TYPE_CHECKING: from botocore.session import Session @@ -22,7 +22,7 @@ LOGGER = logging.getLogger(__name__.replace("._", ".")) ValidSyncType = Literal["file_at_src_and_dest", "file_not_at_dest", "file_not_at_src"] -VALID_SYNC_TYPES: List[ValidSyncType] = [ +VALID_SYNC_TYPES: list[ValidSyncType] = [ "file_at_src_and_dest", "file_not_at_dest", "file_not_at_src", @@ -36,7 +36,7 @@ class BaseSync: """ - NAME: ClassVar[Optional[str]] = None + NAME: ClassVar = None sync_type: ValidSyncType @@ -52,7 +52,7 @@ def __init__(self, sync_type: ValidSyncType = "file_at_src_and_dest") -> None: self.sync_type = sync_type @property - def name(self) -> Optional[str]: + def name(self) -> str | None: """Retrieve the ``name`` of the sync strategy's ``ARGUMENT``.""" return self.NAME @@ -68,7 +68,7 @@ def register_strategy(self, session: Session) -> None: session.register("choosing-s3-sync-strategy", self.use_sync_strategy) def determine_should_sync( - self, src_file: Optional[FileStats], dest_file: Optional[FileStats] + self, src_file: FileStats | None, dest_file: FileStats | None ) -> bool: """Determine if file should sync. @@ -104,7 +104,7 @@ def determine_should_sync( """ raise NotImplementedError("determine_should_sync") - def use_sync_strategy(self, params: ParametersDataModel, **_) -> Optional[BaseSync]: + def use_sync_strategy(self, params: ParametersDataModel, **_: Any) -> Self | None: """Determine which sync strategy to use. The sync strategy object must be returned by this method @@ -114,24 +114,19 @@ def use_sync_strategy(self, params: ParametersDataModel, **_) -> Optional[BaseSy params: All arguments that a sync strategy is able to process. """ - if self.name: - if params.get(self.name): - # Return the sync strategy object to be used for syncing. - return self + if self.name and params.get(self.name): + # Return the sync strategy object to be used for syncing. 
+ return self return None @staticmethod - def compare_size( - src_file: Optional[FileStats], dest_file: Optional[FileStats] - ) -> bool: + def compare_size(src_file: FileStats | None, dest_file: FileStats | None) -> bool: """Compare the size of two FileStats objects.""" if not (src_file and dest_file): raise ValueError("src_file and dest_file must not be None") return src_file.size == dest_file.size - def compare_time( - self, src_file: Optional[FileStats], dest_file: Optional[FileStats] - ) -> bool: + def compare_time(self, src_file: FileStats | None, dest_file: FileStats | None) -> bool: """Compare modified time of two FileStats objects. Returns: @@ -145,22 +140,14 @@ def compare_time( delta = dest_file.last_update - src_file.last_update cmd = src_file.operation_name if cmd in ["copy", "upload"]: - if delta.total_seconds() >= 0: - # Destination is newer than source. - return True - return False - if cmd == "download": - if delta.total_seconds() <= 0: - return True - return False + return delta.total_seconds() >= 0 + return bool(cmd == "download" and delta.total_seconds() <= 0) class MissingFileSync(BaseSync): """File is missing from destination.""" - def __init__( - self, sync_type: Literal["file_not_at_dest"] = "file_not_at_dest" - ) -> None: + def __init__(self, sync_type: Literal["file_not_at_dest"] = "file_not_at_dest") -> None: """Instantiate class. Args: @@ -171,7 +158,7 @@ def __init__( super().__init__(sync_type) def determine_should_sync( - self, src_file: Optional[FileStats], dest_file: Optional[FileStats] + self, src_file: FileStats | None, dest_file: FileStats | None # noqa: ARG002 ) -> bool: """Determine if file should sync.""" LOGGER.debug( @@ -185,9 +172,7 @@ def determine_should_sync( class NeverSync(BaseSync): """Never sync file.""" - def __init__( - self, sync_type: Literal["file_not_at_src"] = "file_not_at_src" - ) -> None: + def __init__(self, sync_type: Literal["file_not_at_src"] = "file_not_at_src") -> None: """Instantiate class. 
Args: @@ -198,7 +183,7 @@ def __init__( super().__init__(sync_type) def determine_should_sync( - self, src_file: Optional[FileStats], dest_file: Optional[FileStats] + self, src_file: FileStats | None, dest_file: FileStats | None # noqa: ARG002 ) -> bool: """Determine if file should sync.""" return False @@ -208,7 +193,7 @@ class SizeAndLastModifiedSync(BaseSync): """Sync based on size and last modified date.""" def determine_should_sync( - self, src_file: Optional[FileStats], dest_file: Optional[FileStats] + self, src_file: FileStats | None, dest_file: FileStats | None ) -> bool: """Determine if file should sync.""" same_size = self.compare_size(src_file, dest_file) diff --git a/runway/core/providers/aws/s3/_helpers/sync_strategy/delete.py b/runway/core/providers/aws/s3/_helpers/sync_strategy/delete.py index bcc0656a7..a7ccd305b 100644 --- a/runway/core/providers/aws/s3/_helpers/sync_strategy/delete.py +++ b/runway/core/providers/aws/s3/_helpers/sync_strategy/delete.py @@ -8,13 +8,13 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, ClassVar, Optional - -from typing_extensions import Literal +from typing import TYPE_CHECKING, ClassVar from .base import BaseSync if TYPE_CHECKING: + from typing_extensions import Literal + from ..file_generator import FileStats @@ -27,7 +27,7 @@ class DeleteSync(BaseSync): NAME: ClassVar[Literal["delete"]] = "delete" def determine_should_sync( - self, src_file: Optional[FileStats], dest_file: Optional[FileStats] + self, src_file: FileStats | None, dest_file: FileStats | None # noqa: ARG002 ) -> bool: """Determine if file should sync.""" if dest_file: diff --git a/runway/core/providers/aws/s3/_helpers/sync_strategy/exact_timestamps.py b/runway/core/providers/aws/s3/_helpers/sync_strategy/exact_timestamps.py index 372c8a91f..75d7fc89f 100644 --- a/runway/core/providers/aws/s3/_helpers/sync_strategy/exact_timestamps.py +++ b/runway/core/providers/aws/s3/_helpers/sync_strategy/exact_timestamps.py @@ -8,13 +8,13 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, ClassVar, Optional - -from typing_extensions import Literal +from typing import TYPE_CHECKING, ClassVar from .base import SizeAndLastModifiedSync if TYPE_CHECKING: + from typing_extensions import Literal + from ..file_generator import FileStats LOGGER = logging.getLogger(__name__.replace("._", ".")) @@ -25,9 +25,7 @@ class ExactTimestampsSync(SizeAndLastModifiedSync): NAME: ClassVar[Literal["exact_timestamps"]] = "exact_timestamps" - def compare_time( - self, src_file: Optional[FileStats], dest_file: Optional[FileStats] - ) -> bool: + def compare_time(self, src_file: FileStats | None, dest_file: FileStats | None) -> bool: """Compare modified time of two FileStats objects. Returns: diff --git a/runway/core/providers/aws/s3/_helpers/sync_strategy/register.py b/runway/core/providers/aws/s3/_helpers/sync_strategy/register.py index 6a9af7a7c..cc359f51e 100644 --- a/runway/core/providers/aws/s3/_helpers/sync_strategy/register.py +++ b/runway/core/providers/aws/s3/_helpers/sync_strategy/register.py @@ -7,7 +7,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Type +from typing import TYPE_CHECKING, Any from .delete import DeleteSync from .exact_timestamps import ExactTimestampsSync @@ -21,9 +21,9 @@ def register_sync_strategy( session: Session, - strategy_cls: Type[BaseSync], + strategy_cls: type[BaseSync], sync_type: ValidSyncType = "file_at_src_and_dest", -): +) -> None: """Register a single sync strategy. 
Args: diff --git a/runway/core/providers/aws/s3/_helpers/sync_strategy/size_only.py b/runway/core/providers/aws/s3/_helpers/sync_strategy/size_only.py index c157b98f6..e8acafd7e 100644 --- a/runway/core/providers/aws/s3/_helpers/sync_strategy/size_only.py +++ b/runway/core/providers/aws/s3/_helpers/sync_strategy/size_only.py @@ -8,13 +8,13 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, ClassVar, Optional - -from typing_extensions import Literal +from typing import TYPE_CHECKING, ClassVar from .base import BaseSync if TYPE_CHECKING: + from typing_extensions import Literal + from ..file_generator import FileStats @@ -27,7 +27,7 @@ class SizeOnlySync(BaseSync): NAME: ClassVar[Literal["size_only"]] = "size_only" def determine_should_sync( - self, src_file: Optional[FileStats], dest_file: Optional[FileStats] + self, src_file: FileStats | None, dest_file: FileStats | None ) -> bool: """Determine if file should sync.""" same_size = self.compare_size(src_file, dest_file) diff --git a/runway/core/providers/aws/s3/_helpers/transfer_config.py b/runway/core/providers/aws/s3/_helpers/transfer_config.py index 63f57f47d..94316de32 100644 --- a/runway/core/providers/aws/s3/_helpers/transfer_config.py +++ b/runway/core/providers/aws/s3/_helpers/transfer_config.py @@ -7,24 +7,24 @@ from __future__ import annotations -from typing import Any, ClassVar, Dict, List, NoReturn, Optional, Union +from typing import Any, ClassVar, NoReturn from s3transfer.manager import TransferConfig from typing_extensions import TypedDict from .utils import human_readable_to_bytes + # If the user does not specify any overrides, # these are the default values we use for the s3 transfer # commands. -TransferConfigDict = TypedDict( - "TransferConfigDict", - max_bandwidth=Optional[Union[int, str]], - max_concurrent_requests=int, - max_queue_size=int, - multipart_chunksize=Union[int, str], - multipart_threshold=Union[int, str], -) +class TransferConfigDict(TypedDict): + max_bandwidth: int | str | None + max_concurrent_requests: int + max_queue_size: int + multipart_chunksize: int | str + multipart_threshold: int | str + DEFAULTS: TransferConfigDict = { "max_bandwidth": None, @@ -42,18 +42,18 @@ class InvalidConfigError(Exception): class RuntimeConfig: """Runtime configuration.""" - POSITIVE_INTEGERS: ClassVar[List[str]] = [ + POSITIVE_INTEGERS: ClassVar[list[str]] = [ "max_bandwidth", "max_concurrent_requests", "max_queue_size", "multipart_chunksize", "multipart_threshold", ] - HUMAN_READABLE_SIZES: ClassVar[List[str]] = [ + HUMAN_READABLE_SIZES: ClassVar[list[str]] = [ "multipart_chunksize", "multipart_threshold", ] - HUMAN_READABLE_RATES: ClassVar[List[str]] = ["max_bandwidth"] + HUMAN_READABLE_RATES: ClassVar[list[str]] = ["max_bandwidth"] @staticmethod def defaults() -> TransferConfigDict: @@ -64,11 +64,11 @@ def defaults() -> TransferConfigDict: def build_config( cls, *, - max_bandwidth: Optional[Union[int, str]] = None, - max_concurrent_requests: Optional[Union[int, str]] = None, - max_queue_size: Optional[Union[int, str]] = None, - multipart_chunksize: Optional[Union[int, str]] = None, - multipart_threshold: Optional[Union[int, str]] = None, + max_bandwidth: int | str | None = None, + max_concurrent_requests: int | str | None = None, + max_queue_size: int | str | None = None, + multipart_chunksize: int | str | None = None, + multipart_threshold: int | str | None = None, ) -> TransferConfigDict: """Create and convert a runtime config dictionary. 
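
For the transfer_config.py changes above: assuming the vendored conversion helpers keep awscli's behavior (where "MB" is parsed as a binary size), RuntimeConfig.build_config() starts from DEFAULTS and normalizes human-readable sizes into integer bytes. A hypothetical usage sketch:

    from runway.core.providers.aws.s3._helpers.transfer_config import RuntimeConfig

    config = RuntimeConfig.build_config(
        max_concurrent_requests=20,
        multipart_chunksize="16MB",
    )
    assert config["max_concurrent_requests"] == 20
    # assuming awscli-style binary parsing of "16MB"
    assert config["multipart_chunksize"] == 16 * 1024**2
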
@@ -85,9 +85,7 @@ def build_config(
             "multipart_chunksize": multipart_chunksize,
             "multipart_threshold": multipart_threshold,
         }
-        runtime_config.update(
-            {k: v for k, v in kwargs.items() if v is not None}  # type: ignore
-        )
+        runtime_config.update({k: v for k, v in kwargs.items() if v is not None})  # type: ignore
         cls._convert_human_readable_sizes(runtime_config)
         cls._convert_human_readable_rates(runtime_config)
         cls._validate_config(runtime_config)
@@ -127,9 +125,7 @@ def _validate_config(cls, runtime_config: TransferConfigDict) -> None:
 
     @staticmethod
     def _error_positive_value(name: str, value: int) -> NoReturn:
-        raise InvalidConfigError(
-            f"Value for {name} must be a positive integer: {value}"
-        )
+        raise InvalidConfigError(f"Value for {name} must be a positive integer: {value}")
 
 
 def create_transfer_config_from_runtime_config(
@@ -151,7 +147,7 @@ def create_transfer_config_from_runtime_config(
         "multipart_chunksize": "multipart_chunksize",
         "multipart_threshold": "multipart_threshold",
     }
-    kwargs: Dict[str, Any] = {}
+    kwargs: dict[str, Any] = {}
     for key, value in runtime_config.items():
         if key not in translation_map:
             continue
diff --git a/runway/core/providers/aws/s3/_helpers/utils.py b/runway/core/providers/aws/s3/_helpers/utils.py
index 85f81f57d..cac0c94f3 100644
--- a/runway/core/providers/aws/s3/_helpers/utils.py
+++ b/runway/core/providers/aws/s3/_helpers/utils.py
@@ -23,13 +23,8 @@
     Any,
     BinaryIO,
     Callable,
-    Dict,
-    Generator,
     NamedTuple,
-    Optional,
     TextIO,
-    Tuple,
-    Union,
     overload,
 )
 
@@ -38,10 +33,11 @@
 from s3transfer.subscribers import BaseSubscriber
 
 if TYPE_CHECKING:
+    from collections.abc import Generator
     from queue import Queue
 
     from mypy_boto3_s3.client import S3Client
-    from mypy_boto3_s3.type_defs import ObjectTypeDef
+    from mypy_boto3_s3.type_defs import DeleteObjectRequestRequestTypeDef, ObjectTypeDef
     from s3transfer.futures import TransferFuture
     from s3transfer.utils import CallArgs
 
@@ -67,8 +63,7 @@ }
 
 _S3_ACCESSPOINT_TO_BUCKET_KEY_REGEX = re.compile(
-    r"^(?P<bucket>arn:(aws).*:s3:[a-z\-0-9]+:[0-9]{12}:accesspoint[:/][^/]+)/?"
-    r"(?P<key>.*)$"
+    r"^(?P<bucket>arn:(aws).*:s3:[a-z\-0-9]+:[0-9]{12}:accesspoint[:/][^/]+)/?(?P<key>.*)$"
 )
 _S3_OUTPOST_TO_BUCKET_KEY_REGEX = re.compile(
     r"^(?P<bucket>arn:(aws).*:s3-outposts:[a-z\-0-9]+:[0-9]{12}:outpost[/:]"
@@ -93,7 +88,7 @@ def _get_filename(self, future: TransferFuture) -> str:
         raise NotImplementedError("_get_filename()")
 
 
-def _date_parser(date_string: Union[datetime, str]) -> datetime:
+def _date_parser(date_string: datetime | str) -> datetime:
     """Parse date string into a datetime object."""
     if isinstance(date_string, datetime):
         return date_string
@@ -106,7 +101,7 @@ class BucketLister:
     def __init__(
         self,
         client: S3Client,
-        date_parser: Callable[[Union[datetime, str]], datetime] = _date_parser,
+        date_parser: Callable[[datetime | str], datetime] = _date_parser,
     ) -> None:
         """Instantiate class.
 
@@ -121,10 +116,10 @@ def __init__(
     def list_objects(
         self,
         bucket: str,
-        prefix: Optional[str] = None,
-        page_size: Optional[int] = None,
+        prefix: str | None = None,
+        page_size: int | None = None,
         extra_args: Any = None,
-    ) -> Generator[Tuple[str, ObjectTypeDef], None, None]:
+    ) -> Generator[tuple[str, ObjectTypeDef], None, None]:
         """List objects in S3 bucket.
 
Args: @@ -163,7 +158,7 @@ def on_done(self, future: TransferFuture, **_: Any) -> None: """On done.""" try: future.result() - except Exception as exc: # pylint: disable=broad-except + except Exception as exc: # noqa: BLE001 self._on_failure(future, exc) else: self._on_success(future) @@ -181,7 +176,7 @@ class DeleteSourceSubscriber(OnDoneFilteredSubscriber): def _on_success(self, future: TransferFuture) -> None: try: self._delete_source(future) - except Exception as exc: # pylint: disable=broad-except + except Exception as exc: # noqa: BLE001 future.set_exception(exc) def _delete_source(self, future: TransferFuture) -> None: @@ -192,13 +187,13 @@ class DeleteSourceFileSubscriber(DeleteSourceSubscriber): """A subscriber which deletes a file.""" def _delete_source(self, future: TransferFuture) -> None: - os.remove(future.meta.call_args.fileobj) + Path(future.meta.call_args.fileobj).unlink() class DeleteSourceObjectSubscriber(DeleteSourceSubscriber): """A subscriber which deletes an object.""" - def __init__(self, client: S3Client): + def __init__(self, client: S3Client) -> None: """Instantiate class.""" self._client = client @@ -215,7 +210,7 @@ def _get_key(call_args: CallArgs) -> str: def _delete_source(self, future: TransferFuture) -> None: """Delete source.""" call_args = future.meta.call_args - delete_object_kwargs = { + delete_object_kwargs: DeleteObjectRequestRequestTypeDef = { "Bucket": self._get_bucket(call_args), "Key": self._get_key(call_args), } @@ -243,16 +238,15 @@ class CreateDirectoryError(Exception): class DirectoryCreatorSubscriber(BaseSubscriber): """Creates a directory to download if it does not exist.""" - def on_queued(self, future: TransferFuture, **_: Any): + def on_queued(self, future: TransferFuture, **_: Any) -> None: """On queued.""" - dirname = os.path.dirname(str(future.meta.call_args.fileobj)) + dirname = Path(future.meta.call_args.fileobj).parent try: - if not os.path.exists(dirname): - os.makedirs(dirname) + dirname.mkdir(exist_ok=True, parents=True) except OSError as exc: if exc.errno != errno.EEXIST: raise CreateDirectoryError( - f"Could not create directory {dirname}: {exc}" + f"Could not create directory {dirname.name}: {exc}" ) from exc @@ -275,11 +269,11 @@ class NonSeekableStream: """ - def __init__(self, fileobj: BinaryIO): + def __init__(self, fileobj: BinaryIO) -> None: """Instantiate class.""" self._fileobj = fileobj - def read(self, amt: Optional[int] = None) -> bytes: + def read(self, amt: int | None = None) -> bytes: """Read.""" if amt is None: return self._fileobj.read() @@ -300,7 +294,7 @@ class PrintTask(NamedTuple): message: str error: bool = False - total_parts: Optional[int] = None + total_parts: int | None = None warning: bool = False @@ -314,9 +308,7 @@ def _get_filename(self, future: TransferFuture) -> str: class ProvideLastModifiedTimeSubscriber(OnDoneFilteredSubscriber): """Sets utime for a downloaded file.""" - def __init__( - self, last_modified_time: datetime, result_queue: "Queue[Any]" - ) -> None: + def __init__(self, last_modified_time: datetime, result_queue: Queue[Any]) -> None: """Instantiate class.""" self._last_modified_time = last_modified_time self._result_queue = result_queue @@ -327,7 +319,7 @@ def _on_success(self, future: TransferFuture, **_: Any) -> None: last_update_tuple = self._last_modified_time.timetuple() mod_timestamp = time.mktime(last_update_tuple) set_file_utime(filename, int(mod_timestamp)) - except Exception as exc: # pylint: disable=broad-except + except Exception as exc: # noqa: BLE001 warning_message = ( 
f"Successfully Downloaded {filename} but was unable to update the " f"last modified time. {exc}" @@ -338,7 +330,7 @@ def _on_success(self, future: TransferFuture, **_: Any) -> None: class ProvideSizeSubscriber(BaseSubscriber): """A subscriber which provides the transfer size before it's queued.""" - def __init__(self, size: Optional[int]): + def __init__(self, size: int | None) -> None: """Instantiate class.""" self.size = size or 0 @@ -377,7 +369,7 @@ class RequestParamsMapper: @classmethod def map_copy_object_params( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] ) -> None: """Map config params to CopyObject request params. @@ -399,7 +391,7 @@ def map_copy_object_params( @classmethod def map_create_multipart_upload_params( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] ) -> None: """Map config params to CreateMultipartUpload request params. @@ -419,7 +411,7 @@ def map_create_multipart_upload_params( @classmethod def map_delete_object_params( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] ) -> None: """Map config params to DeleteObject request params. @@ -435,7 +427,7 @@ def map_delete_object_params( @classmethod def map_get_object_params( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] ) -> None: """Map config params to GetObject request params. @@ -452,7 +444,7 @@ def map_get_object_params( @classmethod def map_head_object_params( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] ) -> None: """Map config params to HeadObject request params. @@ -469,7 +461,7 @@ def map_head_object_params( @classmethod def map_list_objects_v2_params( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] ) -> None: """Map config params to DeleteObjectV2 request params. @@ -485,7 +477,7 @@ def map_list_objects_v2_params( @classmethod def map_put_object_params( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] ) -> None: """Map config params to PutObject request params. @@ -505,7 +497,7 @@ def map_put_object_params( @classmethod def map_upload_part_params( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] ) -> None: """Map config params to UploadPart request params. @@ -522,7 +514,7 @@ def map_upload_part_params( @classmethod def map_upload_part_copy_params( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] ) -> None: """Map config params to UploadPartCopy request params. 
@@ -538,11 +530,9 @@ def map_upload_part_copy_params( cls._set_request_payer_param(request_params, config_params) @classmethod - def _auto_populate_metadata_directive(cls, request_params: Dict[Any, Any]) -> None: + def _auto_populate_metadata_directive(cls, request_params: dict[Any, Any]) -> None: """Auto populate metadata directive.""" - if request_params.get("Metadata") and not request_params.get( - "MetadataDirective" - ): + if request_params.get("Metadata") and not request_params.get("MetadataDirective"): request_params["MetadataDirective"] = "REPLACE" @classmethod @@ -560,8 +550,8 @@ def _permission_to_param(cls, permission: str) -> str: @classmethod def _set_general_object_params( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] - ): + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] + ) -> None: """Set general object params. Parameters set in this method should be applicable to the following @@ -588,7 +578,7 @@ def _set_general_object_params( @classmethod def _set_grant_params( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] ) -> None: """Set grant params.""" if config_params.get("grants"): @@ -596,14 +586,12 @@ def _set_grant_params( try: permission, grantee = grant.split("=", 1) except ValueError: - raise ValueError( - "grants should be of the form permission=principal" - ) from None + raise ValueError("grants should be of the form permission=principal") from None request_params[cls._permission_to_param(permission)] = grantee @classmethod def _set_metadata_directive_param( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] ) -> None: """Set metadata directive param.""" if config_params.get("metadata_directive"): @@ -611,7 +599,7 @@ def _set_metadata_directive_param( @classmethod def _set_metadata_params( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] ) -> None: """Get metadata params.""" if config_params.get("metadata"): @@ -619,15 +607,15 @@ def _set_metadata_params( @classmethod def _set_request_payer_param( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] - ): + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] + ) -> None: """Set request payer param.""" if config_params.get("request_payer"): request_params["RequestPayer"] = config_params["request_payer"] @classmethod def _set_sse_c_and_copy_source_request_params( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] ) -> None: """Set SSE-C and copy source request params.""" cls._set_sse_c_request_params(request_params, config_params) @@ -635,19 +623,15 @@ def _set_sse_c_and_copy_source_request_params( @classmethod def _set_sse_c_copy_source_request_params( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] ) -> None: if config_params.get("sse_c_copy_source"): - request_params["CopySourceSSECustomerAlgorithm"] = config_params[ - "sse_c_copy_source" - ] - request_params["CopySourceSSECustomerKey"] = config_params[ - "sse_c_copy_source_key" - ] + request_params["CopySourceSSECustomerAlgorithm"] = config_params["sse_c_copy_source"] + request_params["CopySourceSSECustomerKey"] = config_params["sse_c_copy_source_key"] 
@classmethod def _set_sse_c_request_params( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] ) -> None: """Set SSE-C request params.""" if config_params.get("sse_c"): @@ -656,7 +640,7 @@ def _set_sse_c_request_params( @classmethod def _set_sse_request_params( - cls, request_params: Dict[Any, Any], config_params: Dict[Any, Any] + cls, request_params: dict[Any, Any], config_params: dict[Any, Any] ) -> None: """Set SSE request params.""" if config_params.get("sse"): @@ -668,7 +652,7 @@ def _set_sse_request_params( class StdoutBytesWriter: """Acts as a file-like object that performs the bytes_print function on write.""" - def __init__(self, stdout: Optional[TextIO] = None) -> None: + def __init__(self, stdout: TextIO | None = None) -> None: """Instantiate class.""" self._stdout = stdout @@ -692,9 +676,7 @@ def block_s3_object_lambda(s3_path: str) -> None: raise ValueError("S3 action does not support S3 Object Lambda resources") -def create_warning( - path: Optional[AnyPath], error_message: str, skip_file: bool = True -) -> PrintTask: +def create_warning(path: AnyPath | None, error_message: str, skip_file: bool = True) -> PrintTask: """Create a ``PrintTask`` for whenever a warning is to be thrown.""" print_string = "warning: " if skip_file: @@ -703,7 +685,7 @@ def create_warning( return PrintTask(message=print_string, error=False, warning=True) -def find_bucket_key(s3_path: str) -> Tuple[str, str]: +def find_bucket_key(s3_path: str) -> tuple[str, str]: """Given an S3 path return the bucket and the key represented by the S3 path. Args: @@ -726,8 +708,8 @@ def find_bucket_key(s3_path: str) -> Tuple[str, str]: def find_dest_path_comp_key( - files: FormatPathResult, src_path: Optional[AnyPath] = None -) -> Tuple[str, str]: + files: FormatPathResult, src_path: AnyPath | None = None +) -> tuple[str, str]: """Determine destination path and compare key. Args: @@ -742,17 +724,13 @@ def find_dest_path_comp_key( if src_path is None: src_path = src["path"] if isinstance(src_path, Path): # convert path to absolute path str - if src_path.is_dir(): - src_path = f"{src_path.resolve()}{os.sep}" - else: - src_path = str(src_path.resolve()) + src_path = f"{src_path.resolve()}{os.sep}" if src_path.is_dir() else str(src_path.resolve()) sep_table = {"s3": "/", "local": os.sep} - if files["dir_op"]: - rel_path = src_path[len(src["path"]) :] - else: - rel_path = src_path.split(sep_table[src_type])[-1] + rel_path = ( + src_path[len(src["path"]) :] if files["dir_op"] else src_path.split(sep_table[src_type])[-1] + ) compare_key = rel_path.replace(sep_table[src_type], "/") if files["use_src_name"]: dest_path = dest["path"] @@ -762,11 +740,11 @@ def find_dest_path_comp_key( return dest_path, compare_key -def get_file_stat(path: Path) -> Tuple[int, Optional[datetime]]: +def get_file_stat(path: Path) -> tuple[int, datetime | None]: """Get size of file in bytes and last modified time stamp.""" try: stats = path.stat() - except IOError as exc: + except OSError as exc: raise ValueError(f"Could not retrieve file stat of {path}: {exc}") from exc try: @@ -777,7 +755,7 @@ def get_file_stat(path: Path) -> Tuple[int, Optional[datetime]]: return stats.st_size, update_time -def guess_content_type(filename: AnyPath) -> Optional[str]: +def guess_content_type(filename: AnyPath) -> str | None: """Given a filename, guess its content type. If the type cannot be guessed, a value of None is returned.
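A note from the editor, not part of the diff: per the ``find_bucket_key`` docstring above, a plain S3 path splits on the first ``/`` into bucket and key, while the two ARN regexes near the top of this file handle access point and outpost paths. A simplified stand-in that covers only the plain-path branch:

from __future__ import annotations


def find_bucket_key_demo(s3_path: str) -> tuple[str, str]:
    """Split "bucket/some/key" into ("bucket", "some/key"); ARN forms are ignored here."""
    bucket, _, key = s3_path.partition("/")
    return bucket, key


assert find_bucket_key_demo("mybucket/path/to/obj") == ("mybucket", "path/to/obj")
assert find_bucket_key_demo("mybucket") == ("mybucket", "")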
@@ -794,7 +772,7 @@ def guess_content_type(filename: AnyPath) -> Optional[str]: return None -def human_readable_size(value: float) -> Optional[str]: +def human_readable_size(value: float) -> str | None: """Convert a size in bytes into a human readable format. For example:: @@ -843,11 +821,7 @@ def human_readable_to_bytes(value: str) -> int: """ value = value.lower() - if value[-2:] == "ib": - # Assume IEC suffix. - suffix = value[-3:].lower() - else: - suffix = value[-2:].lower() + suffix = value[-3:].lower() if value[-2:] == "ib" else value[-2:].lower() has_size_identifier = len(value) >= 2 and suffix in SIZE_SUFFIX if not has_size_identifier: try: @@ -868,14 +842,10 @@ def relative_path(filename: None, start: AnyPath = ...) -> None: ... @overload -def relative_path( - filename: Optional[AnyPath], start: AnyPath = ... -) -> Optional[str]: ... +def relative_path(filename: AnyPath | None, start: AnyPath = ...) -> str | None: ... -def relative_path( - filename: Optional[AnyPath], start: AnyPath = os.path.curdir -) -> Optional[str]: +def relative_path(filename: AnyPath | None, start: AnyPath = os.path.curdir) -> str | None: """Cross platform relative path of a filename. If no relative path can be calculated (i.e different @@ -888,16 +858,16 @@ def relative_path( try: dirname, basename = os.path.split(str(filename)) relative_dir = os.path.relpath(dirname, start) - return os.path.join(relative_dir, basename) + return os.path.join(relative_dir, basename) # noqa: PTH118 except ValueError: - return os.path.abspath(str(filename)) + return os.path.abspath(str(filename)) # noqa: PTH100 class SetFileUtimeError(Exception): """Set file update time error.""" -def set_file_utime(filename: AnyPath, desired_time: float): +def set_file_utime(filename: AnyPath, desired_time: float) -> None: """Set the utime of a file, and if it fails, raise a more explicit error. Args: @@ -921,7 +891,7 @@ def set_file_utime(filename: AnyPath, desired_time: float): ) from exc -def split_s3_bucket_key(s3_path: str) -> Tuple[str, str]: +def split_s3_bucket_key(s3_path: str) -> tuple[str, str]: """Split s3 path into bucket and key prefix. This will also handle the s3:// prefix. @@ -938,20 +908,19 @@ def split_s3_bucket_key(s3_path: str) -> Tuple[str, str]: return find_bucket_key(s3_path) -def uni_print(statement: str, out_file: Optional[TextIO] = None) -> None: +def uni_print(statement: str, out_file: TextIO | None = None) -> None: """Write unicode to a file, usually stdout or stderr. Ensures that the proper encoding is used if the statement is not a string type. 
""" - if out_file is None: - out_file = sys.stdout + out: TextIO | Any = sys.stdout if out_file is None else out_file try: - out_file.write(statement) + out.write(statement) except UnicodeEncodeError: - new_encoding = getattr(out_file, "encoding", "ascii") + new_encoding = getattr(out, "encoding", "ascii") if not new_encoding: new_encoding = "ascii" new_statement = statement.encode(new_encoding, "replace").decode(new_encoding) - out_file.write(new_statement) - out_file.flush() + out.write(new_statement) + out.flush() diff --git a/runway/core/providers/aws/s3/_sync_handler.py b/runway/core/providers/aws/s3/_sync_handler.py index 9f4d0b528..2f4c509c4 100644 --- a/runway/core/providers/aws/s3/_sync_handler.py +++ b/runway/core/providers/aws/s3/_sync_handler.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, List, Optional, Union +from typing import TYPE_CHECKING from .....compat import cached_property from ._helpers.action_architecture import ActionArchitecture @@ -23,15 +23,15 @@ class S3SyncHandler: def __init__( self, - context: Union[CfnginContext, RunwayContext], + context: CfnginContext | RunwayContext, *, delete: bool = False, dest: str, - exclude: Optional[List[str]] = None, + exclude: list[str] | None = None, follow_symlinks: bool = False, - include: Optional[List[str]] = None, - page_size: Optional[int] = None, - session: Optional[boto3.Session] = None, + include: list[str] | None = None, + page_size: int | None = None, + session: boto3.Session | None = None, src: str, ) -> None: """Instantiate class. @@ -80,15 +80,17 @@ def client(self) -> S3Client: def transfer_config(self) -> TransferConfigDict: """Get runtime transfer config.""" return RuntimeConfig.build_config( - **self._botocore_session.get_scoped_config().get("s3", {}) + **self._botocore_session.get_scoped_config().get( # pyright: ignore[reportUnknownArgumentType] + "s3", {} + ) ) def run(self) -> None: """Run sync.""" - register_sync_strategies(self._botocore_session) # type: ignore + register_sync_strategies(self._botocore_session) ActionArchitecture( session=self._session, - botocore_session=self._botocore_session, # type: ignore + botocore_session=self._botocore_session, action="sync", parameters=self.parameters.data, runtime_config=self.transfer_config, diff --git a/runway/core/providers/aws/type_defs.py b/runway/core/providers/aws/type_defs.py index 4c44ad68f..c6efd8484 100644 --- a/runway/core/providers/aws/type_defs.py +++ b/runway/core/providers/aws/type_defs.py @@ -2,8 +2,6 @@ from __future__ import annotations -from typing import List - from typing_extensions import TypedDict @@ -14,4 +12,4 @@ class TagTypeDef(TypedDict): Value: str -TagSetTypeDef = List[TagTypeDef] +TagSetTypeDef = list[TagTypeDef] diff --git a/runway/dependency_managers/_pip.py b/runway/dependency_managers/_pip.py index 6d479c557..e7150f457 100644 --- a/runway/dependency_managers/_pip.py +++ b/runway/dependency_managers/_pip.py @@ -6,9 +6,7 @@ import re import subprocess from pathlib import Path -from typing import TYPE_CHECKING, Any, Iterable, List, Optional, Tuple, Union, cast - -from typing_extensions import Final, Literal +from typing import TYPE_CHECKING, Any, ClassVar, cast from ..compat import cached_property, shlex_join from ..exceptions import RunwayError @@ -16,6 +14,8 @@ from .base_classes import DependencyManager if TYPE_CHECKING: + from collections.abc import Iterable + from _typeshed import StrPath from .._logging import RunwayLogger @@ -30,8 +30,7 @@ class PipInstallFailedError(RunwayError): 
def __init__(self, *args: Any, **kwargs: Any) -> None: """Instantiate class. All args/kwargs are passed to parent method.""" self.message = ( - "pip failed to install dependencies; " - "review pip's output above to troubleshoot" + "pip failed to install dependencies; review pip's output above to troubleshoot" ) super().__init__(*args, **kwargs) @@ -39,10 +38,10 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class Pip(DependencyManager): """pip CLI interface.""" - CONFIG_FILES: Final[Tuple[Literal["requirements.txt"]]] = ("requirements.txt",) + CONFIG_FILES: ClassVar[tuple[str, ...]] = ("requirements.txt",) """Configuration files used by pip.""" - EXECUTABLE: Final[Literal["pip"]] = "pip" + EXECUTABLE: ClassVar[str] = "pip" """CLI executable.""" @cached_property @@ -51,9 +50,7 @@ def python_version(self) -> Version: cmd_output = self._run_command([self.EXECUTABLE, "--version"]) match = re.search(r"^pip \S* from .+ \(python (?P<version>\S*)\)$", cmd_output) if not match: - LOGGER.warning( - "unable to parse Python version from output:\n%s", cmd_output - ) + LOGGER.warning("unable to parse Python version from output:\n%s", cmd_output) return Version("0.0.0") return Version(match.group("version")) @@ -73,24 +70,23 @@ def dir_is_project(cls, directory: StrPath, **kwargs: Any) -> bool: Args: directory: Directory to check. + **kwargs: Arbitrary keyword arguments. """ kwargs.setdefault("file_name", cls.CONFIG_FILES[0]) requirements_txt = Path(directory) / kwargs["file_name"] - if requirements_txt.is_file(): - return True - return False + return bool(requirements_txt.is_file()) @classmethod def generate_install_command( cls, *, - cache_dir: Optional[StrPath] = None, + cache_dir: StrPath | None = None, no_cache_dir: bool = False, no_deps: bool = False, requirements: StrPath, target: StrPath, - ) -> List[str]: + ) -> list[str]: """Generate the command that when run will install dependencies. This method is exposed to easily format the command to be run by with @@ -118,8 +114,8 @@ def generate_install_command( def install( self, *, - cache_dir: Optional[StrPath] = None, - extend_args: Optional[List[str]] = None, + cache_dir: StrPath | None = None, + extend_args: list[str] | None = None, no_cache_dir: bool = False, no_deps: bool = False, requirements: StrPath, @@ -165,14 +161,15 @@ def install( @classmethod def generate_command( cls, - command: Union[List[str], str], - **kwargs: Optional[Union[bool, Iterable[str], str]], - ) -> List[str]: + command: list[str] | str, + **kwargs: bool | Iterable[str] | str | None, + ) -> list[str]: """Generate command to be executed and log it. Args: command: Command to run. args: Additional args to pass to the command. + **kwargs: Arbitrary keyword arguments. Returns: The full command to be passed into a subprocess.
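A note from the editor, not part of the diff: ``Pip.generate_install_command`` is only partially visible in these hunks, so the following is a hedged approximation of the command it assembles; the flags are pip's real CLI flags, but the exact ordering and helper plumbing in runway may differ:

from __future__ import annotations


def generate_install_command_demo(
    *,
    requirements: str,
    target: str,
    cache_dir: str | None = None,
    no_cache_dir: bool = False,
    no_deps: bool = False,
) -> list[str]:
    """Build a pip install command roughly matching the signature above."""
    cmd = ["pip", "install", "--requirement", requirements, "--target", target]
    if cache_dir:
        cmd.extend(["--cache-dir", cache_dir])
    if no_cache_dir:
        cmd.append("--no-cache-dir")
    if no_deps:
        cmd.append("--no-deps")
    return cmd


print(generate_install_command_demo(requirements="requirements.txt", target="./deps"))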
diff --git a/runway/dependency_managers/_pipenv.py b/runway/dependency_managers/_pipenv.py index 031f18452..17d03f439 100644 --- a/runway/dependency_managers/_pipenv.py +++ b/runway/dependency_managers/_pipenv.py @@ -7,9 +7,7 @@ import re import subprocess from pathlib import Path -from typing import TYPE_CHECKING, Any, Tuple - -from typing_extensions import Final, Literal +from typing import TYPE_CHECKING, Any, ClassVar from ..compat import cached_property from ..exceptions import RunwayError @@ -50,13 +48,13 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class Pipenv(DependencyManager): """Pipenv dependency manager.""" - CONFIG_FILES: Final[Tuple[Literal["Pipfile"], Literal["Pipfile.lock"]]] = ( + CONFIG_FILES: ClassVar[tuple[str, ...]] = ( "Pipfile", "Pipfile.lock", ) """Configuration files used by pipenv.""" - EXECUTABLE: Final[Literal["pipenv"]] = "pipenv" + EXECUTABLE: ClassVar[str] = "pipenv" """CLI executable.""" @cached_property @@ -65,9 +63,7 @@ def version(self) -> Version: cmd_output = self._run_command([self.EXECUTABLE, "--version"]) match = re.search(r"^pipenv, version (?P<version>\S*)", cmd_output) if not match: - LOGGER.warning( - "unable to parse pipenv version from output:\n%s", cmd_output - ) + LOGGER.warning("unable to parse pipenv version from output:\n%s", cmd_output) return Version("0.0.0") return Version(match.group("version")) @@ -111,8 +107,5 @@ def export(self, *, dev: bool = False, output: StrPath) -> Path: except subprocess.CalledProcessError as exc: raise PipenvExportFailedError from exc output.parent.mkdir(exist_ok=True, parents=True) # ensure directory exists - # python3.7 w/ pylint 2.12.[12] crashes if result is not wrapped in str() - output.write_text( - str(result), encoding=locale.getpreferredencoding(do_setlocale=False) - ) + output.write_text(str(result), encoding=locale.getpreferredencoding(do_setlocale=False)) return output diff --git a/runway/dependency_managers/_poetry.py b/runway/dependency_managers/_poetry.py index ebe109fe6..d2b982716 100644 --- a/runway/dependency_managers/_poetry.py +++ b/runway/dependency_managers/_poetry.py @@ -6,10 +6,9 @@ import re import subprocess from pathlib import Path -from typing import TYPE_CHECKING, Any, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, ClassVar import tomli -from typing_extensions import Final, Literal from ..compat import cached_property from ..exceptions import RunwayError @@ -30,6 +29,8 @@ def __init__(self, output: str, *args: Any, **kwargs: Any) -> None: Args: output: The output from running ``poetry export``. *args: Variable length argument list. **kwargs: Arbitrary keyword arguments.
""" self.message = f"poetry export failed with the following output:\n{output}" @@ -52,13 +53,13 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class Poetry(DependencyManager): """Poetry dependency manager.""" - CONFIG_FILES: Final[Tuple[Literal["poetry.lock"], Literal["pyproject.toml"]]] = ( + CONFIG_FILES: ClassVar[tuple[str, ...]] = ( "poetry.lock", "pyproject.toml", ) """Configuration files used by poetry.""" - EXECUTABLE: Final[Literal["poetry"]] = "poetry" + EXECUTABLE: ClassVar[str] = "poetry" """CLI executable.""" @cached_property @@ -67,9 +68,7 @@ def version(self) -> Version: cmd_output = self._run_command([self.EXECUTABLE, "--version"]) match = re.search(r"^Poetry version (?P\S*)", cmd_output) if not match: - LOGGER.warning( - "unable to parse poetry version from output:\n%s", cmd_output - ) + LOGGER.warning("unable to parse poetry version from output:\n%s", cmd_output) return Version("0.0.0") return Version(match.group("version")) @@ -88,9 +87,7 @@ def dir_is_project(cls, directory: StrPath, **__kwargs: Any) -> bool: # check for PEP-517 definition pyproject = tomli.loads(pyproject_path.read_text()) - build_system_requires: Optional[List[str]] = pyproject.get( - "build-system", {} - ).get("requires") + build_system_requires: list[str] | None = pyproject.get("build-system", {}).get("requires") if build_system_requires: for req in build_system_requires: @@ -103,7 +100,7 @@ def export( self, *, dev: bool = False, - extras: Optional[List[str]] = None, + extras: list[str] | None = None, output: StrPath, output_format: str = "requirements.txt", with_credentials: bool = True, diff --git a/runway/dependency_managers/base_classes.py b/runway/dependency_managers/base_classes.py index a40bc457e..40a7151d9 100644 --- a/runway/dependency_managers/base_classes.py +++ b/runway/dependency_managers/base_classes.py @@ -3,7 +3,7 @@ from __future__ import annotations from pathlib import Path -from typing import TYPE_CHECKING, Any, ClassVar, Tuple, Union +from typing import TYPE_CHECKING, Any, ClassVar from ..compat import cached_property from ..mixins import CliInterfaceMixin @@ -24,12 +24,10 @@ class DependencyManager(CliInterfaceMixin): """ - CONFIG_FILES: ClassVar[Tuple[str, ...]] + CONFIG_FILES: ClassVar[tuple[str, ...]] """Configuration files used by the dependency manager.""" - def __init__( - self, context: Union[CfnginContext, RunwayContext], cwd: StrPath - ) -> None: + def __init__(self, context: CfnginContext | RunwayContext, cwd: StrPath) -> None: """Instantiate class. Args: diff --git a/runway/env_mgr/__init__.py b/runway/env_mgr/__init__.py index be75f02fb..e06635f65 100644 --- a/runway/env_mgr/__init__.py +++ b/runway/env_mgr/__init__.py @@ -8,12 +8,13 @@ import shutil import sys from pathlib import Path -from typing import TYPE_CHECKING, Generator, Optional, Union, cast +from typing import TYPE_CHECKING, cast from ..compat import cached_property from ..mixins import DelCachedPropMixin if TYPE_CHECKING: + from collections.abc import Generator from urllib.error import URLError from .._logging import RunwayLogger @@ -67,13 +68,11 @@ class EnvManager(DelCachedPropMixin): _bin_name: str - current_version: Optional[str] + current_version: str | None env_dir_name: str path: Path - def __init__( - self, bin_name: str, dir_name: str, path: Optional[Path] = None - ) -> None: + def __init__(self, bin_name: str, dir_name: str, path: Path | None = None) -> None: """Initialize class. 
Args: @@ -85,10 +84,8 @@ def __init__( """ self._bin_name = bin_name + self.command_suffix self.current_version = None - self.env_dir_name = ( - dir_name if platform.system() == "Windows" else "." + dir_name - ) - self.path = Path.cwd() if not path else path + self.env_dir_name = dir_name if platform.system() == "Windows" else "." + dir_name + self.path = path if path else Path.cwd() @property def bin(self) -> Path: @@ -129,7 +126,7 @@ def versions_dir(self) -> Path: return self.env_dir / "versions" @cached_property - def version_file(self) -> Optional[Path]: + def version_file(self) -> Path | None: """Find and return a ".<bin>-version" file if one is present. Returns: @@ -138,7 +135,7 @@ """ raise NotImplementedError - def install(self, version_requested: Optional[str] = None) -> str: + def install(self, version_requested: str | None = None) -> str: """Ensure <bin> is installed.""" raise NotImplementedError @@ -146,7 +143,7 @@ def list_installed(self) -> Generator[Path, None, None]: """List installed versions of <bin>.""" raise NotImplementedError - def uninstall(self, version: Union[str, Version]) -> bool: + def uninstall(self, version: str | Version) -> bool: """Uninstall a version of the managed binary. Args: diff --git a/runway/env_mgr/kbenv.py b/runway/env_mgr/kbenv.py index bc90cd8a3..6ad1b28f1 100644 --- a/runway/env_mgr/kbenv.py +++ b/runway/env_mgr/kbenv.py @@ -11,12 +11,11 @@ import shutil import sys import tempfile -from typing import TYPE_CHECKING, Generator, Optional, cast +from typing import TYPE_CHECKING, Final, cast from urllib.error import URLError from urllib.request import urlretrieve import requests -from typing_extensions import Final from ..compat import cached_property from ..exceptions import KubectlVersionNotSpecified @@ -24,6 +23,7 @@ from . import EnvManager, handle_bin_download_error if TYPE_CHECKING: + from collections.abc import Generator from pathlib import Path from .._logging import RunwayLogger @@ -50,11 +50,11 @@ def verify_kb_release(kb_url: str, download_dir: str, filename: str) -> None: # the ridiculousness should be short-lived as md5 & sha1 support won't last # long. try: - hash_alg: "hashlib._Hash" = hashlib.sha512() + hash_alg: hashlib._Hash = hashlib.sha512() checksum_filename = filename + "." + hash_alg.name LOGGER.debug("attempting download of kubectl %s checksum...", hash_alg.name) download_request = requests.get( - kb_url + "/" + checksum_filename, allow_redirects=True + kb_url + "/" + checksum_filename, allow_redirects=True, timeout=30 ) download_request.raise_for_status() except requests.exceptions.HTTPError: @@ -63,42 +63,35 @@ checksum_filename = filename + "." + hash_alg.name LOGGER.debug("attempting download of kubectl %s checksum...", hash_alg.name) download_request = requests.get( - kb_url + "/" + checksum_filename, allow_redirects=True + kb_url + "/" + checksum_filename, allow_redirects=True, timeout=30 ) download_request.raise_for_status() except requests.exceptions.HTTPError: try: - hash_alg = hashlib.sha1() + hash_alg = hashlib.sha1() # noqa: S324 checksum_filename = filename + "."
+ hash_alg.name - LOGGER.debug( - "attempting download of kubectl %s checksum...", hash_alg.name - ) + LOGGER.debug("attempting download of kubectl %s checksum...", hash_alg.name) download_request = requests.get( - kb_url + "/" + checksum_filename, allow_redirects=True + kb_url + "/" + checksum_filename, allow_redirects=True, timeout=30 ) download_request.raise_for_status() except requests.exceptions.HTTPError: try: - hash_alg = hashlib.md5() + hash_alg = hashlib.md5() # noqa: S324 checksum_filename = filename + "." + hash_alg.name - LOGGER.debug( - "attempting download of kubectl %s checksum...", hash_alg.name - ) + LOGGER.debug("attempting download of kubectl %s checksum...", hash_alg.name) download_request = requests.get( - kb_url + "/" + checksum_filename, allow_redirects=True + kb_url + "/" + checksum_filename, allow_redirects=True, timeout=30 ) download_request.raise_for_status() except requests.exceptions.HTTPError: LOGGER.error("Unable to retrieve kubectl checksum file") sys.exit(1) - if sys.version_info < (3, 0): - kb_hash = download_request.content.rstrip("\n") - else: - kb_hash = download_request.content.decode().rstrip("\n") + kb_hash = download_request.content.decode().rstrip("\n") checksum = FileHash(hash_alg) - checksum.add_file(os.path.join(download_dir, filename)) + checksum.add_file(os.path.join(download_dir, filename)) # noqa: PTH118 if kb_hash != checksum.hexdigest: LOGGER.error( "downloaded kubectl %s does not match %s checksum %s", @@ -113,8 +106,8 @@ def verify_kb_release(kb_url: str, download_dir: str, filename: str) -> None: def download_kb_release( version: str, versions_dir: Path, - kb_platform: Optional[str] = None, - arch: Optional[str] = None, + kb_platform: str | None = None, + arch: str | None = None, ) -> None: """Download kubectl and return path to it.""" version_dir = versions_dir / version @@ -142,14 +135,16 @@ try: LOGGER.verbose("downloading kubectl from %s...", kb_url) - urlretrieve(kb_url + "/" + filename, os.path.join(download_dir, filename)) + urlretrieve( # noqa: S310 + kb_url + "/" + filename, os.path.join(download_dir, filename) # noqa: PTH118 + ) except URLError as exc: handle_bin_download_error(exc, "kubectl") verify_kb_release(kb_url, download_dir, filename) version_dir.mkdir(parents=True, exist_ok=True) - shutil.move(os.path.join(download_dir, filename), version_dir / filename) + shutil.move(os.path.join(download_dir, filename), version_dir / filename) # noqa: PTH118 shutil.rmtree(download_dir) result = version_dir / filename result.chmod(result.stat().st_mode | 0o0111) # ensure it is executable @@ -164,9 +159,7 @@ class KBEnvManager(EnvManager): VERSION_REGEX: Final[str] = r"^(v)?(?P<version>[0-9]+\.[0-9]+\.[0-9]+\S*)" - def __init__( - self, path: Optional[Path] = None, *, overlay_path: Optional[Path] = None - ) -> None: + def __init__(self, path: Path | None = None, *, overlay_path: Path | None = None) -> None: """Initialize class. Args: @@ -178,7 +171,7 @@ def __init__( self.overlay_path = overlay_path @cached_property - def version(self) -> Optional[Version]: + def version(self) -> Version | None: """kubectl version.""" if not self.current_version: self.current_version = self.get_version_from_file() @@ -187,7 +180,7 @@ def version(self) -> Optional[Version]: return self.parse_version_string(self.current_version) @cached_property - def version_file(self) -> Optional[Path]: + def version_file(self) -> Path | None: """Find and return a ".kubectl-version" file if one is present.
Returns: @@ -204,7 +197,7 @@ return tmp_path return None - def get_version_from_file(self, file_path: Optional[Path] = None) -> Optional[str]: + def get_version_from_file(self, file_path: Path | None = None) -> str | None: """Get kubectl version from a file. Args: @@ -219,7 +212,7 @@ LOGGER.debug("file path not provided and version file could not be found") return None - def install(self, version_requested: Optional[str] = None) -> str: + def install(self, version_requested: str | None = None) -> str: """Ensure kubectl is available.""" if not version_requested: if self.version: @@ -237,9 +230,7 @@ # Return early (i.e before reaching out to the internet) if the # matching version is already installed if (self.versions_dir / version_requested).is_dir(): - LOGGER.verbose( - "kubectl version %s already installed; using it...", version_requested - ) + LOGGER.verbose("kubectl version %s already installed; using it...", version_requested) self.current_version = version_requested return str(self.bin) @@ -285,7 +276,5 @@ def parse_version_string(cls, version: str) -> Version: """ match = re.search(cls.VERSION_REGEX, version) if not match: - raise ValueError( - f"provided version doesn't conform to regex: {cls.VERSION_REGEX}" - ) + raise ValueError(f"provided version doesn't conform to regex: {cls.VERSION_REGEX}") return Version(f"v{match.group('version')}") diff --git a/runway/env_mgr/tfenv.py b/runway/env_mgr/tfenv.py index 45ec70357..f5ad4efa8 100644 --- a/runway/env_mgr/tfenv.py +++ b/runway/env_mgr/tfenv.py @@ -14,17 +14,7 @@ import sys import tempfile import zipfile -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Generator, - List, - Optional, - Union, - cast, - overload, -) +from typing import TYPE_CHECKING, Any, Final, cast, overload from urllib.error import URLError from urllib.request import urlretrieve @@ -32,7 +22,6 @@ import hcl2 import requests from packaging.version import InvalidVersion -from typing_extensions import Final from ..compat import cached_property from ..exceptions import HclParserError @@ -40,6 +29,7 @@ from .
import EnvManager, handle_bin_download_error if TYPE_CHECKING: + from collections.abc import Generator from pathlib import Path from types import ModuleType @@ -53,8 +43,8 @@ def download_tf_release( version: str, versions_dir: Path, command_suffix: str, - tf_platform: Optional[str] = None, - arch: Optional[str] = None, + tf_platform: str | None = None, + arch: str | None = None, ) -> None: """Download Terraform archive and return path to it.""" version_dir = versions_dir / version @@ -64,19 +54,15 @@ def download_tf_release( if tf_platform: tfver_os = tf_platform + "_" + arch + elif platform.system().startswith("Darwin"): + tfver_os = f"darwin_{arch}" + elif platform.system().startswith("Windows") or ( + platform.system().startswith("MINGW64") + or (platform.system().startswith("MSYS_NT") or platform.system().startswith("CYGWIN_NT")) + ): + tfver_os = f"windows_{arch}" else: - if platform.system().startswith("Darwin"): - tfver_os = f"darwin_{arch}" - elif platform.system().startswith("Windows") or ( - platform.system().startswith("MINGW64") - or ( - platform.system().startswith("MSYS_NT") - or platform.system().startswith("CYGWIN_NT") - ) - ): - tfver_os = f"windows_{arch}" - else: - tfver_os = f"linux_{arch}" + tfver_os = f"linux_{arch}" download_dir = tempfile.mkdtemp() filename = f"terraform_{version}_{tfver_os}.zip" @@ -86,20 +72,20 @@ def download_tf_release( try: LOGGER.verbose("downloading Terraform from %s...", tf_url) for i in [filename, shasums_name]: - urlretrieve(tf_url + "/" + i, os.path.join(download_dir, i)) + urlretrieve(tf_url + "/" + i, os.path.join(download_dir, i)) # noqa: PTH118, S310 except URLError as exc: handle_bin_download_error(exc, "Terraform") - tf_hash = get_hash_for_filename(filename, os.path.join(download_dir, shasums_name)) + tf_hash = get_hash_for_filename( + filename, os.path.join(download_dir, shasums_name) # noqa: PTH118 + ) checksum = FileHash(hashlib.sha256()) - checksum.add_file(os.path.join(download_dir, filename)) + checksum.add_file(os.path.join(download_dir, filename)) # noqa: PTH118 if tf_hash != checksum.hexdigest: - LOGGER.error( - "downloaded Terraform %s does not match sha256 %s", filename, tf_hash - ) + LOGGER.error("downloaded Terraform %s does not match sha256 %s", filename, tf_hash) sys.exit(1) - with zipfile.ZipFile(os.path.join(download_dir, filename)) as tf_zipfile: + with zipfile.ZipFile(os.path.join(download_dir, filename)) as tf_zipfile: # noqa: PTH118 version_dir.mkdir(parents=True, exist_ok=True) tf_zipfile.extractall(str(version_dir)) @@ -108,10 +94,10 @@ def download_tf_release( result.chmod(result.stat().st_mode | 0o0111) # ensure it is executable -def get_available_tf_versions(include_prerelease: bool = False) -> List[str]: +def get_available_tf_versions(include_prerelease: bool = False) -> list[str]: """Return available Terraform versions.""" tf_releases = json.loads( - requests.get("https://releases.hashicorp.com/index.json").text + requests.get("https://releases.hashicorp.com/index.json", timeout=30).text )["terraform"] # Remove versions that don't align with @@ -139,22 +125,22 @@ def get_latest_tf_version(include_prerelease: bool = False) -> str: return get_available_tf_versions(include_prerelease)[0] -def load_terraform_module(parser: ModuleType, path: Path) -> Dict[str, Any]: +def load_terraform_module(parser: ModuleType, path: Path) -> dict[str, Any]: """Load all Terraform files in a module into one dict. Args: - parser (Union[hcl, hcl2]): Parser to use when loading files. + parser: Parser to use when loading files. 
path: Terraform module path. All Terraform files in the path will be loaded. """ - result: Dict[str, Any] = {} + result: dict[str, Any] = {} LOGGER.debug("using %s parser to load module: %s", parser.__name__.upper(), path) for tf_file in path.glob("*.tf"): try: - tf_config = parser.loads(tf_file.read_text()) # type: ignore - result = merge_dicts(result, cast(Dict[str, Any], tf_config)) - except Exception as exc: + tf_config = parser.loads(tf_file.read_text()) + result = merge_dicts(result, cast("dict[str, Any]", tf_config)) + except Exception as exc: # noqa: BLE001 raise HclParserError(exc, tf_file, parser) from None return result @@ -171,38 +157,36 @@ class TFEnvManager(EnvManager): r"^Terraform v(?P<version>[0-9]*\.[0-9]*\.[0-9]*)(?P<suffix>-.*)?" ) - def __init__(self, path: Optional[Path] = None) -> None: + def __init__(self, path: Path | None = None) -> None: """Initialize class.""" super().__init__("terraform", "tfenv", path) @cached_property - def backend(self) -> Dict[str, Any]: + def backend(self) -> dict[str, Any]: """Backend config of the Terraform module.""" # Terraform can only have one backend configured; this formats the # data to make it easier to work with - return [ + return next( {"type": k, "config": v} for k, v in self.terraform_block.get( - "backend", {None: cast(Dict[str, str], {})} + "backend", {None: cast("dict[str, str]", {})} ).items() - ][0] + ) @cached_property - def terraform_block(self) -> Dict[str, Any]: + def terraform_block(self) -> dict[str, Any]: # noqa: C901 """Collect Terraform configuration blocks from a Terraform module.""" @overload - def _flatten_lists(data: Dict[str, Any]) -> Dict[str, Any]: ... + def _flatten_lists(data: dict[str, Any]) -> dict[str, Any]: ... @overload - def _flatten_lists(data: List[Any]) -> List[Any]: ... + def _flatten_lists(data: list[Any]) -> list[Any]: ... @overload def _flatten_lists(data: str) -> str: ... - def _flatten_lists( - data: Union[Dict[str, Any], List[Any], Any] - ) -> Union[Dict[str, Any], Any]: + def _flatten_lists(data: dict[str, Any] | list[Any] | Any) -> dict[str, Any] | Any: """Flatten HCL2 list attributes until it's fixed.
python-hcl2 incorrectly turns all attributes into lists so we need @@ -216,28 +200,28 @@ def _flatten_lists( """ if not isinstance(data, dict): return data - copy_data = cast(Dict[str, Any], data.copy()) + copy_data = data.copy() for attr, val in copy_data.items(): if isinstance(val, list): - if len(cast(List[Any], val)) == 1: + if len(cast("list[Any]", val)) == 1: # pull single values out of lists data[attr] = _flatten_lists(cast(Any, val[0])) else: - data[attr] = [_flatten_lists(v) for v in cast(List[Any], val)] + data[attr] = [_flatten_lists(v) for v in cast("list[Any]", val)] elif isinstance(val, dict): - data[attr] = _flatten_lists(cast(Dict[str, Any], val)) + data[attr] = _flatten_lists(cast("dict[str, Any]", val)) return data try: - result: Union[Dict[str, Any], List[Dict[str, Any]]] = load_terraform_module( + result: dict[str, Any] | list[dict[str, Any]] = load_terraform_module( hcl2, self.path - ).get("terraform", cast(Dict[str, Any], {})) + ).get("terraform", cast("dict[str, Any]", {})) except HclParserError as exc: LOGGER.warning(exc) LOGGER.warning("failed to parse as HCL2; trying HCL...") try: result = load_terraform_module(hcl, self.path).get( - "terraform", cast(Dict[str, Any], {}) + "terraform", cast("dict[str, Any]", {}) ) except HclParserError as exc2: LOGGER.warning(exc2) @@ -251,7 +235,7 @@ def _flatten_lists( return _flatten_lists(result) @cached_property - def version(self) -> Optional[Version]: + def version(self) -> Version | None: """Terraform version.""" version_requested = self.current_version or self.get_version_from_file() @@ -263,9 +247,7 @@ version_requested = self.get_min_required() if re.match(r"^latest:.*$", version_requested): - regex = re.search(r"latest:(.*)", version_requested).group( # type: ignore - 1 - ) + regex = re.search(r"latest:(.*)", version_requested).group(1) # type: ignore include_prerelease_versions = False elif re.match(r"^latest$", version_requested): regex = r"^[0-9]+\.[0-9]+\.[0-9]+$" @@ -292,7 +274,7 @@ return self.parse_version_string(self.current_version) @cached_property - def version_file(self) -> Optional[Path]: + def version_file(self) -> Path | None: """Find and return a ".terraform-version" file if one is present. Returns: @@ -334,7 +316,7 @@ def get_min_required(self) -> str: ) sys.exit(1) - def get_version_from_file(self, file_path: Optional[Path] = None) -> Optional[str]: + def get_version_from_file(self, file_path: Path | None = None) -> str | None: """Get Terraform version from a file.
Args: @@ -349,7 +331,7 @@ def get_version_from_file(self, file_path: Optional[Path] = None) -> Optional[st LOGGER.debug("file path not provided and version file could not be found") return None - def install(self, version_requested: Optional[str] = None) -> str: + def install(self, version_requested: str | None = None) -> str: """Ensure Terraform is available.""" if version_requested: self.set_version(version_requested) @@ -362,9 +344,7 @@ def install(self, version_requested: Optional[str] = None) -> str: # Now that a version has been selected, skip downloading if it's # already been downloaded if (self.versions_dir / str(self.version)).is_dir(): - LOGGER.verbose( - "Terraform version %s already installed; using it...", self.version - ) + LOGGER.verbose("Terraform version %s already installed; using it...", self.version) return str(self.bin) LOGGER.info("downloading and using Terraform version %s ...", self.version) @@ -400,11 +380,11 @@ def set_version(self, version: str) -> None: @classmethod def get_version_from_executable( cls, - bin_path: Union[Path, str], + bin_path: Path | str, *, - cwd: Optional[Union[Path, str]] = None, - env: Optional[Dict[str, str]] = None, - ) -> Optional[Version]: + cwd: Path | str | None = None, + env: dict[str, str] | None = None, + ) -> Version | None: """Get Terraform version from an executable. Args: @@ -413,9 +393,7 @@ def get_version_from_executable( env: Environment variable overrides. """ - output = subprocess.check_output( - [str(bin_path), "-version"], cwd=cwd, env=env - ).decode() + output = subprocess.check_output([str(bin_path), "-version"], cwd=cwd, env=env).decode() match = re.search(cls.VERSION_OUTPUT_REGEX, output) if not match: return None @@ -432,7 +410,5 @@ def parse_version_string(cls, version: str) -> Version: """ match = re.search(cls.VERSION_REGEX, version) if not match: - raise ValueError( - f"provided version doesn't conform to regex: {cls.VERSION_REGEX}" - ) + raise ValueError(f"provided version doesn't conform to regex: {cls.VERSION_REGEX}") return Version(match.group("version")) diff --git a/runway/exceptions.py b/runway/exceptions.py index 8984e06d5..ea674e2d4 100644 --- a/runway/exceptions.py +++ b/runway/exceptions.py @@ -2,12 +2,12 @@ from __future__ import annotations -from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Any from .utils import DOC_SITE if TYPE_CHECKING: + from pathlib import Path from types import ModuleType from .variables import ( @@ -35,11 +35,11 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class ConfigNotFound(RunwayError): """Configuration file could not be found.""" - looking_for: List[str] + looking_for: list[str] message: str path: Path - def __init__(self, *, looking_for: Optional[List[str]] = None, path: Path) -> None: + def __init__(self, *, looking_for: list[str] | None = None, path: Path) -> None: """Instantiate class. 
Args: @@ -51,10 +51,7 @@ def __init__(self, *, looking_for: Optional[List[str]] = None, path: Path) -> No self.path = path if looking_for: - self.message = ( - f"config file not found at path {path}; " - f"looking for one of {looking_for}" - ) + self.message = f"config file not found at path {path}; looking for one of {looking_for}" else: self.message = f"config file not found at path {path}" super().__init__(self.path, self.looking_for) @@ -92,7 +89,7 @@ class DockerExecFailedError(RunwayError): exit_code: int """The ``StatusCode`` returned by Docker.""" - def __init__(self, response: Dict[str, Any]) -> None: + def __init__(self, response: dict[str, Any]) -> None: """Instantiate class. Args: @@ -103,7 +100,7 @@ def __init__(self, response: Dict[str, Any]) -> None: """ self.exit_code = response.get("StatusCode", 1) # we can assume this will be > 0 - error = response.get("Error") or {} # value from dict could be NoneType + error: dict[Any, Any] = response.get("Error") or {} # value from dict could be NoneType self.message = error.get("Message", "error message undefined") super().__init__() @@ -130,6 +127,8 @@ def __init__( lookup: The variable value lookup that was attempted and resulted in an exception being raised. cause: The exception that was raised. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ self.cause = cause @@ -156,13 +155,14 @@ def __init__( Args: variable: The variable containing the failed lookup. lookup_error: The exception that was raised directly before this one. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ self.cause = lookup_error self.variable = variable self.message = ( - f'Could not resolve lookup "{lookup_error.lookup}" ' - f'for variable "{variable.name}"' + f'Could not resolve lookup "{lookup_error.lookup}" for variable "{variable.name}"' ) super().__init__(*args, **kwargs) @@ -175,8 +175,8 @@ class HclParserError(RunwayError): def __init__( self, exc: Exception, - file_path: Union[Path, str], - parser: Optional[ModuleType] = None, + file_path: Path | str, + parser: ModuleType | None = None, ) -> None: """Instantiate class. @@ -189,9 +189,7 @@ def __init__( self.reason = exc self.file_path = file_path if parser: - self.message = ( - f"Unable to parse {file_path} as {parser.__name__.upper()}\n\n{exc}" - ) + self.message = f"Unable to parse {file_path} as {parser.__name__.upper()}\n\n{exc}" else: self.message = f"Unable to parse {file_path}\n\n{exc}" super().__init__() @@ -273,6 +271,8 @@ def __init__(self, stack_name: str, output: str, *args: Any, **kwargs: Any) -> N Args: stack_name: Name of the stack. output: The output that does not exist. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ self.stack_name = stack_name @@ -321,6 +321,8 @@ def __init__(self, lookup: VariableValueLookup, *args: Any, **kwargs: Any) -> No Args: lookup: Variable value lookup that could not find a handler. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ self.message = f'Unknown lookup type "{lookup.lookup_name.value}" in "{lookup}"' @@ -337,11 +339,11 @@ def __init__(self, variable: Variable, *args: Any, **kwargs: Any) -> None: Args: variable: The unresolved variable. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. 
""" - self.message = ( - f'Attempted to use variable "{variable.name}" before it was resolved' - ) + self.message = f'Attempted to use variable "{variable.name}" before it was resolved' self.variable = variable super().__init__(*args, **kwargs) @@ -363,6 +365,8 @@ def __init__(self, lookup: VariableValueLookup, *args: Any, **kwargs: Any) -> No Args: lookup: The variable value lookup that is not resolved. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. """ self.lookup = lookup diff --git a/runway/lookups/handlers/base.py b/runway/lookups/handlers/base.py index 9238265c8..d651c08b5 100644 --- a/runway/lookups/handlers/base.py +++ b/runway/lookups/handlers/base.py @@ -4,18 +4,9 @@ import json import logging -from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - Dict, - Optional, - Sequence, - Set, - Tuple, - Union, - cast, -) +from abc import ABC, abstractmethod +from collections.abc import Sequence +from typing import TYPE_CHECKING, Any, ClassVar, Generic, TypedDict, TypeVar, cast, overload import yaml from troposphere import BaseAWSObject @@ -31,22 +22,43 @@ LOGGER = logging.getLogger(__name__) +ContextTypeVar = TypeVar( + "ContextTypeVar", "CfnginContext", "RunwayContext", "CfnginContext | RunwayContext" +) +"""Type variable for context type.""" + + TransformToTypeLiteral = Literal["bool", "str"] -def str2bool(v: str): +def str2bool(v: str) -> bool: """Return boolean value of string.""" return v.lower() in ("yes", "true", "t", "1", "on", "y") -class LookupHandler: +class ParsedArgsTypeDef(TypedDict, total=False): + """Partial type definition for the args returned by :meth:`LookupHandler.parse`. + + This class can be subclassed to model all expected arguments if needed. + + """ + + default: str + get: str + indent: str + load: Literal["json", "troposphere", "yaml"] + region: str + transform: TransformToTypeLiteral + + +class LookupHandler(ABC, Generic[ContextTypeVar]): """Base class for lookup handlers.""" TYPE_NAME: ClassVar[str] """Name that the Lookup is registered as.""" @classmethod - def dependencies(cls, __lookup_query: VariableValue) -> Set[str]: + def dependencies(cls, __lookup_query: VariableValue) -> set[str]: """Calculate any dependencies required to perform this lookup. Note that lookup_query may not be (completely) resolved at this time. @@ -58,9 +70,9 @@ def dependencies(cls, __lookup_query: VariableValue) -> Set[str]: def format_results( cls, value: Any, - get: Optional[str] = None, - load: Optional[str] = None, - transform: Optional[TransformToTypeLiteral] = None, + get: str | None = None, + load: str | None = None, + transform: TransformToTypeLiteral | None = None, **kwargs: Any, ) -> Any: """Format results to be returned by a lookup. @@ -72,6 +84,7 @@ def format_results( and ``transform`` method. transform: Convert the final value to a different data type before returning it. + **kwargs: Arbitrary keyword arguments. 
Raises: TypeError: If ``get`` is provided but the value is not a @@ -95,9 +108,7 @@ elif isinstance(value, dict): value = value.get(get) else: - raise TypeError( - f'value must be dict type to use "get"; got type "{type(value)}"' - ) + raise TypeError(f'value must be dict type to use "get"; got type "{type(value)}"') if ( isinstance(value, str) and value.lower() in ["none", "null"] @@ -111,19 +122,25 @@ return value.data return value + @overload @classmethod + @abstractmethod def handle( - cls, - __value: str, - context: Union[CfnginContext, RunwayContext], - *__args: Any, - provider: Optional[Provider] = None, - **__kwargs: Any, - ) -> Any: + cls, value: str, context: ContextTypeVar, *, provider: Provider, **_kwargs: Any ) -> Any: ... + + @overload + @classmethod + @abstractmethod + def handle(cls, value: str, context: ContextTypeVar, **_kwargs: Any) -> Any: ... + + @classmethod + @abstractmethod + def handle(cls, value: str, context: ContextTypeVar, **_kwargs: Any) -> Any: """Perform the lookup. Args: - __value: Parameter(s) given to the lookup. + value: Parameter(s) given to the lookup. context: The current context object. provider: CFNgin AWS provider. """ raise NotImplementedError @classmethod - def parse(cls, value: str) -> Tuple[str, Dict[str, str]]: + def parse(cls, value: str) -> tuple[str, ParsedArgsTypeDef]: """Parse the value passed to a lookup in a standardized way. Args: @@ -146,12 +163,12 @@ colon_split = raw_value.split("::", 1) query = colon_split.pop(0) - args: Dict[str, str] = cls._parse_args(colon_split[0]) if colon_split else {} + args = cls._parse_args(colon_split[0]) if colon_split else {} - return query, args + return query, cast(ParsedArgsTypeDef, args) @classmethod - def _parse_args(cls, args: str) -> Dict[str, str]: + def _parse_args(cls, args: str) -> dict[str, str]: """Convert a string into an args dict. Each arg should be separated by ``,``. The key and value should @@ -167,12 +184,11 @@ split_args = args.split(",") return { - key.strip(): value.strip() - for key, value in [arg.split("=", 1) for arg in split_args] + key.strip(): value.strip() for key, value in [arg.split("=", 1) for arg in split_args] } @classmethod - def load(cls, value: Any, parser: Optional[str] = None, **kwargs: Any) -> Any: + def load(cls, value: Any, parser: str | None = None, **kwargs: Any) -> Any: """Load a formatted string or object into a python data type. First action taken in :meth:`format_results`. @@ -183,6 +199,7 @@ Args: value: What is being loaded. parser: Name of the parser to use. + **kwargs: Arbitrary keyword arguments. Returns: The loaded value. @@ -257,7 +274,7 @@ def transform( cls, value: Any, *, - to_type: Optional[TransformToTypeLiteral] = "str", + to_type: TransformToTypeLiteral | None = "str", **kwargs: Any, ) -> Any: """Transform the result of a lookup into another datatype. @@ -270,6 +287,7 @@ Args: value: What is to be transformed. to_type: The type the value will be transformed into. + **kwargs: Arbitrary keyword arguments. Returns: The transformed value.
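A note from the editor, not part of the diff: ``parse`` and ``_parse_args`` implement the ``<query>::<key>=<value>,<key>=<value>`` convention shared by all lookups, which is why the return type could tighten to ``ParsedArgsTypeDef``. A self-contained equivalent of the two methods:

from __future__ import annotations


def parse_demo(value: str) -> tuple[str, dict[str, str]]:
    """Split a raw lookup string into its query and an args dict."""
    query, _, raw_args = value.strip().partition("::")
    if not raw_args:
        return query, {}
    return query, {
        key.strip(): val.strip()
        for key, val in (arg.split("=", 1) for arg in raw_args.split(","))
    }


assert parse_demo("my-stack.Output::region=us-east-1,transform=str") == (
    "my-stack.Output",
    {"region": "us-east-1", "transform": "str"},
)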
@@ -280,7 +298,7 @@ def transform(
         if not to_type:
             return value

-        return mapping[to_type](value, **kwargs)  # type: ignore
+        return mapping[to_type](value, **kwargs)

     @classmethod
     def _transform_to_bool(cls, value: Any, **_: Any) -> bool:
@@ -325,9 +343,7 @@ def _transform_to_string(
             value = value.data
         if isinstance(value, dict):
             # dumped twice for an escaped json dict
-            return json.dumps(
-                json.dumps(cast(Dict[str, Any], value), indent=int(indent))
-            )
+            return json.dumps(json.dumps(cast("dict[str, Any]", value), indent=int(indent)))
         if isinstance(value, bool):
             return json.dumps(str(value))
         return str(value)
diff --git a/runway/lookups/handlers/cfn.py b/runway/lookups/handlers/cfn.py
index 6a7de8594..7c9d65ff9 100644
--- a/runway/lookups/handlers/cfn.py
+++ b/runway/lookups/handlers/cfn.py
@@ -6,15 +6,13 @@

 """

-# pyright: reportIncompatibleMethodOverride=none
 from __future__ import annotations

 import json
 import logging
-from typing import TYPE_CHECKING, Any, Dict, NamedTuple, Optional, Union, cast
+from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple, cast

 from botocore.exceptions import ClientError
-from typing_extensions import Final, Literal

 from ...cfngin.exceptions import StackDoesNotExist
 from ...exceptions import OutputDoesNotExist
@@ -25,6 +23,7 @@
     from ...cfngin.providers.aws.default import Provider
     from ...context import CfnginContext, RunwayContext

+    from .base import ParsedArgsTypeDef

 LOGGER = logging.getLogger(__name__)

@@ -36,14 +35,14 @@ class OutputQuery(NamedTuple):
     output_name: str


-class CfnLookup(LookupHandler):
+class CfnLookup(LookupHandler["CfnginContext | RunwayContext"]):
     """CloudFormation Stack Output lookup."""

-    TYPE_NAME: Final[Literal["cfn"]] = "cfn"
+    TYPE_NAME: ClassVar[str] = "cfn"
     """Name that the Lookup is registered as."""

     @staticmethod
-    def should_use_provider(args: Dict[str, str], provider: Optional[Provider]) -> bool:
+    def should_use_provider(args: ParsedArgsTypeDef, provider: Provider | None) -> bool:
         """Determine if the provider should be used for the lookup.

         This will only happen when the lookup is used with CFNgin.
@@ -54,7 +53,7 @@ def should_use_provider(args: Dict[str, str], provider: Optional[Provider]) -> bool:

         """
         if provider:
-            if args.get("region") and provider.region != args["region"]:
+            if "region" in args and provider.region != args["region"]:
                 LOGGER.debug("not using provider; requested region does not match")
                 return False
             LOGGER.debug("using provider")
@@ -81,12 +80,12 @@ def get_stack_output(client: CloudFormationClient, query: OutputQuery) -> str:
         return outputs[query.output_name]

     @classmethod
-    def handle(  # pylint: disable=arguments-differ
+    def handle(
         cls,
         value: str,
-        context: Union[CfnginContext, RunwayContext],
+        context: CfnginContext | RunwayContext,
         *,
-        provider: Optional[Provider] = None,
+        provider: Provider | None = None,
         **_: Any,
     ) -> Any:
         """Retrieve a value from CloudFormation Stack outputs.
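
To illustrate the ABC/generic pattern that ``CfnLookup`` now follows, a custom lookup could be declared as below; ``UpperLookup`` and its behavior are hypothetical, not part of this patch:

    from __future__ import annotations

    from typing import TYPE_CHECKING, Any, ClassVar

    from runway.lookups.handlers.base import LookupHandler

    if TYPE_CHECKING:
        from runway.context import CfnginContext, RunwayContext


    class UpperLookup(LookupHandler["CfnginContext | RunwayContext"]):
        """Return the lookup query uppercased (illustrative only)."""

        TYPE_NAME: ClassVar[str] = "upper"

        @classmethod
        def handle(cls, value: str, context: CfnginContext | RunwayContext, **_kwargs: Any) -> Any:
            # parse splits the query from its args; format_results applies
            # any get/load/transform args uniformly across lookups.
            query, args = cls.parse(value)
            return cls.format_results(query.upper(), **args)
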
@@ -117,13 +116,9 @@ def handle( # pylint: disable=arguments-differ # args for testing to function correctly if cls.should_use_provider(args.copy(), provider): # this will only happen when used from cfngin - result = cast("Provider", provider).get_output( - query.stack_name, query.output_name - ) + result = cast("Provider", provider).get_output(query.stack_name, query.output_name) else: - cfn_client = context.get_session(region=args.get("region")).client( - "cloudformation" - ) + cfn_client = context.get_session(region=args.get("region")).client("cloudformation") result = cls.get_stack_output(cfn_client, query) except (ClientError, KeyError, StackDoesNotExist) as exc: # StackDoesNotExist is only raised by provider diff --git a/runway/lookups/handlers/ecr.py b/runway/lookups/handlers/ecr.py index 7625c31c2..db471408e 100644 --- a/runway/lookups/handlers/ecr.py +++ b/runway/lookups/handlers/ecr.py @@ -4,9 +4,7 @@ import base64 import logging -from typing import TYPE_CHECKING, Any, Union - -from typing_extensions import Final, Literal +from typing import TYPE_CHECKING, Any, ClassVar from ...lookups.handlers.base import LookupHandler @@ -18,10 +16,10 @@ LOGGER = logging.getLogger(__name__) -class EcrLookup(LookupHandler): +class EcrLookup(LookupHandler["CfnginContext | RunwayContext"]): """ECR Lookup.""" - TYPE_NAME: Final[Literal["ecr"]] = "ecr" + TYPE_NAME: ClassVar[str] = "ecr" """Name that the Lookup is registered as.""" @staticmethod @@ -35,13 +33,7 @@ def get_login_password(client: ECRClient) -> str: return password @classmethod - def handle( # pylint: disable=arguments-differ - cls, - value: str, - context: Union[CfnginContext, RunwayContext], - *__args: Any, - **__kwargs: Any, - ) -> Any: + def handle(cls, value: str, context: CfnginContext | RunwayContext, **_kwargs: Any) -> Any: """Retrieve a value from AWS Elastic Container Registry (ECR). Args: diff --git a/runway/lookups/handlers/env.py b/runway/lookups/handlers/env.py index 1ae48e21d..d64fe0c9f 100644 --- a/runway/lookups/handlers/env.py +++ b/runway/lookups/handlers/env.py @@ -1,32 +1,24 @@ """Retrieve a value from an environment variable.""" -# pyright: reportIncompatibleMethodOverride=none from __future__ import annotations -from typing import TYPE_CHECKING, Any, Union - -from typing_extensions import Final, Literal +from typing import TYPE_CHECKING, Any, ClassVar from .base import LookupHandler if TYPE_CHECKING: + from ...context import CfnginContext, RunwayContext -class EnvLookup(LookupHandler): +class EnvLookup(LookupHandler["CfnginContext | RunwayContext"]): """Environment variable Lookup.""" - TYPE_NAME: Final[Literal["env"]] = "env" + TYPE_NAME: ClassVar[str] = "env" """Name that the Lookup is registered as.""" @classmethod - def handle( # pylint: disable=arguments-differ - cls, - value: str, - context: Union[CfnginContext, RunwayContext], - *__args: Any, - **__kwargs: Any, - ) -> Any: + def handle(cls, value: str, context: CfnginContext | RunwayContext, **_kwargs: Any) -> Any: """Retrieve an environment variable. 
The value is retrieved from a copy of the current environment variables diff --git a/runway/lookups/handlers/random_string.py b/runway/lookups/handlers/random_string.py index 1b1372b67..151405109 100644 --- a/runway/lookups/handlers/random_string.py +++ b/runway/lookups/handlers/random_string.py @@ -1,20 +1,18 @@ """Generate a random string.""" -# pyright: reportIncompatibleMethodOverride=none from __future__ import annotations import logging import secrets import string -from typing import TYPE_CHECKING, Any, Callable, List, Sequence, Union - -from typing_extensions import Final, Literal +from typing import TYPE_CHECKING, Any, Callable, ClassVar from ...utils import BaseModel from .base import LookupHandler if TYPE_CHECKING: - from ...context import CfnginContext, RunwayContext + from collections.abc import Sequence + LOGGER = logging.getLogger(__name__) @@ -28,10 +26,10 @@ class ArgsDataModel(BaseModel): uppercase: bool = True -class RandomStringLookup(LookupHandler): +class RandomStringLookup(LookupHandler[Any]): """Random string lookup.""" - TYPE_NAME: Final[Literal["random.string"]] = "random.string" + TYPE_NAME: ClassVar[str] = "random.string" """Name that the Lookup is registered as.""" @staticmethod @@ -83,7 +81,7 @@ def ensure_has_one_of(cls, args: ArgsDataModel, value: str) -> bool: value: Value to check. """ - checks: List[Callable[[str], bool]] = [] + checks: list[Callable[[str], bool]] = [] if args.digits: checks.append(cls.has_digit) if args.lowercase: @@ -95,18 +93,11 @@ def ensure_has_one_of(cls, args: ArgsDataModel, value: str) -> bool: return sum(c(value) for c in checks) == len(checks) @classmethod - def handle( # pylint: disable=arguments-differ - cls, - value: str, - context: Union[CfnginContext, RunwayContext], - *__args: Any, - **__kwargs: Any, - ) -> Any: + def handle(cls, value: str, *_args: Any, **_kwargs: Any) -> Any: """Generate a random string. Args: value: The value passed to the Lookup. - context: The current context object. 
Raises: ValueError: Unable to find a value for the provided query and @@ -115,7 +106,7 @@ def handle( # pylint: disable=arguments-differ """ raw_length, raw_args = cls.parse(value) length = int(raw_length) - args = ArgsDataModel.parse_obj(raw_args) + args = ArgsDataModel.model_validate(raw_args) char_set = cls.calculate_char_set(args) while True: result = cls.generate_random_string(char_set, length) diff --git a/runway/lookups/handlers/ssm.py b/runway/lookups/handlers/ssm.py index 07db5a1aa..8537db214 100644 --- a/runway/lookups/handlers/ssm.py +++ b/runway/lookups/handlers/ssm.py @@ -3,32 +3,26 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Union - -from typing_extensions import Final, Literal +from typing import TYPE_CHECKING, Any, ClassVar from ...lookups.handlers.base import LookupHandler if TYPE_CHECKING: + from mypy_boto3_ssm.type_defs import ParameterTypeDef + from ...context import CfnginContext, RunwayContext LOGGER = logging.getLogger(__name__) -class SsmLookup(LookupHandler): +class SsmLookup(LookupHandler["CfnginContext | RunwayContext"]): """SSM Parameter Store Lookup.""" - TYPE_NAME: Final[Literal["ssm"]] = "ssm" + TYPE_NAME: ClassVar[str] = "ssm" """Name that the Lookup is registered as.""" @classmethod - def handle( # pylint: disable=arguments-differ - cls, - value: str, - context: Union[CfnginContext, RunwayContext], - *__args: Any, - **__kwargs: Any, - ) -> Any: + def handle(cls, value: str, context: CfnginContext | RunwayContext, **_kwargs: Any) -> Any: """Retrieve a value from SSM Parameter Store. Args: @@ -46,14 +40,9 @@ def handle( # pylint: disable=arguments-differ client = session.client("ssm") try: - response = client.get_parameter(Name=query, WithDecryption=True)[ - "Parameter" - ] return cls.format_results( - ( - response["Value"].split(",") - if response["Type"] == "StringList" - else response["Value"] + cls._handle_get_parameter( + client.get_parameter(Name=query, WithDecryption=True)["Parameter"] ), **args, ) @@ -62,3 +51,13 @@ def handle( # pylint: disable=arguments-differ args.pop("load", None) # don't load a default value return cls.format_results(args.pop("default"), **args) raise + + @staticmethod + def _handle_get_parameter(parameter: ParameterTypeDef) -> list[str] | str | None: + """Handle the return value of ``get_parameter``.""" + if "Value" not in parameter: + return None + value = parameter["Value"] + if parameter.get("Type") == "StringList": + return value.split(",") + return value diff --git a/runway/lookups/handlers/var.py b/runway/lookups/handlers/var.py index 40cf0fe84..ee6f98c60 100644 --- a/runway/lookups/handlers/var.py +++ b/runway/lookups/handlers/var.py @@ -1,16 +1,14 @@ """Retrieve a variable from the variables file or definition.""" -# pyright: reportIncompatibleMethodOverride=none from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any - -from typing_extensions import Final, Literal +from typing import TYPE_CHECKING, Any, ClassVar from .base import LookupHandler if TYPE_CHECKING: + from ...utils import MutableMap @@ -18,16 +16,14 @@ TYPE_NAME = "var" -class VarLookup(LookupHandler): +class VarLookup(LookupHandler[Any]): """Variable definition Lookup.""" - TYPE_NAME: Final[Literal["var"]] = "var" + TYPE_NAME: ClassVar[str] = "var" """Name that the Lookup is registered as.""" @classmethod - def handle( # pylint: disable=arguments-differ - cls, value: str, *__args: Any, variables: MutableMap, **__kwargs: Any - ) -> Any: + def handle(cls, value: str, *_args: 
Any, variables: MutableMap, **_kwargs: Any) -> Any: """Retrieve a variable from the variable definition. The value is retrieved from the variables passed to Runway using diff --git a/runway/lookups/registry.py b/runway/lookups/registry.py index d4b956f71..4103397ca 100644 --- a/runway/lookups/registry.py +++ b/runway/lookups/registry.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Dict, Type, Union, cast +from typing import Any, cast from ..utils import load_object_from_string from .handlers.base import LookupHandler @@ -14,12 +14,12 @@ from .handlers.ssm import SsmLookup from .handlers.var import VarLookup -RUNWAY_LOOKUP_HANDLERS: Dict[str, Type[LookupHandler]] = {} +RUNWAY_LOOKUP_HANDLERS: dict[str, type[LookupHandler[Any]]] = {} LOGGER = logging.getLogger(__name__) def register_lookup_handler( - lookup_type: str, handler_or_path: Union[str, Type[LookupHandler]] + lookup_type: str, handler_or_path: str | type[LookupHandler[Any]] ) -> None: """Register a lookup handler. @@ -39,7 +39,7 @@ def register_lookup_handler( if issubclass(handler, LookupHandler): RUNWAY_LOOKUP_HANDLERS[lookup_type] = handler return - except Exception: # pylint: disable=broad-except + except Exception: # noqa: BLE001 LOGGER.debug("failed to validate lookup handler", exc_info=True) raise TypeError( f"lookup {handler_or_path} must be a subclass of " diff --git a/runway/mixins.py b/runway/mixins.py index 2cb3a855d..79958ff74 100644 --- a/runway/mixins.py +++ b/runway/mixins.py @@ -6,26 +6,17 @@ import platform import shutil import subprocess +from collections.abc import Iterable from contextlib import suppress -from typing import ( - TYPE_CHECKING, - ClassVar, - Dict, - Iterable, - List, - Optional, - Union, - cast, - overload, -) - -from typing_extensions import Literal +from typing import TYPE_CHECKING, ClassVar, cast, overload from .compat import shlex_join if TYPE_CHECKING: from pathlib import Path + from typing_extensions import Literal + from ._logging import RunwayLogger from .context import CfnginContext, RunwayContext @@ -38,7 +29,7 @@ class CliInterfaceMixin: EXECUTABLE: ClassVar[str] """CLI executable.""" - ctx: Union[CfnginContext, RunwayContext] + ctx: CfnginContext | RunwayContext """CFNgin or Runway context object.""" cwd: Path @@ -52,21 +43,20 @@ def convert_to_cli_arg(arg_name: str, *, prefix: str = "--") -> str: @classmethod def found_in_path(cls) -> bool: """Determine if executable is found in $PATH.""" - if shutil.which(cls.EXECUTABLE): - return True - return False + return bool(shutil.which(cls.EXECUTABLE)) @classmethod def generate_command( cls, - command: Union[List[str], str], - **kwargs: Optional[Union[bool, Iterable[str], str]], - ) -> List[str]: + command: list[str] | str, + **kwargs: bool | Iterable[str] | str | None, + ) -> list[str]: """Generate command to be executed and log it. Args: command: Command to run. args: Additional args to pass to the command. + **kwargs: Arbitrary keyword arguments. Returns: The full command to be passed into a subprocess. 
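
A sketch of how a subclass consumes the mixin's kwarg handling; ``TerraformCli`` is a hypothetical name, and the expected output assumes the kwarg branches in the runway source (a ``str`` value becomes ``--flag value``, ``True`` becomes a bare ``--flag``, and ``convert_to_cli_arg`` kebab-cases names):

    from runway.mixins import CliInterfaceMixin


    class TerraformCli(CliInterfaceMixin):
        """Hypothetical CLI wrapper used only for illustration."""

        EXECUTABLE = "terraform"


    print(TerraformCli.generate_command("plan", no_color=True, var_file="prod.tfvars"))
    # expected: ["terraform", "plan", "--no-color", "--var-file", "prod.tfvars"]
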
@@ -79,10 +69,10 @@ def generate_command( @classmethod def _generate_command_handle_kwargs( - cls, **kwargs: Optional[Union[bool, Iterable[str], str]] - ) -> List[str]: + cls, **kwargs: bool | Iterable[str] | str | None + ) -> list[str]: """Handle kwargs passed to generate_command.""" - result: List[str] = [] + result: list[str] = [] for k, v in kwargs.items(): if isinstance(v, str): result.extend([cls.convert_to_cli_arg(k), v]) @@ -107,28 +97,28 @@ def list2cmdline(split_command: Iterable[str]) -> str: @overload def _run_command( self, - command: Union[Iterable[str], str], + command: Iterable[str] | str, *, - env: Optional[Dict[str, str]] = ..., + env: dict[str, str] | None = ..., suppress_output: Literal[True] = ..., ) -> str: ... @overload def _run_command( self, - command: Union[Iterable[str], str], + command: Iterable[str] | str, *, - env: Optional[Dict[str, str]] = ..., + env: dict[str, str] | None = ..., suppress_output: Literal[False] = ..., ) -> None: ... def _run_command( self, - command: Union[Iterable[str], str], + command: Iterable[str] | str, *, - env: Optional[Dict[str, str]] = None, + env: dict[str, str] | None = None, suppress_output: bool = True, - ) -> Optional[str]: + ) -> str | None: """Run command. Args: diff --git a/runway/module/base.py b/runway/module/base.py index a5cb0fe30..3f59f639d 100644 --- a/runway/module/base.py +++ b/runway/module/base.py @@ -4,40 +4,58 @@ import logging import subprocess -from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast +from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast from ..exceptions import NpmNotFound from ..utils import which from .utils import NPM_BIN, format_npm_command_for_logging, use_npm_ci if TYPE_CHECKING: + from pathlib import Path + from .._logging import PrefixAdaptor, RunwayLogger from ..context import RunwayContext LOGGER = cast("RunwayLogger", logging.getLogger(__name__)) -class RunwayModule: +class ModuleOptions: + """Base class for Runway module options.""" + + def get(self, name: str, default: Any = None) -> Any: + """Get a value or return the default.""" + return getattr(self, name, default) + + def __eq__(self, other: object) -> bool: + """Assess equality.""" + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + +_ModuleOptionsTypeVar = TypeVar("_ModuleOptionsTypeVar", bound="ModuleOptions | dict[str, Any]") + + +class RunwayModule(Generic[_ModuleOptionsTypeVar]): """Base class for Runway modules.""" ctx: RunwayContext - explicitly_enabled: Optional[bool] - logger: Union[PrefixAdaptor, RunwayLogger] + explicitly_enabled: bool | None + logger: PrefixAdaptor | RunwayLogger name: str - options: Union[Dict[str, Any], ModuleOptions] + options: _ModuleOptionsTypeVar region: str def __init__( self, context: RunwayContext, *, - explicitly_enabled: Optional[bool] = False, + explicitly_enabled: bool | None = False, logger: RunwayLogger = LOGGER, module_root: Path, - name: Optional[str] = None, - options: Optional[Union[Dict[str, Any], ModuleOptions]] = None, - parameters: Optional[Dict[str, Any]] = None, + name: str | None = None, + options: _ModuleOptionsTypeVar | None = None, + parameters: dict[str, Any] | None = None, **_: Any, ) -> None: """Instantiate class. 
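
With ``RunwayModule`` now generic over its options type, a subclass can pin ``self.options`` to a concrete ``ModuleOptions`` subclass for type checking; a minimal sketch (``MyOptions``/``MyModule`` are hypothetical names):

    from __future__ import annotations

    from runway.module.base import ModuleOptions, RunwayModule


    class MyOptions(ModuleOptions):
        """Illustrative options container."""

        def __init__(self, skip_build: bool = False) -> None:
            self.skip_build = skip_build


    class MyModule(RunwayModule[MyOptions]):
        """Illustrative module; self.options is typed as MyOptions."""

        def deploy(self) -> None:
            # ModuleOptions.get falls back to the default for unset attributes
            if not self.options.get("skip_build", False):
                ...
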
@@ -92,19 +110,19 @@ def __getitem__(self, key: str) -> Any: return getattr(self, key) -class RunwayModuleNpm(RunwayModule): # pylint: disable=abstract-method +class RunwayModuleNpm(RunwayModule[_ModuleOptionsTypeVar]): """Base class for Runway modules that use npm.""" def __init__( self, context: RunwayContext, *, - explicitly_enabled: Optional[bool] = False, + explicitly_enabled: bool | None = False, logger: RunwayLogger = LOGGER, module_root: Path, - name: Optional[str] = None, - options: Optional[Union[Dict[str, Any], ModuleOptions]] = None, - parameters: Optional[Dict[str, Any]] = None, + name: str | None = None, + options: _ModuleOptionsTypeVar | None = None, + parameters: dict[str, Any] | None = None, **_: Any, ) -> None: """Instantiate class. @@ -136,7 +154,7 @@ def __init__( self.check_for_npm(logger=self.logger) # fail fast self.warn_on_boto_env_vars(self.ctx.env.vars, logger=logger) - def log_npm_command(self, command: List[str]) -> None: + def log_npm_command(self, command: list[str]) -> None: """Log an npm command that is going to be run. Args: @@ -174,9 +192,7 @@ def package_json_missing(self) -> bool: return False @staticmethod - def check_for_npm( - *, logger: Union[logging.Logger, PrefixAdaptor, RunwayLogger] = LOGGER - ) -> None: + def check_for_npm(*, logger: logging.Logger | PrefixAdaptor | RunwayLogger = LOGGER) -> None: """Ensure npm is installed and in the current path. Args: @@ -192,9 +208,9 @@ def check_for_npm( @staticmethod def warn_on_boto_env_vars( - env_vars: Dict[str, str], + env_vars: dict[str, str], *, - logger: Union[logging.Logger, PrefixAdaptor, RunwayLogger] = LOGGER, + logger: logging.Logger | PrefixAdaptor | RunwayLogger = LOGGER, ) -> None: """Inform user if boto-specific environment variables are in use. @@ -210,17 +226,3 @@ def warn_on_boto_env_vars( "during use of nodejs-based module and AWS_PROFILE is " "not set -- you likely want to set AWS_PROFILE instead" ) - - -class ModuleOptions: - """Base class for Runway module options.""" - - def get(self, name: str, default: Any = None) -> Any: - """Get a value or return the default.""" - return getattr(self, name, default) - - def __eq__(self, other: Any) -> bool: - """Assess equality.""" - if isinstance(other, self.__class__): - return self.__dict__ == other.__dict__ - return False diff --git a/runway/module/cdk.py b/runway/module/cdk.py index 35f1300d3..65c066e5b 100644 --- a/runway/module/cdk.py +++ b/runway/module/cdk.py @@ -6,8 +6,7 @@ import platform import subprocess import sys -from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast +from typing import TYPE_CHECKING, Any, cast from typing_extensions import Literal @@ -19,6 +18,8 @@ from .utils import generate_node_command, run_module_command if TYPE_CHECKING: + from pathlib import Path + from .._logging import RunwayLogger from ..context import RunwayContext @@ -39,7 +40,41 @@ ] -class CloudDevelopmentKit(RunwayModuleNpm): +class CloudDevelopmentKitOptions(ModuleOptions): + """Module options for AWS Cloud Development Kit. + + Attributes: + build_steps: A list of commands to be executed before each action (e.g. + diff, deploy, destroy). + data: Options parsed into a data model. + skip_npm_ci: Skip running ``npm ci`` in the module directory prior to + processing the module. + + """ + + def __init__(self, data: RunwayCdkModuleOptionsDataModel) -> None: + """Instantiate class. + + Args: + data: Options parsed into a data model. 
+ + """ + self.build_steps = data.build_steps + self.data = data + self.skip_npm_ci = data.skip_npm_ci + + @classmethod + def parse_obj(cls, obj: object) -> CloudDevelopmentKitOptions: + """Parse options definition and return an options object. + + Args: + obj: Object to parse. + + """ + return cls(data=RunwayCdkModuleOptionsDataModel.model_validate(obj)) + + +class CloudDevelopmentKit(RunwayModuleNpm[CloudDevelopmentKitOptions]): """CDK Runway Module.""" DEPRECATION_MSG = ( @@ -47,18 +82,16 @@ class CloudDevelopmentKit(RunwayModuleNpm): "may be removed in the next major release." ) - options: CloudDevelopmentKitOptions - def __init__( self, context: RunwayContext, *, - explicitly_enabled: Optional[bool] = False, + explicitly_enabled: bool | None = False, logger: RunwayLogger = LOGGER, module_root: Path, - name: Optional[str] = None, - options: Optional[Union[Dict[str, Any], ModuleOptions]] = None, - parameters: Optional[Dict[str, Any]] = None, + name: str | None = None, + options: dict[str, Any] | ModuleOptions | None = None, + parameters: dict[str, Any] | None = None, **_: Any, ) -> None: """Instantiate class. @@ -92,9 +125,9 @@ def __init__( LOGGER.warning("%s:%s", self.name, self.DEPRECATION_MSG) @cached_property - def cli_args(self) -> List[str]: + def cli_args(self) -> list[str]: """Generate CLI args from self used in all CDK commands.""" - result: List[str] = [] + result: list[str] = [] if self.ctx.no_color: result.append("--no-color") if self.ctx.env.debug: @@ -104,9 +137,9 @@ def cli_args(self) -> List[str]: return result @cached_property - def cli_args_context(self) -> List[str]: + def cli_args_context(self) -> list[str]: """Generate CLI args from self passed to CDK commands as ``--context``.""" - result: List[str] = [] + result: list[str] = [] args = {"environment": self.ctx.env.name} args.update(self.parameters) for key, val in args.items(): @@ -157,7 +190,7 @@ def cdk_destroy(self) -> None: ) self.logger.info("destroy (complete)") - def cdk_diff(self, stack_name: Optional[str] = None) -> None: + def cdk_diff(self, stack_name: str | None = None) -> None: """Execute ``cdk diff`` command.""" self.logger.info("plan (in progress)") try: @@ -179,11 +212,11 @@ def cdk_diff(self, stack_name: Optional[str] = None) -> None: "is not enabled", stack_name, ) - # TODO raise error instead of sys.exit() when refactoring cli error handling + # TODO (kyle): raise error instead of sys.exit() when refactoring cli error handling sys.exit(exc.returncode) self.logger.info("plan (complete)") - def cdk_list(self) -> List[str]: + def cdk_list(self) -> list[str]: """Execute ``cdk list`` command.""" result = subprocess.check_output( self.gen_cmd("list", include_context=True), @@ -213,10 +246,10 @@ def destroy(self) -> None: def gen_cmd( self, command: CdkCommandTypeDef, - args_list: Optional[List[str]] = None, + args_list: list[str] | None = None, *, include_context: bool = False, - ) -> List[str]: + ) -> list[str]: """Generate and log a CDK command. This does not execute the command, only prepares it for use. @@ -231,7 +264,7 @@ def gen_cmd( The full command to be passed into a subprocess. """ - args = [command] + self.cli_args + args = [command, *self.cli_args] args.extend(args_list or []) if include_context: args.extend(self.cli_args_context) @@ -283,37 +316,3 @@ def run_build_steps(self) -> None: ) raise self.logger.info("build steps (complete)") - - -class CloudDevelopmentKitOptions(ModuleOptions): - """Module options for AWS Cloud Development Kit. 
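
A short sketch of the relocated options class in use, assuming the documented fields (``build_steps``, ``skip_npm_ci``) on the underlying data model:

    from runway.module.cdk import CloudDevelopmentKitOptions

    opts = CloudDevelopmentKitOptions.parse_obj(
        {"build_steps": ["npm run build"], "skip_npm_ci": True}
    )
    assert opts.build_steps == ["npm run build"]
    assert opts.skip_npm_ci is True
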
- - Attributes: - build_steps: A list of commands to be executed before each action (e.g. - diff, deploy, destroy). - data: Options parsed into a data model. - skip_npm_ci: Skip running ``npm ci`` in the module directory prior to - processing the module. - - """ - - def __init__(self, data: RunwayCdkModuleOptionsDataModel) -> None: - """Instantiate class. - - Args: - data: Options parsed into a data model. - - """ - self.build_steps = data.build_steps - self.data = data - self.skip_npm_ci = data.skip_npm_ci - - @classmethod - def parse_obj(cls, obj: object) -> CloudDevelopmentKitOptions: - """Parse options definition and return an options object. - - Args: - obj: Object to parse. - - """ - return cls(data=RunwayCdkModuleOptionsDataModel.parse_obj(obj)) diff --git a/runway/module/cloudformation.py b/runway/module/cloudformation.py index cf74037ac..3f943f215 100644 --- a/runway/module/cloudformation.py +++ b/runway/module/cloudformation.py @@ -3,14 +3,15 @@ from __future__ import annotations import logging -from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Optional, Union, cast +from typing import TYPE_CHECKING, Any, cast from .._logging import PrefixAdaptor from ..cfngin.cfngin import CFNgin from .base import RunwayModule if TYPE_CHECKING: + from pathlib import Path + from .._logging import RunwayLogger from ..context import RunwayContext from .base import ModuleOptions @@ -18,19 +19,19 @@ LOGGER = cast("RunwayLogger", logging.getLogger(__name__)) -class CloudFormation(RunwayModule): +class CloudFormation(RunwayModule["ModuleOptions | dict[str, Any]"]): """CloudFormation (CFNgin) Runway Module.""" def __init__( self, context: RunwayContext, *, - explicitly_enabled: Optional[bool] = False, + explicitly_enabled: bool | None = False, logger: RunwayLogger = LOGGER, module_root: Path, - name: Optional[str] = None, - options: Optional[Union[Dict[str, Any], ModuleOptions]] = None, - parameters: Optional[Dict[str, Any]] = None, + name: str | None = None, + options: dict[str, Any] | ModuleOptions | None = None, + parameters: dict[str, Any] | None = None, **_: Any, ) -> None: """Instantiate class. diff --git a/runway/module/k8s.py b/runway/module/k8s.py index 344327331..acfe30eda 100644 --- a/runway/module/k8s.py +++ b/runway/module/k8s.py @@ -6,14 +6,13 @@ import subprocess import sys from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast +from typing import TYPE_CHECKING, Any, cast from typing_extensions import Literal from .._logging import PrefixAdaptor from ..compat import cached_property from ..config.models.runway.options.k8s import RunwayK8sModuleOptionsDataModel -from ..core.components import DeployEnvironment from ..env_mgr.kbenv import KBEnvManager from ..exceptions import KubectlVersionNotSpecified from ..utils import which @@ -23,6 +22,7 @@ if TYPE_CHECKING: from .._logging import RunwayLogger from ..context import RunwayContext + from ..core.components import DeployEnvironment LOGGER = cast("RunwayLogger", logging.getLogger(__name__)) @@ -49,7 +49,103 @@ ] -class K8s(RunwayModule): +class K8sOptions(ModuleOptions): + """Module options for Kubernetes. + + Attributes: + data: Options parsed into a data model. + deploy_environment: Runway deploy environment object. + kubectl_version: Version of kubectl to use. + path: Module path. 
+
+    """
+
+    data: RunwayK8sModuleOptionsDataModel
+    deploy_environment: DeployEnvironment
+    kubectl_version: str | None
+    path: Path
+
+    def __init__(
+        self,
+        data: RunwayK8sModuleOptionsDataModel,
+        deploy_environment: DeployEnvironment,
+        path: Path,
+    ) -> None:
+        """Instantiate class.
+
+        Args:
+            data: Options parsed into a data model.
+            deploy_environment: Current deploy environment.
+            path: Module path.
+
+        """
+        self.data = data
+        self.env = deploy_environment
+        self.kubectl_version = data.kubectl_version
+        self.path = path
+
+    @cached_property
+    def kustomize_config(self) -> Path:
+        """Kustomize configuration file."""
+        return self.overlay_path / "kustomization.yaml"
+
+    @cached_property
+    def overlay_path(self) -> Path:
+        """Directory containing the kustomize overlay to use."""
+        if self.data.overlay_path:
+            return self.data.overlay_path
+        return self.get_overlay_dir(
+            path=self.path / "overlays",
+            environment=self.env.name,
+            region=self.env.aws_region,
+        )
+
+    @staticmethod
+    def gen_overlay_dirs(environment: str, region: str) -> list[str]:
+        """Generate possible overlay directories.
+
+        Prefers more explicit directory name but falls back to environment name only.
+
+        Args:
+            environment: Current deploy environment.
+            region: Current AWS region.
+
+        """
+        return [f"{environment}-{region}", environment]
+
+    @classmethod
+    def get_overlay_dir(cls, path: Path, environment: str, region: str) -> Path:
+        """Determine the overlay directory to use."""
+        overlay_dir = path
+        for name in cls.gen_overlay_dirs(environment, region):
+            overlay_dir = path / name
+            if (overlay_dir / "kustomization.yaml").is_file():
+                return overlay_dir
+        return overlay_dir
+
+    @classmethod
+    def parse_obj(
+        cls,
+        deploy_environment: DeployEnvironment,
+        obj: object,
+        path: Path | None = None,
+    ) -> K8sOptions:
+        """Parse options definition and return an options object.
+
+        Args:
+            deploy_environment: Current deploy environment.
+            obj: Object to parse.
+            path: Module path.
+
+        """
+        return cls(
+            data=RunwayK8sModuleOptionsDataModel.model_validate(obj),
+            deploy_environment=deploy_environment,
+            path=path or Path.cwd(),
+        )
+
+
+class K8s(RunwayModule[K8sOptions]):
     """Kubectl Runway Module."""

     DEPRECATION_MSG = (
@@ -57,18 +153,16 @@ class K8s(RunwayModule):
         "may be removed in the next major release."
     )

-    options: K8sOptions
-
     def __init__(
         self,
         context: RunwayContext,
         *,
-        explicitly_enabled: Optional[bool] = False,
+        explicitly_enabled: bool | None = False,
         logger: RunwayLogger = LOGGER,
         module_root: Path,
-        name: Optional[str] = None,
-        options: Optional[Union[Dict[str, Any], ModuleOptions]] = None,
-        parameters: Optional[Dict[str, Any]] = None,
+        name: str | None = None,
+        options: dict[str, Any] | ModuleOptions | None = None,
+        parameters: dict[str, Any] | None = None,
         **_: Any,
     ) -> None:
         """Instantiate class.
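
The overlay resolution shown above can be exercised directly; a minimal sketch of its behavior:

    from pathlib import Path

    from runway.module.k8s import K8sOptions

    # candidates prefer "<env>-<region>" and fall back to "<env>"
    assert K8sOptions.gen_overlay_dirs("prod", "us-east-1") == ["prod-us-east-1", "prod"]

    # get_overlay_dir returns the first candidate containing a
    # kustomization.yaml, or the last candidate if none exist
    overlay = K8sOptions.get_overlay_dir(Path("overlays"), "prod", "us-east-1")
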
@@ -116,9 +210,7 @@ def kubectl_bin(self) -> str: except KubectlVersionNotSpecified as exc: self.logger.verbose("kubectl version not specified; checking path") if not which("kubectl"): - self.logger.error( - "kubectl not available and a version to install not specified" - ) + self.logger.error("kubectl not available and a version to install not specified") self.logger.error(exc.message) sys.exit(1) return "kubectl" @@ -127,18 +219,14 @@ def kubectl_bin(self) -> str: def skip(self) -> bool: """Determine if the module should be skipped.""" if self.options.kustomize_config.is_file(): - LOGGER.info( - "processing kustomize overlay: %s", self.options.kustomize_config - ) + LOGGER.info("processing kustomize overlay: %s", self.options.kustomize_config) return False LOGGER.info( "skipped; kustomize overlay for this environment/region not" " found -- looking for one of: %s", ", ".join( str(self.path / "overlays" / i / "kustomization.yaml") - for i in self.options.gen_overlay_dirs( - self.ctx.env.name, self.ctx.env.aws_region - ) + for i in self.options.gen_overlay_dirs(self.ctx.env.name, self.ctx.env.aws_region) ), ) return True @@ -160,8 +248,8 @@ def destroy(self) -> None: def gen_cmd( self, command: KubectlCommandTypeDef, - args_list: Optional[List[str]] = None, - ) -> List[str]: + args_list: list[str] | None = None, + ) -> list[str]: """Generate and log a kubectl command. This does not execute the command, only prepares it for use. @@ -233,102 +321,4 @@ def plan(self) -> None: """Run kustomize build and display generated plan.""" if self.skip: return - self.logger.info( - "kustomized yaml generated by kubectl:\n\n%s", self.kubectl_kustomize() - ) - - -class K8sOptions(ModuleOptions): - """Module options for Kubernetes. - - Attributes: - data: Options parsed into a data model. - deploy_environment: Runway deploy environment object. - kubectl_version: Version of kubectl to use. - path: Module path. - - """ - - data: RunwayK8sModuleOptionsDataModel - deploy_environment: DeployEnvironment - kubectl_version: Optional[str] - path: Path - - def __init__( - self, - data: RunwayK8sModuleOptionsDataModel, - deploy_environment: DeployEnvironment, - path: Path, - ) -> None: - """Instantiate class. - - Args: - data: Options parsed into a data model. - deploy_environment: Current deploy environment. - path: Module path. - - """ - self.data = data - self.env = deploy_environment - self.kubectl_version = data.kubectl_version - self.path = path - - @cached_property - def kustomize_config(self) -> Path: - """Kustomize configuration file.""" - return self.overlay_path / "kustomization.yaml" - - @cached_property - def overlay_path(self) -> Path: - """Directory containing the kustomize overlay to use.""" - if self.data.overlay_path: - return self.data.overlay_path - return self.get_overlay_dir( - path=self.path / "overlays", - environment=self.env.name, - region=self.env.aws_region, - ) - - @staticmethod - def gen_overlay_dirs(environment: str, region: str) -> List[str]: - """Generate possible overlay directories. - - Prefers more explicit directory name but falls back to environment name only. - - Args: - environment: Current deploy environment. - region : Current AWS region. 
- - """ - return [f"{environment}-{region}", environment] - - @classmethod - def get_overlay_dir(cls, path: Path, environment: str, region: str) -> Path: - """Determine the overlay directory to use.""" - overlay_dir = path - for name in cls.gen_overlay_dirs(environment, region): - overlay_dir = path / name - if (overlay_dir / "kustomization.yaml").is_file(): - return overlay_dir - return overlay_dir - - @classmethod - def parse_obj( - cls, - deploy_environment: DeployEnvironment, - obj: object, - path: Optional[Path] = None, - ) -> K8sOptions: - """Parse options definition and return an options object. - - Args: - deploy_environment: Current deploy environment. - obj: Object to parse. - path: Module path. - - """ - return cls( - data=RunwayK8sModuleOptionsDataModel.parse_obj(obj), - deploy_environment=deploy_environment, - path=path or Path.cwd(), - ) + self.logger.info("kustomized yaml generated by kubectl:\n\n%s", self.kubectl_kustomize()) diff --git a/runway/module/serverless.py b/runway/module/serverless.py index 1cfaea6ed..5d44cd395 100644 --- a/runway/module/serverless.py +++ b/runway/module/serverless.py @@ -11,7 +11,7 @@ import tempfile import uuid from pathlib import Path -from typing import IO, TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union, cast +from typing import IO, TYPE_CHECKING, Any, Callable, cast import yaml @@ -34,34 +34,113 @@ LOGGER = cast("RunwayLogger", logging.getLogger(__name__)) -def gen_sls_config_files(stage: str, region: str) -> List[str]: +def gen_sls_config_files(stage: str, region: str) -> list[str]: """Generate possible SLS config files names.""" - names: List[str] = [] + names: list[str] = [] for ext in ["yml", "json"]: # Give preference to explicit stage-region files - names.append(os.path.join("env", f"{stage}-{region}.{ext}")) + names.append(os.path.join("env", f"{stage}-{region}.{ext}")) # noqa: PTH118 names.append(f"config-{stage}-{region}.{ext}") # Fallback to stage name only - names.append(os.path.join("env", f"{stage}.{ext}")) + names.append(os.path.join("env", f"{stage}.{ext}")) # noqa: PTH118 names.append(f"config-{stage}.{ext}") return names -class Serverless(RunwayModuleNpm): - """Serverless Runway Module.""" +class ServerlessOptions(ModuleOptions): + """Module options for Serverless Framework. - options: ServerlessOptions + Attributes: + data: Options parsed into a data model. + extend_serverless_yml: If provided, the value of this option will be + recursively merged into the module's Serverless config file. + promotezip: If provided, promote Serverless Framework generated zip files + between environments from a build AWS account. + skip_npm_ci: Skip running ``npm ci`` in the module directory prior to + processing the module. + + """ + + def __init__(self, data: RunwayServerlessModuleOptionsDataModel) -> None: + """Instantiate class. + + Args: + data: Options parsed into a data model. 
+ + """ + self._arg_parser = self._create_arg_parser() + cli_args, self._unknown_cli_args = self._arg_parser.parse_known_args(data.args.copy()) + self._cli_args = vars(cli_args) # convert argparse.Namespace to dict + + self.data = data + self.extend_serverless_yml = data.extend_serverless_yml + self.promotezip = data.promotezip + self.skip_npm_ci = data.skip_npm_ci + + @property + def args(self) -> list[str]: + """List of CLI arguments/options to pass to the Serverless Framework CLI.""" + known_args: list[str] = [] + for key, val in self._cli_args.items(): + if isinstance(val, str): + known_args.extend([f"--{key}", val]) + return known_args + self._unknown_cli_args + + def update_args(self, key: str, value: str) -> None: + """Update a known CLI argument. + + Args: + key: Dict key to be updated. + value: New value + + Raises: + KeyError: The key provided for update is not a known arg. + + """ + if key in self._cli_args: + self._cli_args[key] = value + else: + raise KeyError(key) + + @staticmethod + def _create_arg_parser() -> argparse.ArgumentParser: + """Create argparse parser to parse args. + + Used to pull arguments out of self.args when logic could change + depending on values provided. + + Returns: + argparse.ArgumentParser + + """ + parser = argparse.ArgumentParser() + parser.add_argument("-c", "--config", default=None) + return parser + + @classmethod + def parse_obj(cls, obj: object) -> ServerlessOptions: + """Parse options definition and return an options object. + + Args: + obj: Object to parse. + + """ + return cls(data=RunwayServerlessModuleOptionsDataModel.model_validate(obj)) + + +class Serverless(RunwayModuleNpm[ServerlessOptions]): + """Serverless Runway Module.""" def __init__( self, context: RunwayContext, *, - explicitly_enabled: Optional[bool] = False, + explicitly_enabled: bool | None = False, logger: RunwayLogger = LOGGER, module_root: Path, - name: Optional[str] = None, - options: Optional[Union[Dict[str, Any], ModuleOptions]] = None, - parameters: Optional[Dict[str, Any]] = None, + name: str | None = None, + options: dict[str, Any] | ModuleOptions | None = None, + parameters: dict[str, Any] | None = None, **_: Any, ) -> None: """Instantiate class. 
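
A sketch of the known-arg handling above: ``-c``/``--config`` is parsed out so ``update_args`` can replace it later, while unrecognized args pass through unchanged:

    from runway.module.serverless import ServerlessOptions

    opts = ServerlessOptions.parse_obj({"args": ["--config", "sls.yml", "--verbose"]})
    assert opts.args == ["--config", "sls.yml", "--verbose"]

    opts.update_args("config", "other.yml")
    assert opts.args == ["--config", "other.yml", "--verbose"]
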
@@ -94,7 +173,7 @@ def __init__( self.stage = self.ctx.env.name @property - def cli_args(self) -> List[str]: + def cli_args(self) -> list[str]: """Generate CLI args from self used in all Serverless commands.""" result = ["--region", self.region, "--stage", self.stage] if "DEBUG" in self.ctx.env.vars: @@ -102,7 +181,7 @@ def cli_args(self) -> List[str]: return result @cached_property - def env_file(self) -> Optional[Path]: + def env_file(self) -> Path | None: """Find the environment file for the module.""" for name in gen_sls_config_files(self.stage, self.region): test_path = self.path / name @@ -117,8 +196,7 @@ def skip(self) -> bool: if self.parameters or self.explicitly_enabled or self.env_file: return False self.logger.info( - "skipped; config file for this stage/region not found" - " -- looking for one of: %s", + "skipped; config file for this stage/region not found -- looking for one of: %s", ", ".join(gen_sls_config_files(self.stage, self.region)), ) else: @@ -158,12 +236,11 @@ def extend_serverless_yml(self, func: Callable[..., None]) -> None: self.logger.debug("removed temporary Serverless config") except OSError: self.logger.debug( - "encountered an error when trying to delete the " - "temporary Serverless config", + "encountered an error when trying to delete the temporary Serverless config", exc_info=True, ) - def gen_cmd(self, command: str, args_list: Optional[List[str]] = None) -> List[str]: + def gen_cmd(self, command: str, args_list: list[str] | None = None) -> list[str]: """Generate and log a Serverless command. This does not execute the command, only prepares it for use. @@ -176,7 +253,7 @@ def gen_cmd(self, command: str, args_list: Optional[List[str]] = None) -> List[s The full command to be passed into a subprocess. """ - args = [command] + self.cli_args + self.options.args + args = [command, *self.cli_args, *self.options.args] args.extend(args_list or []) if command not in ["remove", "package", "print"] and self.ctx.is_noninteractive: args.append("--conceal") # hide secrets from serverless output @@ -200,9 +277,7 @@ def gen_cmd(self, command: str, args_list: Optional[List[str]] = None) -> List[s command="sls", command_opts=args, path=self.path, logger=self.logger ) - def sls_deploy( - self, *, package: Optional[AnyPath] = None, skip_install: bool = False - ) -> None: + def sls_deploy(self, *, package: AnyPath | None = None, skip_install: bool = False) -> None: """Execute ``sls deploy`` command. Args: @@ -224,9 +299,9 @@ def sls_deploy( def sls_package( self, *, - output_path: Optional[AnyPathConstrained] = None, + output_path: AnyPathConstrained | None = None, skip_install: bool = False, - ) -> Optional[AnyPathConstrained]: + ) -> AnyPathConstrained | None: """Execute ``sls package`` command. Args: @@ -248,8 +323,8 @@ def sls_package( return output_path def sls_print( - self, *, item_path: Optional[str] = None, skip_install: bool = False - ) -> Dict[str, Any]: + self, *, item_path: str | None = None, skip_install: bool = False + ) -> dict[str, Any]: """Execute ``sls print`` command. 
Keyword Args: @@ -294,7 +369,7 @@ def sls_remove(self, *, skip_install: bool = False) -> None: self.npm_install() stack_missing = False # track output for acceptable error self.logger.info("destroy (in progress)") - with subprocess.Popen( + with subprocess.Popen( # noqa: SIM117 self.gen_cmd("remove"), bufsize=1, env=self.ctx.env.vars, @@ -353,9 +428,7 @@ def destroy(self) -> None: def init(self) -> None: """Run init.""" - self.logger.warning( - "init not currently supported for %s", self.__class__.__name__ - ) + self.logger.warning("init not currently supported for %s", self.__class__.__name__) def plan(self) -> None: """Entrypoint for Runway's plan action.""" @@ -368,9 +441,9 @@ class ServerlessArtifact: def __init__( self, context: RunwayContext, - config: Dict[str, Any], + config: dict[str, Any], *, - logger: Union[PrefixAdaptor, RunwayLogger] = LOGGER, + logger: PrefixAdaptor | RunwayLogger = LOGGER, package_path: AnyPath, path: AnyPath, ) -> None: @@ -388,33 +461,27 @@ def __init__( self.ctx = context self.config = config self.logger = logger - self.package_path = ( - Path(package_path) if isinstance(package_path, str) else package_path - ) + self.package_path = Path(package_path) if isinstance(package_path, str) else package_path self.path = Path(path) if isinstance(path, str) else path @cached_property - def source_hash(self) -> Dict[str, str]: + def source_hash(self) -> dict[str, str]: """File hash(es) of each service's source code.""" if self.config.get("package", {"": ""}).get("individually"): return { name: get_hash_of_files( - self.path / os.path.dirname(detail.get("handler")) + self.path / os.path.dirname(detail.get("handler")) # noqa: PTH120 ) for name, detail in self.config.get("functions", {}).items() } - directories: List[Dict[str, Union[List[str], str]]] = [] - for _name, detail in self.config.get("functions", {}).items(): - func_path = {"path": os.path.dirname(detail.get("handler"))} + directories: list[dict[str, list[str] | str | None]] | None = [] + for detail in self.config.get("functions", {}).values(): + func_path = {"path": os.path.dirname(detail.get("handler"))} # noqa: PTH120 if func_path not in directories: directories.append(func_path) if isinstance(self.config["service"], dict): # handle sls<3.0.0 potential service property object notation - return { - self.config["service"]["name"]: get_hash_of_files( - self.path, directories - ) - } + return {self.config["service"]["name"]: get_hash_of_files(self.path, directories)} return {self.config["service"]: get_hash_of_files(self.path, directories)} def sync_with_s3(self, bucket_name: str) -> None: @@ -452,86 +519,3 @@ def sync_with_s3(self, bucket_name: str) -> None: filename=str(file_path), session=session, ) - - -class ServerlessOptions(ModuleOptions): - """Module options for Serverless Framework. - - Attributes: - data: Options parsed into a data model. - extend_serverless_yml: If provided, the value of this option will be - recursively merged into the module's Serverless config file. - promotezip: If provided, promote Serverless Framework generated zip files - between environments from a build AWS account. - skip_npm_ci: Skip running ``npm ci`` in the module directory prior to - processing the module. - - """ - - def __init__(self, data: RunwayServerlessModuleOptionsDataModel) -> None: - """Instantiate class. - - Args: - data: Options parsed into a data model. 
- - """ - self._arg_parser = self._create_arg_parser() - cli_args, self._unknown_cli_args = self._arg_parser.parse_known_args( - data.args.copy() - ) - self._cli_args = vars(cli_args) # convert argparse.Namespace to dict - - self.data = data - self.extend_serverless_yml = data.extend_serverless_yml - self.promotezip = data.promotezip - self.skip_npm_ci = data.skip_npm_ci - - @property - def args(self) -> List[str]: - """List of CLI arguments/options to pass to the Serverless Framework CLI.""" - known_args: List[str] = [] - for key, val in self._cli_args.items(): - if isinstance(val, str): - known_args.extend([f"--{key}", val]) - return known_args + self._unknown_cli_args - - def update_args(self, key: str, value: str) -> None: - """Update a known CLI argument. - - Args: - key: Dict key to be updated. - value: New value - - Raises: - KeyError: The key provided for update is not a known arg. - - """ - if key in self._cli_args: - self._cli_args[key] = value - else: - raise KeyError(key) - - @staticmethod - def _create_arg_parser() -> argparse.ArgumentParser: - """Create argparse parser to parse args. - - Used to pull arguments out of self.args when logic could change - depending on values provided. - - Returns: - argparse.ArgumentParser - - """ - parser = argparse.ArgumentParser() - parser.add_argument("-c", "--config", default=None) - return parser - - @classmethod - def parse_obj(cls, obj: object) -> ServerlessOptions: - """Parse options definition and return an options object. - - Args: - obj: Object to parse. - - """ - return cls(data=RunwayServerlessModuleOptionsDataModel.parse_obj(obj)) diff --git a/runway/module/staticsite/handler.py b/runway/module/staticsite/handler.py index 4e9c0db82..fdcbbd077 100644 --- a/runway/module/staticsite/handler.py +++ b/runway/module/staticsite/handler.py @@ -8,7 +8,7 @@ import sys import tempfile from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast +from typing import TYPE_CHECKING, Any, cast import yaml @@ -29,7 +29,7 @@ LOGGER = cast("RunwayLogger", logging.getLogger(__name__)) -class StaticSite(RunwayModule): +class StaticSite(RunwayModule[StaticSiteOptions]): """Static website Runway Module.""" DEPRECATION_MSG = ( @@ -37,19 +37,18 @@ class StaticSite(RunwayModule): "may be removed in the next major release." ) - options: StaticSiteOptions parameters: RunwayStaticSiteModuleParametersDataModel def __init__( self, context: RunwayContext, *, - explicitly_enabled: Optional[bool] = False, + explicitly_enabled: bool | None = False, logger: RunwayLogger = LOGGER, module_root: Path, - name: Optional[str] = None, - options: Optional[Union[Dict[str, Any], ModuleOptions]] = None, - parameters: Optional[Dict[str, Any]] = None, + name: str | None = None, + options: dict[str, Any] | ModuleOptions | None = None, + parameters: dict[str, Any] | None = None, **_: Any, ) -> None: """Instantiate class. @@ -78,9 +77,7 @@ def __init__( options=StaticSiteOptions.parse_obj(options or {}), parameters=parameters, ) - self.parameters = RunwayStaticSiteModuleParametersDataModel.parse_obj( - self.parameters - ) + self.parameters = RunwayStaticSiteModuleParametersDataModel.model_validate(self.parameters) # logger needs to be created here to use the correct logger self.logger = PrefixAdaptor(self.name, LOGGER) LOGGER.warning("%s:%s", self.name, self.DEPRECATION_MSG) @@ -166,7 +163,7 @@ def _setup_website_module(self, command: str) -> None: # have any costs when unused. 
if command == "destroy" and ( self.parameters.auth_at_edge - or self.parameters.dict().get("staticsite_rewrite_index_index") + or self.parameters.model_dump().get("staticsite_rewrite_index_index") ): self._create_cleanup_yaml(module_dir) @@ -175,8 +172,8 @@ def _setup_website_module(self, command: str) -> None: explicitly_enabled=self.explicitly_enabled, module_root=module_dir, name=self.name, - options=self.options.data.dict(), - parameters=self.parameters.dict(by_alias=True), + options=self.options.data.model_dump(), + parameters=self.parameters.model_dump(by_alias=True), ) self.logger.info("%s (in progress)", command) getattr(cfn, command)() @@ -199,13 +196,11 @@ def _create_dependencies_yaml(self, module_dir: Path) -> Path: Path to the file that was created. """ - pre_deploy: List[Any] = [] + pre_deploy: list[Any] = [] pre_destroy = [ { - "args": { - "bucket_name": f"${{rxref {self.sanitized_name}-dependencies::{i}}}" - }, + "args": {"bucket_name": f"${{rxref {self.sanitized_name}-dependencies::{i}}}"}, "path": "runway.cfngin.hooks.cleanup_s3.purge_bucket", "required": True, } @@ -264,7 +259,7 @@ def _create_dependencies_yaml(self, module_dir: Path) -> Path: } ) - content: Dict[str, Any] = { + content: dict[str, Any] = { "cfngin_bucket": "", "namespace": "${namespace}", "pre_deploy": pre_deploy, @@ -279,11 +274,10 @@ def _create_dependencies_yaml(self, module_dir: Path) -> Path: } out_file = module_dir / "01-dependencies.yaml" - with open(out_file, "w", encoding="utf-8") as output_stream: - yaml.dump(content, output_stream, default_flow_style=False, sort_keys=True) - self.logger.debug( - "created %s:\n%s", out_file.name, yaml.dump(content, Dumper=YamlDumper) + out_file.write_text( + yaml.dump(content, default_flow_style=False, sort_keys=True), encoding="utf-8" ) + self.logger.debug("created %s:\n%s", out_file.name, yaml.dump(content, Dumper=YamlDumper)) return out_file def _create_staticsite_yaml(self, module_dir: Path) -> Path: @@ -300,23 +294,19 @@ def _create_staticsite_yaml(self, module_dir: Path) -> Path: """ # Default parameter name matches build_staticsite hook if not self.options.source_hashing.parameter: - self.options.source_hashing.parameter = ( - f"${{namespace}}-{self.sanitized_name}-hash" - ) + self.options.source_hashing.parameter = f"${{namespace}}-{self.sanitized_name}-hash" nonce_secret_param = f"${{namespace}}-{self.sanitized_name}-nonce-secret" - build_staticsite_args: Dict[str, Any] = { + build_staticsite_args: dict[str, Any] = { # ensures yaml.safe_load will work by using JSON to convert objects - "options": json.loads(self.options.data.json(by_alias=True)) + "options": json.loads(self.options.data.model_dump_json(by_alias=True)) } build_staticsite_args["artifact_bucket_rxref_lookup"] = ( f"{self.sanitized_name}-dependencies::ArtifactsBucketName" ) build_staticsite_args["options"]["namespace"] = "${namespace}" build_staticsite_args["options"]["name"] = self.sanitized_name - build_staticsite_args["options"]["path"] = os.path.join( - os.path.realpath(self.ctx.env.root_dir), self.path - ) + build_staticsite_args["options"]["path"] = str(self.ctx.env.root_dir.resolve() / self.path) site_stack_variables = self._get_site_stack_variables() @@ -340,7 +330,7 @@ def _create_staticsite_yaml(self, module_dir: Path) -> Path: "CFDistributionDomainName::default=undefined}", "distribution_id": f"${{cfn ${{namespace}}-{self.sanitized_name}" ".CFDistributionId::default=undefined}", - "extra_files": [i.dict() for i in self.options.extra_files], + "extra_files": [i.model_dump() for i 
in self.options.extra_files], "website_url": f"${{cfn ${{namespace}}-{self.sanitized_name}" ".BucketWebsiteURL::default=undefined}", }, @@ -351,9 +341,7 @@ def _create_staticsite_yaml(self, module_dir: Path) -> Path: pre_destroy = [ { - "args": { - "bucket_name": f"${{rxref {self.sanitized_name}::BucketName}}" - }, + "args": {"bucket_name": f"${{rxref {self.sanitized_name}::BucketName}}"}, "path": "runway.cfngin.hooks.cleanup_s3.purge_bucket", "required": True, } @@ -430,13 +418,13 @@ def _create_staticsite_yaml(self, module_dir: Path) -> Path: site_stack_variables["RoleBoundaryArn"] = self.parameters.role_boundary_arn site_stack_variables["custom_error_responses"] = [ - i.dict(exclude_none=True) for i in self.parameters.custom_error_responses + i.model_dump(exclude_none=True) for i in self.parameters.custom_error_responses ] site_stack_variables["lambda_function_associations"] = [ - i.dict() for i in self.parameters.lambda_function_associations + i.model_dump() for i in self.parameters.lambda_function_associations ] - content = { + content: dict[str, Any] = { "cfngin_bucket": "", "namespace": "${namespace}", "post_deploy": post_deploy, @@ -453,11 +441,10 @@ def _create_staticsite_yaml(self, module_dir: Path) -> Path: } out_file = module_dir / "02-staticsite.yaml" - with open(out_file, "w", encoding="utf-8") as output_stream: - yaml.dump(content, output_stream, default_flow_style=False, sort_keys=True) - self.logger.debug( - "created 02-staticsite.yaml:\n%s", yaml.dump(content, Dumper=YamlDumper) + out_file.write_text( + yaml.dump(content, default_flow_style=False, sort_keys=True), encoding="utf-8" ) + self.logger.debug("created 02-staticsite.yaml:\n%s", yaml.dump(content, Dumper=YamlDumper)) return out_file def _create_cleanup_yaml(self, module_dir: Path) -> Path: @@ -478,23 +465,23 @@ def _create_cleanup_yaml(self, module_dir: Path) -> Path: "service_role": self.parameters.service_role, "stacks": { f"{self.sanitized_name}-cleanup": { - "template_path": os.path.join( - tempfile.gettempdir(), "thisfileisnotused.yaml" + "template_path": os.path.join( # noqa: PTH118 + tempfile.gettempdir(), + "thisfileisnotused.yaml", # cspell: disable-line ), } }, } out_file = module_dir / "03-cleanup.yaml" - with open(out_file, "w", encoding="utf-8") as output_stream: - yaml.dump(content, output_stream, default_flow_style=False, sort_keys=True) - self.logger.debug( - "created %s:\n%s", out_file.name, yaml.dump(content, Dumper=YamlDumper) + out_file.write_text( + yaml.dump(content, default_flow_style=False, sort_keys=True), encoding="utf-8" ) + self.logger.debug("created %s:\n%s", out_file.name, yaml.dump(content, Dumper=YamlDumper)) return out_file - def _get_site_stack_variables(self) -> Dict[str, Any]: - site_stack_variables: Dict[str, Any] = { + def _get_site_stack_variables(self) -> dict[str, Any]: + site_stack_variables: dict[str, Any] = { "Aliases": [], "Compress": self.parameters.compress, "DisableCloudFront": self.parameters.cf_disable, @@ -527,17 +514,16 @@ def _get_site_stack_variables(self) -> Dict[str, Any]: site_stack_variables["OAuthScopes"] = self.parameters.oauth_scopes else: site_stack_variables["custom_error_responses"] = [ - i.dict(exclude_none=True) - for i in self.parameters.custom_error_responses + i.model_dump(exclude_none=True) for i in self.parameters.custom_error_responses ] site_stack_variables["lambda_function_associations"] = [ - i.dict() for i in self.parameters.lambda_function_associations + i.model_dump() for i in self.parameters.lambda_function_associations ] return 
site_stack_variables - def _get_dependencies_variables(self) -> Dict[str, Any]: - variables: Dict[str, Any] = {"OAuthScopes": self.parameters.oauth_scopes} + def _get_dependencies_variables(self) -> dict[str, Any]: + variables: dict[str, Any] = {"OAuthScopes": self.parameters.oauth_scopes} if self.parameters.auth_at_edge: self._ensure_auth_at_edge_requirements() @@ -548,9 +534,7 @@ def _get_dependencies_variables(self) -> Dict[str, Any]: "RedirectPathSignIn": ( "${default staticsite_redirect_path_sign_in::/parseauth}" ), - "RedirectPathSignOut": ( - "${default staticsite_redirect_path_sign_out::/}" - ), + "RedirectPathSignOut": ("${default staticsite_redirect_path_sign_out::/}"), }, ) @@ -558,17 +542,15 @@ def _get_dependencies_variables(self) -> Dict[str, Any]: variables.update({"Aliases": self.parameters.aliases}) if self.parameters.additional_redirect_domains: variables.update( - { - "AdditionalRedirectDomains": self.parameters.additional_redirect_domains - } + {"AdditionalRedirectDomains": self.parameters.additional_redirect_domains} ) if self.parameters.create_user_pool: variables.update({"CreateUserPool": self.parameters.create_user_pool}) return variables - def _get_user_pool_id_retriever_variables(self) -> Dict[str, Any]: - args: Dict[str, Any] = { + def _get_user_pool_id_retriever_variables(self) -> dict[str, Any]: + args: dict[str, Any] = { "user_pool_arn": self.parameters.user_pool_arn, } @@ -579,7 +561,7 @@ def _get_user_pool_id_retriever_variables(self) -> Dict[str, Any]: return args - def _get_domain_updater_variables(self) -> Dict[str, str]: + def _get_domain_updater_variables(self) -> dict[str, str]: return { "client_id_output_lookup": f"{self.sanitized_name}-dependencies::AuthAtEdgeClient", "client_id": f"${{rxref {self.sanitized_name}-dependencies::AuthAtEdgeClient}}", @@ -587,10 +569,10 @@ def _get_domain_updater_variables(self) -> Dict[str, str]: def _get_lambda_config_variables( self, - site_stack_variables: Dict[str, Any], + site_stack_variables: dict[str, Any], nonce_secret_param: str, - required_group: Optional[str] = None, - ) -> Dict[str, Any]: + required_group: str | None = None, + ) -> dict[str, Any]: return { "client_id": f"${{rxref {self.sanitized_name}-dependencies::AuthAtEdgeClient}}", "bucket": f"${{rxref {self.sanitized_name}-dependencies::ArtifactsBucketName}}", @@ -605,12 +587,10 @@ def _get_lambda_config_variables( } def _get_client_updater_variables( - self, name: str, site_stack_variables: Dict[str, Any] - ) -> Dict[str, Any]: + self, name: str, site_stack_variables: dict[str, Any] + ) -> dict[str, Any]: return { - "alternate_domains": [ - add_url_scheme(x) for x in site_stack_variables["Aliases"] - ], + "alternate_domains": [add_url_scheme(x) for x in site_stack_variables["Aliases"]], "client_id": f"${{rxref {self.sanitized_name}-dependencies::AuthAtEdgeClient}}", "distribution_domain": f"${{rxref {name}::CFDistributionDomainName}}", "oauth_scopes": site_stack_variables["OAuthScopes"], @@ -641,8 +621,7 @@ def _ensure_cloudfront_with_auth_at_edge(self) -> None: """Exit if both the Auth@Edge and CloudFront disablement are true.""" if self.parameters.cf_disable and self.parameters.auth_at_edge: self.logger.error( - 'staticsite_cf_disable must be "false" if ' - 'staticsite_auth_at_edge is "true"' + 'staticsite_cf_disable must be "false" if staticsite_auth_at_edge is "true"' ) sys.exit(1) diff --git a/runway/module/staticsite/options/__init__.py b/runway/module/staticsite/options/__init__.py index 373c98c85..254648e35 100644 --- 
a/runway/module/staticsite/options/__init__.py +++ b/runway/module/staticsite/options/__init__.py @@ -1,7 +1,7 @@ """Runway Static Site Module options.""" -from .components import StaticSiteOptions -from .models import ( +from ._components import StaticSiteOptions +from ._models import ( RunwayStaticSiteExtraFileDataModel, RunwayStaticSiteModuleOptionsDataModel, RunwayStaticSitePreBuildStepDataModel, diff --git a/runway/module/staticsite/options/components.py b/runway/module/staticsite/options/_components.py similarity index 90% rename from runway/module/staticsite/options/components.py rename to runway/module/staticsite/options/_components.py index 642a0c859..5f735ef2d 100644 --- a/runway/module/staticsite/options/components.py +++ b/runway/module/staticsite/options/_components.py @@ -3,7 +3,7 @@ from __future__ import annotations from ...base import ModuleOptions -from .models import RunwayStaticSiteModuleOptionsDataModel +from ._models import RunwayStaticSiteModuleOptionsDataModel class StaticSiteOptions(ModuleOptions): @@ -38,4 +38,4 @@ def parse_obj(cls, obj: object) -> StaticSiteOptions: obj: Object to parse. """ - return cls(data=RunwayStaticSiteModuleOptionsDataModel.parse_obj(obj)) + return cls(data=RunwayStaticSiteModuleOptionsDataModel.model_validate(obj)) diff --git a/runway/module/staticsite/options/_models.py b/runway/module/staticsite/options/_models.py new file mode 100644 index 000000000..29065508d --- /dev/null +++ b/runway/module/staticsite/options/_models.py @@ -0,0 +1,158 @@ +"""Runway static site Module options.""" + +from __future__ import annotations + +from pathlib import Path +from typing import Any, cast + +from pydantic import ConfigDict, model_validator + +from ....config.models.base import ConfigProperty + + +class RunwayStaticSiteExtraFileDataModel(ConfigProperty): + """Model for Runway static site Module extra_files option item.""" + + model_config = ConfigDict( + extra="forbid", + title="Runway static site Module extra_files option item", + validate_assignment=True, + validate_default=True, + ) + + content_type: str | None = None + """An explicit content type for the file. + If not provided, will attempt to determine based on the name provided. + + """ + + content: Any = None + """Inline content that will be used as the file content. + This or ``file`` must be provided. + + """ + + file: Path | None = None + """Path to an existing file. + The content of this file will be uploaded to the static site S3 bucket using + the name as the object key. + This or ``content`` must be provided. 
+
+    """
+
+    name: str
+    """The destination name of the file to create."""
+
+    @model_validator(mode="before")
+    @classmethod
+    def _autofill_content_type(cls, values: dict[str, Any]) -> dict[str, Any]:
+        """Attempt to fill content_type if not provided."""
+        if values.get("content_type"):
+            return values
+        name = cast(str, values.get("name", ""))
+        if name.endswith(".json"):
+            values["content_type"] = "application/json"
+        elif name.endswith((".yaml", ".yml")):
+            values["content_type"] = "text/yaml"
+        return values
+
+    @model_validator(mode="before")
+    @classmethod
+    def _validate_content_or_file(cls, values: dict[str, Any]) -> dict[str, Any]:
+        """Validate that content or file is provided."""
+        if all(i in values and values[i] for i in ["content", "file"]):
+            raise ValueError("only one of content or file can be provided")
+        if not any(i in values for i in ["content", "file"]):
+            raise ValueError("one of content or file must be provided")
+        return values
+
+
+class RunwayStaticSitePreBuildStepDataModel(ConfigProperty):
+    """Model for Runway static site Module pre_build_steps option item."""
+
+    model_config = ConfigDict(
+        extra="forbid",
+        title="Runway static site Module pre_build_steps option item",
+        validate_default=True,
+        validate_assignment=True,
+    )
+
+    command: str
+    """The command to run."""
+
+    cwd: Path = Path.cwd()
+    """The working directory for the subprocess running the command.
+    If not provided, the current working directory is used.
+
+    """
+
+
+class RunwayStaticSiteSourceHashingDirectoryDataModel(ConfigProperty):
+    """Model for Runway static site Module source_hashing.directory option item."""
+
+    model_config = ConfigDict(
+        extra="forbid",
+        title="Runway static site Module source_hashing.directories option item",
+        validate_default=True,
+        validate_assignment=True,
+    )
+
+    exclusions: list[str] = []
+    """List of gitignore formatted globs to ignore when calculating the hash."""
+
+    path: Path
+    """Path to files to include in the hash."""
+
+
+class RunwayStaticSiteSourceHashingDataModel(ConfigProperty):
+    """Model for Runway static site Module source_hashing option."""
+
+    model_config = ConfigDict(
+        extra="forbid",
+        title="Runway static site Module source_hashing option",
+        validate_default=True,
+        validate_assignment=True,
+    )
+
+    directories: list[RunwayStaticSiteSourceHashingDirectoryDataModel] = [
+        RunwayStaticSiteSourceHashingDirectoryDataModel(path="./")  # type: ignore
+    ]
+    """Explicitly provide the directories to use when calculating the hash.
+    If not provided, will default to the root of the module.
+    """
+
+    enabled: bool = True
+    """Enable source hashing. If not enabled, build and upload will occur on every deploy."""
+
+    parameter: str | None = None
+    """SSM parameter where the hash of each build is stored."""
+
+
+class RunwayStaticSiteModuleOptionsDataModel(ConfigProperty):
+    """Model for Runway static site Module options."""
+
+    model_config = ConfigDict(
+        extra="ignore",
+        title="Runway static site Module options",
+        validate_default=True,
+        validate_assignment=True,
+    )
+
+    build_output: str = "./"
+    """Directory where build output is placed. Defaults to current working directory."""
+
+    build_steps: list[str] = []
+    """List of commands to run to build the static site."""
+
+    extra_files: list[RunwayStaticSiteExtraFileDataModel] = []
+    """List of files that should be uploaded to S3 after the build.
+    Used to dynamically create or select files.
+ """ + + pre_build_steps: list[RunwayStaticSitePreBuildStepDataModel] = [] + """Commands to be run prior to the build process.""" + + source_hashing: RunwayStaticSiteSourceHashingDataModel = ( + RunwayStaticSiteSourceHashingDataModel() + ) + """Overrides for source hash calculation and tracking.""" diff --git a/runway/module/staticsite/options/models.py b/runway/module/staticsite/options/models.py deleted file mode 100644 index 793e80071..000000000 --- a/runway/module/staticsite/options/models.py +++ /dev/null @@ -1,156 +0,0 @@ -"""Runway static site Module options.""" - -from __future__ import annotations - -from pathlib import Path -from typing import Any, Dict, List, Optional, cast - -from pydantic import Extra, root_validator - -from ....config.models.base import ConfigProperty - - -class RunwayStaticSiteExtraFileDataModel(ConfigProperty): - """Model for Runway static site Module extra_files option item. - - Attributes: - content_type: An explicit content type for the file. If not provided, - will attempt to determine based on the name provided. - content: Inline content that will be used as the file content. - This or ``file`` must be provided. - file: Path to an existing file. The content of this file will be uploaded - to the static site S3 bucket using the name as the object key. - This or ``content`` must be provided. - name: The destination name of the file to create. - - """ - - content_type: Optional[str] = None - content: Any = None - file: Optional[Path] = None - name: str - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.forbid - title = "Runway static site Module extra_files option item." - - @root_validator - def _autofill_content_type( # pylint: disable=no-self-argument - cls, values: Dict[str, Any] - ) -> Dict[str, Any]: - """Attempt to fill content_type if not provided.""" - if values.get("content_type"): - return values - name = cast(str, values.get("name", "")) - if name.endswith(".json"): - values["content_type"] = "application/json" - elif name.endswith(".yaml") or name.endswith(".yml"): - values["content_type"] = "text/yaml" - return values - - @root_validator(pre=True) - def _validate_content_or_file( # pylint: disable=no-self-argument - cls, values: Dict[str, Any] - ) -> Dict[str, Any]: - """Validate that content or file is provided.""" - if all(i in values and values[i] for i in ["content", "file"]): - raise ValueError("only one of content or file can be provided") - if not any(i in values for i in ["content", "file"]): - raise ValueError("one of content or file must be provided") - return values - - -class RunwayStaticSitePreBuildStepDataModel(ConfigProperty): - """Model for Runway static site Module pre_build_steps option item. - - Attributes: - command: The command to run. - cwd: The working directory for the subprocess running the command. - If not provided, the current working directory is used. - - """ - - command: str - cwd: Path = Path.cwd() - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.forbid - title = "Runway static site Module pre_build_steps option item." - - -class RunwayStaticSiteSourceHashingDirectoryDataModel(ConfigProperty): - """Model for Runway static site Module source_hashing.directory option item. - - Attributes: - exclusions: List of gitignore formatted globs to ignore when calculating - the hash. - path: Path to files to include in the hash. 
- - """ - - exclusions: List[str] = [] - path: Path - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.forbid - title = "Runway static site Module source_hashing.directories option item." - - -class RunwayStaticSiteSourceHashingDataModel(ConfigProperty): - """Model for Runway static site Module source_hashing option. - - Attributes: - directories: Explicitly provide the directories to use when calculating - the hash. If not provided, will default to the root of the module. - enabled: Enable source hashing. If not enabled, build and upload will - occur on every deploy. - parameter: SSM parameter where the hash of each build is stored. - - """ - - directories: List[RunwayStaticSiteSourceHashingDirectoryDataModel] = [ - RunwayStaticSiteSourceHashingDirectoryDataModel(path="./") # type: ignore - ] - enabled: bool = True - parameter: Optional[str] = None - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.forbid - title = "Runway static site Module source_hashing option." - - -class RunwayStaticSiteModuleOptionsDataModel(ConfigProperty): - """Model for Runway static site Module options. - - Attributes: - build_output: Directory where build output is placed. Defaults to current - working directory. - build_steps: List of commands to run to build the static site. - extra_files: List of files that should be uploaded to S3 after the build. - Used to dynamically create or select file. - pre_build_steps: Commands to be run prior to the build process. - source_hashing: Overrides for source hash calculation and tracking. - - """ - - build_output: str = "./" - build_steps: List[str] = [] - extra_files: List[RunwayStaticSiteExtraFileDataModel] = [] - pre_build_steps: List[RunwayStaticSitePreBuildStepDataModel] = [] - source_hashing: RunwayStaticSiteSourceHashingDataModel = ( - RunwayStaticSiteSourceHashingDataModel() - ) - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.ignore - title = "Runway static site Module options." diff --git a/runway/module/staticsite/parameters/__init__.py b/runway/module/staticsite/parameters/__init__.py index 89af619c2..ee7220cda 100644 --- a/runway/module/staticsite/parameters/__init__.py +++ b/runway/module/staticsite/parameters/__init__.py @@ -1,6 +1,6 @@ """Runway Static Site Module parameters.""" -from .models import ( +from ._models import ( RunwayStaticSiteCustomErrorResponseDataModel, RunwayStaticSiteLambdaFunctionAssociationDataModel, RunwayStaticSiteModuleParametersDataModel, diff --git a/runway/module/staticsite/parameters/_models.py b/runway/module/staticsite/parameters/_models.py new file mode 100644 index 000000000..a5a6bfc4e --- /dev/null +++ b/runway/module/staticsite/parameters/_models.py @@ -0,0 +1,190 @@ +"""Runway static site Module parameters.""" + +from __future__ import annotations + +from pydantic import ConfigDict, Field, field_validator + +from ....config.models.base import ConfigProperty + + +class RunwayStaticSiteCustomErrorResponseDataModel(ConfigProperty): + """Model for Runway stat site Module staticsite_custom_error_responses parameter item. 
+
+    https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-customerrorresponse.html
+
+    """
+
+    model_config = ConfigDict(
+        extra="forbid",
+        title="Runway static site Module staticsite_custom_error_responses parameter item",
+        validate_default=True,
+        validate_assignment=True,
+    )
+
+    ErrorCachingMinTTL: int | None = None
+    ErrorCode: int | None = None
+    ResponseCode: int | None = None
+    ResponsePagePath: str | None = None
+
+
+class RunwayStaticSiteLambdaFunctionAssociationDataModel(ConfigProperty):
+    """Model for Runway static site Module staticsite_lambda_function_associations parameter item."""
+
+    model_config = ConfigDict(
+        extra="forbid",
+        title="Runway static site Module staticsite_lambda_function_associations parameter item",
+        validate_default=True,
+        validate_assignment=True,
+    )
+
+    arn: str
+    """Lambda function ARN."""
+
+    type: str
+    """Association type."""
+
+
+def _staticsite_alias_generator(field_name: str) -> str:
+    """Prepend ``staticsite_`` to field names.
+
+    Some fields are excluded from having aliases (e.g. namespace).
+
+    """
+    return f"staticsite_{field_name}" if field_name != "namespace" else field_name
+
+
+class RunwayStaticSiteModuleParametersDataModel(ConfigProperty):
+    """Model for Runway static site Module parameters."""
+
+    model_config = ConfigDict(
+        alias_generator=_staticsite_alias_generator,
+        extra="ignore",
+        populate_by_name=True,
+        title="Runway static site Module parameters",
+        validate_default=True,
+        validate_assignment=True,
+    )
+
+    acmcert_arn: str | None = None
+    """The certificate ARN used for any alias domains supplied.
+    This is a requirement when supplying any custom domain.
+
+    """
+
+    additional_redirect_domains: list[str] = []
+    """Additional domains (beyond the ``aliases`` domains or the CloudFront URL if
+    no aliases are provided) that will be authorized by the Auth@Edge UserPool AppClient.
+
+    """
+
+    aliases: list[str] = []
+    """Any custom domains that should be added to the CloudFront Distribution."""
+
+    auth_at_edge: bool = False
+    """Auth@Edge makes the static site private by placing it behind an authorization wall."""
+
+    cf_disable: bool = False
+    """Whether deployment of the CloudFront Distribution should be disabled."""
+
+    compress: bool = True
+    """Whether the CloudFront default cache behavior will automatically compress certain files."""
+
+    cookie_settings: dict[str, str] = {
+        "idToken": "Path=/; Secure; SameSite=Lax",
+        "accessToken": "Path=/; Secure; SameSite=Lax",
+        "refreshToken": "Path=/; Secure; SameSite=Lax",
+        "nonce": "Path=/; Secure; HttpOnly; Max-Age=1800; SameSite=Lax",
+    }
+    """The default cookie settings for retrieved tokens and generated nonces."""
+
+    create_user_pool: bool = False
+    """Whether to create a User Pool for the Auth@Edge configuration."""
+
+    custom_error_responses: list[RunwayStaticSiteCustomErrorResponseDataModel] = []
+    """Define custom error responses."""
+
+    enable_cf_logging: bool = True
+    """Enable CloudFront logging."""
+
+    http_headers: dict[str, str] = {
+        "Content-Security-Policy": "default-src https: 'unsafe-eval' 'unsafe-inline'; "
+        "font-src 'self' 'unsafe-inline' 'unsafe-eval' data: https:; "
+        "object-src 'none'; "
+        "connect-src 'self' https://*.amazonaws.com https://*.amazoncognito.com",
+        "Strict-Transport-Security": "max-age=31536000; includeSubdomains; preload",
+        "Referrer-Policy": "same-origin",
+        "X-XSS-Protection": "1; mode=block",
+        "X-Frame-Options": "DENY",
+        "X-Content-Type-Options": "nosniff",
+    }
+    """Headers that should be sent with each origin response."""
+
+    lambda_function_associations: list[RunwayStaticSiteLambdaFunctionAssociationDataModel] = []
+    """This allows the user to deploy custom Lambda@Edge associations with their pre-built function versions."""
+
+    namespace: str
+    """The unique namespace for the deployment."""
+
+    non_spa: bool = False
+    """Whether this site is a single page application (SPA)."""
+
+    oauth_scopes: list[str] = [
+        "phone",
+        "email",
+        "profile",
+        "openid",
+        "aws.cognito.signin.user.admin",
+    ]
+    """Scope is a mechanism in OAuth 2.0 to limit an application's access to a user's account."""
+
+    redirect_path_auth_refresh: str = "/refreshauth"
+    """The path that a user is redirected to when their authorization tokens have expired (1 hour)."""
+
+    redirect_path_sign_in: str = "/parseauth"
+    """The path that a user is redirected to after sign-in."""
+
+    redirect_path_sign_out: str = "/"
+    """The path that a user is redirected to after sign-out."""
+
+    required_group: str | None = None
+    """Name of Cognito User Pool group of which users must be a member to be granted access to the site.
+    If ``None``, allows all UserPool users to have access.
+
+    """
+
+    rewrite_directory_index: str | None = None
+    """Deploy a Lambda@Edge function designed to rewrite directory indexes."""
+
+    role_boundary_arn: str | None = None
+    """Defines an IAM Managed Policy that will be set as the permissions boundary
+    for any IAM Roles created to support the site.
+
+    """
+
+    service_role: str | None = Field(default=None, alias="cloudformation_service_role")
+    """IAM role that CloudFormation will use."""
+
+    sign_out_url: str = "/signout"
+    """The path a user should access to sign themselves out of the application."""
+
+    supported_identity_providers: list[str] = ["COGNITO"]
+    """A comma delimited list of the User Pool client identity providers."""
+
+    user_pool_arn: str | None = None
+    """The ARN of a pre-existing Cognito User Pool to use with Auth@Edge."""
+
+    web_acl: str | None = None
+    """The ARN of a web access control list (web ACL) to associate with the CloudFront Distribution."""
+
+    @field_validator(
+        "additional_redirect_domains",
+        "aliases",
+        "supported_identity_providers",
+        mode="before",
+    )
+    @classmethod
+    def _convert_comma_delimited_list(cls, v: list[str] | str) -> list[str]:
+        """Convert a comma delimited string to a list of strings."""
+        if isinstance(v, str):
+            return [i.strip() for i in v.split(",")]
+        return v
diff --git a/runway/module/staticsite/parameters/models.py b/runway/module/staticsite/parameters/models.py
deleted file mode 100644
index 4fca1a9df..000000000
--- a/runway/module/staticsite/parameters/models.py
+++ /dev/null
@@ -1,201 +0,0 @@
-"""Runway static site Module parameters."""
-
-# pylint: disable=no-self-argument
-from __future__ import annotations
-
-from typing import Dict, List, Optional, Union
-
-from pydantic import Extra, Field, validator
-
-from ....config.models.base import ConfigProperty
-
-
-class RunwayStaticSiteCustomErrorResponseDataModel(ConfigProperty):
-    """Model for Runway stat site Module staticsite_custom_error_responses parameter item.
-
-    https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-customerrorresponse.html
-
-    """
-
-    ErrorCachingMinTTL: Optional[int] = None
-    ErrorCode: Optional[int] = None
-    ResponseCode: Optional[int] = None
-    ResponsePagePath: Optional[str] = None
-
-    class Config(ConfigProperty.Config):
-        """Model configuration."""
-
-        extra = Extra.forbid
-        title = "Runway static site Module staticsite_custom_error_responses parameter item."
-
-
-class RunwayStaticSiteLambdaFunctionAssociationDataModel(ConfigProperty):
-    """Model for Runway stat site Module staticsite_lambda_function_associations parameter item.
-
-    Attributes:
-        arn: Lambda function ARN.
-        type: Association type.
-
-    """
-
-    arn: str
-    type: str
-
-    class Config(ConfigProperty.Config):
-        """Model configuration."""
-
-        extra = Extra.forbid
-        title = "Runway static site Module staticsite_lambda_function_associations parameter item."  # noqa
-
-
-class RunwayStaticSiteModuleParametersDataModel(ConfigProperty):
-    """Model for Runway static site Module parameters.
-
-    Attributes:
-        acmcert_arn: The certificate arn used for any alias domains supplied.
-            This is a requirement when supplying any custom domain.
-        additional_redirect_domains: Additional domains (beyond the ``aliases``
-            domains or the CloudFront URL if no aliases are provided) that will
-            be authorized by the Auth@Edge UserPool AppClient.
-        aliases: Any custom domains that should be added to the CloudFront
-            Distribution.
-        auth_at_edge: Auth@Edge make the static site private by placing it behind
-            an authorization wall.
-        cf_disable: Wether deployment of the CloudFront Distribution should be
-            disabled.
-        compress: Whether the CloudFront default cache behavior will automatically
-            compress certain files.
-        cookie_settings: The default cookie settings for retrieved tokens and
-            generated nonce's.
- create_user_pool: Wether to create a User Pool for the Auth@Edge - configuration. - custom_error_responses: Define custom error responses. - enable_cf_logging: Enable CloudFront logging. - http_headers: Headers that should be sent with each origin response. - lambda_function_associations: This allows the user to deploy custom - Lambda@Edge associations with their pre-build function versions. - namespace: The unique namespace for the deployment. - non_spa: Wether this site is a single page application (SPA). - oauth_scopes: Scope is a mechanism in OAuth 2.0 to limit an application's - access to a user's account. - redirect_path_auth_refresh: The path that a user is redirected to when - their authorization tokens have expired (1 hour). - redirect_path_sign_in: The path that a user is redirected to after sign-in. - redirect_path_sign_out: The path that a user is redirected to after sign-out. - required_group: Name of Cognito User Pool group of which users must be a - member to be granted access to the site. If ``None``, allows all - UserPool users to have access. - rewrite_directory_index: Deploy a Lambda@Edge function designed to - rewrite directory indexes. - role_boundary_arn: Defines an IAM Managed Policy that will be set as the - permissions boundary for any IAM Roles created to support the site. - service_role: IAM role that CloudFormation will use. - sign_out_url: The path a user should access to sign themselves out of the - application. - supported_identity_providers: A comma delimited list of the User Pool - client identity providers. - user_pool_arn: The ARN of a pre-existing Cognito User Pool to use with - Auth@Edge. - web_acl: The ARN of a web access control list (web ACL) to associate with - the CloudFront Distribution. - - """ - - acmcert_arn: Optional[str] = Field(default=None, alias="staticsite_acmcert_arn") - additional_redirect_domains: List[str] = Field( - default=[], alias="staticsite_additional_redirect_domains" - ) - aliases: List[str] = Field(default=[], alias="staticsite_aliases") - auth_at_edge: bool = Field(default=False, alias="staticsite_auth_at_edge") - cf_disable: bool = Field(default=False, alias="staticsite_cf_disable") - compress: bool = Field(default=True, alias="staticsite_compress") - cookie_settings: Dict[str, str] = Field( - default={ - "idToken": "Path=/; Secure; SameSite=Lax", - "accessToken": "Path=/; Secure; SameSite=Lax", - "refreshToken": "Path=/; Secure; SameSite=Lax", - "nonce": "Path=/; Secure; HttpOnly; Max-Age=1800; SameSite=Lax", - }, - alias="staticsite_cookie_settings", - ) - create_user_pool: bool = Field(default=False, alias="staticsite_create_user_pool") - custom_error_responses: List[RunwayStaticSiteCustomErrorResponseDataModel] = Field( - default=[], alias="staticsite_custom_error_responses" - ) - enable_cf_logging: bool = Field(default=True, alias="staticsite_enable_cf_logging") - http_headers: Dict[str, str] = Field( - default={ - "Content-Security-Policy": "default-src https: 'unsafe-eval' 'unsafe-inline'; " - "font-src 'self' 'unsafe-inline' 'unsafe-eval' data: https:; " - "object-src 'none'; " - "connect-src 'self' https://*.amazonaws.com https://*.amazoncognito.com", - "Strict-Transport-Security": "max-age=31536000; " - "includeSubdomains; " - "preload", - "Referrer-Policy": "same-origin", - "X-XSS-Protection": "1; mode=block", - "X-Frame-Options": "DENY", - "X-Content-Type-Options": "nosniff", - }, - alias="staticsite_http_headers", - ) - lambda_function_associations: List[ - RunwayStaticSiteLambdaFunctionAssociationDataModel - 
] = Field(default=[], alias="staticsite_lambda_function_associations") - namespace: str - non_spa: bool = Field(default=False, alias="staticsite_non_spa") - oauth_scopes: List[str] = Field( - default=[ - "phone", - "email", - "profile", - "openid", - "aws.cognito.signin.user.admin", - ], - alias="staticsite_oauth_scopes", - ) - redirect_path_auth_refresh: str = Field( - default="/refreshauth", alias="staticsite_redirect_path_auth_refresh" - ) - redirect_path_sign_in: str = Field( - default="/parseauth", alias="staticsite_redirect_path_sign_in" - ) - redirect_path_sign_out: str = Field( - default="/", alias="staticsite_redirect_path_sign_out" - ) - required_group: Optional[str] = Field( - default=None, alias="staticsite_required_group" - ) - rewrite_directory_index: Optional[str] = Field( - default=None, alias="staticsite_rewrite_directory_index" - ) - role_boundary_arn: Optional[str] = Field( - default=None, alias="staticsite_role_boundary_arn" - ) - service_role: Optional[str] = Field( - default=None, alias="cloudformation_service_role" - ) - sign_out_url: str = Field(default="/signout", alias="staticsite_sign_out_url") - supported_identity_providers: List[str] = Field( - default=["COGNITO"], alias="staticsite_supported_identity_providers" - ) - user_pool_arn: Optional[str] = Field(default=None, alias="staticsite_user_pool_arn") - web_acl: Optional[str] = Field(default=None, alias="staticsite_web_acl") - - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.ignore - title = "Runway static site Module parameters." - - @validator( - "additional_redirect_domains", - "aliases", - "supported_identity_providers", - pre=True, - ) - def _convert_comma_delimited_list(cls, v: Union[List[str], str]) -> List[str]: - """Convert comma delimited lists to a string.""" - if isinstance(v, str): - return [i.strip() for i in v.split(",")] - return v diff --git a/runway/module/staticsite/utils.py b/runway/module/staticsite/utils.py index c157384a8..86c81cfe3 100644 --- a/runway/module/staticsite/utils.py +++ b/runway/module/staticsite/utils.py @@ -8,6 +8,6 @@ def add_url_scheme(url: str) -> str: url (str): The current url. 
""" - if url.startswith("https://") or url.startswith("http://"): + if url.startswith(("https://", "http://")): return url return f"https://{url}" diff --git a/runway/module/terraform.py b/runway/module/terraform.py index 2ff6f8569..1def347fe 100644 --- a/runway/module/terraform.py +++ b/runway/module/terraform.py @@ -8,7 +8,7 @@ import subprocess import sys from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast +from typing import TYPE_CHECKING, Any, cast import hcl from send2trash import send2trash @@ -34,7 +34,7 @@ LOGGER = cast("RunwayLogger", logging.getLogger(__name__)) -def gen_workspace_tfvars_files(environment: str, region: str) -> List[str]: +def gen_workspace_tfvars_files(environment: str, region: str) -> list[str]: """Generate possible Terraform workspace tfvars filenames.""" return [ # Give preference to explicit environment-region files @@ -45,16 +45,15 @@ def gen_workspace_tfvars_files(environment: str, region: str) -> List[str]: def update_env_vars_with_tf_var_values( - os_env_vars: Dict[str, str], - tf_vars: Dict[str, Union[Dict[str, Any], List[Any], str]], -) -> Dict[str, str]: + os_env_vars: dict[str, str], + tf_vars: dict[str, dict[str, Any] | list[Any] | str], +) -> dict[str, str]: """Return os_env_vars with TF_VAR values for each tf_var.""" # https://www.terraform.io/docs/commands/environment-variables.html#tf_var_name for key, val in tf_vars.items(): if isinstance(val, dict): value = ", ".join( - nestedkey + ' = "' + str(nestedval) + '"' - for (nestedkey, nestedval) in val.items() + nestedkey + ' = "' + str(nestedval) + '"' for (nestedkey, nestedval) in val.items() ) os_env_vars[f"TF_VAR_{key}"] = f"{{ {value} }}" elif isinstance(val, list): @@ -64,6 +63,193 @@ def update_env_vars_with_tf_var_values( return os_env_vars +class TerraformBackendConfig(ModuleOptions): + """Terraform backend configuration module options.""" + + def __init__( + self, + data: RunwayTerraformBackendConfigDataModel, + deploy_environment: DeployEnvironment, + path: Path, + ) -> None: + """Instantiate class. + + Args: + data: Options parsed into a data model. + deploy_environment: Current deploy environment. + path: Module path. 
+ + """ + self.bucket = data.bucket + self.data = data + self.dynamodb_table = data.dynamodb_table + self.env = deploy_environment + self.path = path + if data and not data.region: + data.region = deploy_environment.aws_region # default to region from env + self.region = data.region + + @cached_property + def config_file(self) -> Path | None: + """Backend configuration file.""" + return self.get_backend_file(self.path, self.env.name, self.env.aws_region) + + @cached_property + def init_args(self) -> list[str]: + """Return command line arguments for init.""" + result: list[str] = [] + for k, v in self.data.model_dump(exclude_none=True).items(): + result.extend(["-backend-config", f"{k}={v}"]) + if not result: + if self.config_file: + LOGGER.verbose("using backend config file: %s", self.config_file.name) + return [f"-backend-config={self.config_file.name}"] + LOGGER.info( + "backend file not found -- looking for one of: %s", + ", ".join(self.gen_backend_filenames(self.env.name, self.env.aws_region)), + ) + return [] + LOGGER.info("using backend values from runway.yml") + LOGGER.debug("provided backend values: %s", json.dumps(result)) + return result + + def get_full_configuration(self) -> dict[str, str]: + """Get full backend configuration.""" + if not self.config_file: + return self.data.model_dump(exclude_none=True) + result = cast(dict[str, str], hcl.loads(self.config_file.read_text())) + result.update(self.data.model_dump(exclude_none=True)) + return result + + @classmethod + def get_backend_file(cls, path: Path, environment: str, region: str) -> Path | None: + """Determine Terraform backend file. + + Args: + path: Path to the module. + environment: Current deploy environment. + region: Current AWS region. + + """ + backend_filenames = cls.gen_backend_filenames(environment, region) + for name in backend_filenames: + test_path = path / name + if test_path.is_file(): + return test_path + return None + + @staticmethod + def gen_backend_filenames(environment: str, region: str) -> list[str]: + """Generate possible Terraform backend filenames. + + Args: + environment: Current deploy environment. + region : Current AWS region. + + """ + formats = [ + "backend-{environment}-{region}.{extension}", + "backend-{environment}.{extension}", + "backend-{region}.{extension}", + "backend.{extension}", + ] + result: list[str] = [] + for fmt in formats: + for ext in ["hcl", "tfvars"]: + result.append( # noqa: PERF401 + fmt.format(environment=environment, extension=ext, region=region) + ) + return result + + @classmethod + def parse_obj( + cls, + deploy_environment: DeployEnvironment, + obj: object, + path: Path | None = None, + ) -> TerraformBackendConfig: + """Parse options definition and return an options object. + + Args: + deploy_environment: Current deploy environment. + obj: Object to parse. + path: Module path. + + """ + return cls( + data=RunwayTerraformBackendConfigDataModel.model_validate(obj), + deploy_environment=deploy_environment, + path=path or Path.cwd(), + ) + + +class TerraformOptions(ModuleOptions): + """Module options for Terraform. + + Attributes: + args: CLI arguments/options to pass to Terraform. + data: Options parsed into a data model. + env: Current deploy environment. + path: Module path. + version: String containing a Terraform version. + write_auto_tfvars: Optionally write parameters to a tfvars file instead + of updating variables. 
+ + """ + + def __init__( + self, + data: RunwayTerraformModuleOptionsDataModel, + deploy_environment: DeployEnvironment, + path: Path | None = None, + ) -> None: + """Instantiate class. + + Args: + deploy_environment: Current deploy environment. + data: Options parsed into a data model. + path: Module path. + + """ + self.args = data.args + self.data = data + self.env = deploy_environment + self.path = path or Path.cwd() + self.version = data.version + self.workspace = data.workspace or deploy_environment.name + self.write_auto_tfvars = data.write_auto_tfvars + + @cached_property + def backend_config(self) -> TerraformBackendConfig: + """Backend configuration options.""" + return TerraformBackendConfig.parse_obj( + deploy_environment=self.env, + obj=self.data.backend_config or {}, + path=self.path, + ) + + @classmethod + def parse_obj( + cls, + deploy_environment: DeployEnvironment, + obj: object, + path: Path | None = None, + ) -> TerraformOptions: + """Parse options definition and return an options object. + + Args: + deploy_environment: Current deploy environment. + obj: Object to parse. + path: Module path. + + """ + return cls( + data=RunwayTerraformModuleOptionsDataModel.model_validate(obj), + deploy_environment=deploy_environment, + path=path or Path.cwd(), + ) + + TerraformActionTypeDef = Literal[ "apply", "destroy", @@ -77,21 +263,19 @@ def update_env_vars_with_tf_var_values( ] -class Terraform(RunwayModule, DelCachedPropMixin): +class Terraform(RunwayModule[TerraformOptions], DelCachedPropMixin): """Terraform Runway Module.""" - options: TerraformOptions - def __init__( self, context: RunwayContext, *, - explicitly_enabled: Optional[bool] = False, + explicitly_enabled: bool | None = False, logger: RunwayLogger = LOGGER, module_root: Path, - name: Optional[str] = None, - options: Optional[Union[Dict[str, Any], ModuleOptions]] = None, - parameters: Optional[Dict[str, Any]] = None, + name: str | None = None, + options: dict[str, Any] | ModuleOptions | None = None, + parameters: dict[str, Any] | None = None, **_: Any, ) -> None: """Instantiate class. 
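
For reference, the backend file lookup order implemented by TerraformBackendConfig.gen_backend_filenames above reduces to the following self-contained sketch. The format list is copied from the diff; the environment and region values are hypothetical.

def gen_backend_filenames(environment: str, region: str) -> list[str]:
    """Candidate Terraform backend filenames, most specific first."""
    formats = [
        "backend-{environment}-{region}.{extension}",
        "backend-{environment}.{extension}",
        "backend-{region}.{extension}",
        "backend.{extension}",
    ]
    # For each format, try the "hcl" extension before "tfvars",
    # matching the nested loops in the method above.
    return [
        fmt.format(environment=environment, extension=ext, region=region)
        for fmt in formats
        for ext in ("hcl", "tfvars")
    ]

print(gen_backend_filenames("prod", "us-east-1"))
# ['backend-prod-us-east-1.hcl', 'backend-prod-us-east-1.tfvars',
#  'backend-prod.hcl', 'backend-prod.tfvars',
#  'backend-us-east-1.hcl', 'backend-us-east-1.tfvars',
#  'backend.hcl', 'backend.tfvars']
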
@@ -150,12 +334,10 @@ def current_workspace(self) -> str: return self.terraform_workspace_show() @cached_property - def env_file(self) -> List[str]: + def env_file(self) -> list[str]: """Find the environment file for the module.""" - result: List[str] = [] - for name in gen_workspace_tfvars_files( - self.ctx.env.name, self.ctx.env.aws_region - ): + result: list[str] = [] + for name in gen_workspace_tfvars_files(self.ctx.env.name, self.ctx.env.aws_region): test_path = self.path / name if test_path.is_file(): result.append("-var-file=" + test_path.name) @@ -170,9 +352,7 @@ def skip(self) -> bool: self.logger.info( "skipped; tfvars file for this environment/region not found " "and no parameters provided -- looking for one of: %s", - ", ".join( - gen_workspace_tfvars_files(self.ctx.env.name, self.ctx.env.aws_region) - ), + ", ".join(gen_workspace_tfvars_files(self.ctx.env.name, self.ctx.env.aws_region)), ) return True @@ -188,14 +368,10 @@ def tf_bin(self) -> str: return self.tfenv.install(self.options.version) except ValueError: self.logger.debug("terraform install failed", exc_info=True) - self.logger.verbose( - "terraform version not specified; resorting to global install" - ) + self.logger.verbose("terraform version not specified; resorting to global install") if which("terraform"): return "terraform" - self.logger.error( - "terraform not available and a version to install not specified" - ) + self.logger.error("terraform not available and a version to install not specified") self.logger.error( "learn how to use Runway to manage Terraform versions at " "%s/page/terraform/advanced_features.html#version-management", @@ -231,8 +407,7 @@ def cleanup_dot_terraform(self) -> None: return self.logger.verbose( - ".terraform directory exists from a previous run; " - "removing some of its contents" + ".terraform directory exists from a previous run; removing some of its contents" ) for child in dot_terraform.iterdir(): if child.name == "plugins" and child.is_dir(): @@ -250,15 +425,14 @@ def destroy(self) -> None: self.run("destroy") def gen_command( - self, - command: Union[List[str], str, Tuple[str, ...]], - args_list: Optional[List[str]] = None, - ) -> List[str]: + self, command: list[str] | str | tuple[str, ...], args_list: list[str] | None = None + ) -> list[str]: """Generate Terraform command.""" - if isinstance(command, (list, tuple)): - cmd = [self.tf_bin, *command] - else: - cmd = [self.tf_bin, command] + cmd = ( + [self.tf_bin, *command] + if isinstance(command, (list, tuple)) + else [self.tf_bin, command] + ) cmd.extend(args_list or []) if self.ctx.no_color: cmd.append("-no-color") @@ -273,8 +447,7 @@ def handle_backend(self) -> None: """ if not self.tfenv.backend["type"]: self.logger.info( - "unable to determine backend for module; no special handling " - "will be applied" + "unable to determine backend for module; no special handling will be applied" ) return handler = f"_{self.tfenv.backend['type']}_backend_handler" @@ -282,12 +455,8 @@ def handle_backend(self) -> None: self.tfenv.backend["config"].update( self.options.backend_config.get_full_configuration() ) - self.logger.debug( - "full backend config: %s", json.dumps(self.tfenv.backend["config"]) - ) - self.logger.verbose( - "handling use of backend config: %s", self.tfenv.backend["type"] - ) + self.logger.debug("full backend config: %s", json.dumps(self.tfenv.backend["config"])) + self.logger.verbose("handling use of backend config: %s", self.tfenv.backend["type"]) self[f"_{self.tfenv.backend['type']}_backend_handler"]() else: 
self.logger.verbose( @@ -313,9 +482,7 @@ def _remote_backend_handler(self) -> None: self.options.write_auto_tfvars = True if self.tfenv.backend["config"]["workspaces"].get("prefix"): - self.logger.verbose( - "handling use of backend config: remote.workspaces.prefix" - ) + self.logger.verbose("handling use of backend config: remote.workspaces.prefix") self.ctx.env.vars.update({"TF_WORKSPACE": self.ctx.env.name}) self.logger.verbose( 'set environment variable "TF_WORKSPACE" to avoid prompt ' @@ -323,9 +490,7 @@ def _remote_backend_handler(self) -> None: ) if self.tfenv.backend["config"]["workspaces"].get("name"): - self.logger.verbose( - "handling use of backend config: remote.workspaces.name" - ) + self.logger.verbose("handling use of backend config: remote.workspaces.name") # this can't be set or it will cause errors self.ctx.env.vars.pop("TF_WORKSPACE", None) self.required_workspace = "default" @@ -343,9 +508,7 @@ def handle_parameters(self) -> None: if self.auto_tfvars.exists(): return - self.ctx.env.vars = update_env_vars_with_tf_var_values( - self.ctx.env.vars, self.parameters - ) + self.ctx.env.vars = update_env_vars_with_tf_var_values(self.ctx.env.vars, self.parameters) def init(self) -> None: """Run init.""" @@ -391,7 +554,7 @@ def _terraform_destroy_12(self) -> None: """ return run_module_command( - self.gen_command("destroy", ["-auto-approve"] + self.env_file), + self.gen_command("destroy", ["-auto-approve", *self.env_file]), env_vars=self.ctx.env.vars, logger=self.logger, ) @@ -403,7 +566,7 @@ def _terraform_destroy_15_2(self) -> None: """ return run_module_command( - self.gen_command("apply", ["-destroy", "-auto-approve"] + self.env_file), + self.gen_command("apply", ["-destroy", "-auto-approve", *self.env_file]), env_vars=self.ctx.env.vars, logger=self.logger, ) @@ -415,7 +578,7 @@ def _terraform_destroy_legacy(self) -> None: """ return run_module_command( - self.gen_command("destroy", ["-force"] + self.env_file), + self.gen_command("destroy", ["-force", *self.env_file]), env_vars=self.ctx.env.vars, logger=self.logger, ) @@ -441,9 +604,7 @@ def terraform_init(self) -> None: """ cmd = self.gen_command( "init", - ["-reconfigure"] - + self.options.backend_config.init_args - + self.options.args.init, + ["-reconfigure", *self.options.backend_config.init_args, *self.options.args.init], ) try: run_module_command( @@ -533,9 +694,7 @@ def terraform_workspace_show(self) -> str: """ self.logger.debug("using Terraform to get the current workspace") workspace = ( - subprocess.check_output( - self.gen_command(["workspace", "show"]), env=self.ctx.env.vars - ) + subprocess.check_output(self.gen_command(["workspace", "show"]), env=self.ctx.env.vars) .strip() .decode() ) @@ -553,7 +712,7 @@ def run(self, action: TerraformActionTypeDef) -> None: self.logger.info("init (in progress)") self.terraform_init() if self.current_workspace != self.required_workspace: - if re.compile(f"^[*\\s]\\s{self.required_workspace}$", re.M).search( + if re.compile(f"^[*\\s]\\s{self.required_workspace}$", re.MULTILINE).search( self.terraform_workspace_list() ): self.terraform_workspace_select(self.required_workspace) @@ -570,194 +729,3 @@ def run(self, action: TerraformActionTypeDef) -> None: finally: if self.auto_tfvars.exists(): self.auto_tfvars.unlink() - - -class TerraformOptions(ModuleOptions): - """Module options for Terraform. - - Attributes: - args: CLI arguments/options to pass to Terraform. - data: Options parsed into a data model. - env: Current deploy environment. - path: Module path. 
- version: String containing a Terraform version. - write_auto_tfvars: Optionally write parameters to a tfvars file instead - of updating variables. - - """ - - def __init__( - self, - data: RunwayTerraformModuleOptionsDataModel, - deploy_environment: DeployEnvironment, - path: Optional[Path] = None, - ) -> None: - """Instantiate class. - - Args: - deploy_environment: Current deploy environment. - data: Options parsed into a data model. - path: Module path. - - """ - self.args = data.args - self.data = data - self.env = deploy_environment - self.path = path or Path.cwd() - self.version = data.version - self.workspace = data.workspace or deploy_environment.name - self.write_auto_tfvars = data.write_auto_tfvars - - @cached_property - def backend_config(self) -> TerraformBackendConfig: - """Backend configuration options.""" - return TerraformBackendConfig.parse_obj( - deploy_environment=self.env, - obj=self.data.backend_config or {}, - path=self.path, - ) - - @classmethod - def parse_obj( - cls, - deploy_environment: DeployEnvironment, - obj: object, - path: Optional[Path] = None, - ) -> TerraformOptions: - """Parse options definition and return an options object. - - Args: - deploy_environment: Current deploy environment. - obj: Object to parse. - path: Module path. - - """ - return cls( - data=RunwayTerraformModuleOptionsDataModel.parse_obj(obj), - deploy_environment=deploy_environment, - path=path or Path.cwd(), - ) - - -class TerraformBackendConfig(ModuleOptions): - """Terraform backend configuration module options.""" - - def __init__( - self, - data: RunwayTerraformBackendConfigDataModel, - deploy_environment: DeployEnvironment, - path: Path, - ) -> None: - """Instantiate class. - - Args: - data: Options parsed into a data model. - deploy_environment: Current deploy environment. - path: Module path. - - """ - self.bucket = data.bucket - self.data = data - self.dynamodb_table = data.dynamodb_table - self.env = deploy_environment - self.path = path - if data and not data.region: - data.region = deploy_environment.aws_region # default to region from env - self.region = data.region - - @cached_property - def config_file(self) -> Optional[Path]: - """Backend configuration file.""" - return self.get_backend_file(self.path, self.env.name, self.env.aws_region) - - @cached_property - def init_args(self) -> List[str]: - """Return command line arguments for init.""" - result: List[str] = [] - for k, v in self.data.dict(exclude_none=True).items(): - result.extend(["-backend-config", f"{k}={v}"]) - if not result: - if self.config_file: - LOGGER.verbose("using backend config file: %s", self.config_file.name) - return [f"-backend-config={self.config_file.name}"] - LOGGER.info( - "backend file not found -- looking for one of: %s", - ", ".join( - self.gen_backend_filenames(self.env.name, self.env.aws_region) - ), - ) - return [] - LOGGER.info("using backend values from runway.yml") - LOGGER.debug("provided backend values: %s", json.dumps(result)) - return result - - def get_full_configuration(self) -> Dict[str, str]: - """Get full backend configuration.""" - if not self.config_file: - return self.data.dict(exclude_none=True) - result = cast(Dict[str, str], hcl.loads(self.config_file.read_text())) - result.update(self.data.dict(exclude_none=True)) - return result - - @classmethod - def get_backend_file( - cls, path: Path, environment: str, region: str - ) -> Optional[Path]: - """Determine Terraform backend file. - - Args: - path: Path to the module. - environment: Current deploy environment. 
- region: Current AWS region. - - """ - backend_filenames = cls.gen_backend_filenames(environment, region) - for name in backend_filenames: - test_path = path / name - if test_path.is_file(): - return test_path - return None - - @staticmethod - def gen_backend_filenames(environment: str, region: str) -> List[str]: - """Generate possible Terraform backend filenames. - - Args: - environment: Current deploy environment. - region : Current AWS region. - - """ - formats = [ - "backend-{environment}-{region}.{extension}", - "backend-{environment}.{extension}", - "backend-{region}.{extension}", - "backend.{extension}", - ] - result: List[str] = [] - for fmt in formats: - for ext in ["hcl", "tfvars"]: - result.append( - fmt.format(environment=environment, extension=ext, region=region) - ) - return result - - @classmethod - def parse_obj( - cls, - deploy_environment: DeployEnvironment, - obj: object, - path: Optional[Path] = None, - ) -> TerraformBackendConfig: - """Parse options definition and return an options object. - - Args: - deploy_environment: Current deploy environment. - obj: Object to parse. - path: Module path. - - """ - return cls( - data=RunwayTerraformBackendConfigDataModel.parse_obj(obj), - deploy_environment=deploy_environment, - path=path or Path.cwd(), - ) diff --git a/runway/module/utils.py b/runway/module/utils.py index f80a813e5..d686c5217 100644 --- a/runway/module/utils.py +++ b/runway/module/utils.py @@ -7,12 +7,12 @@ import platform import subprocess import sys -from pathlib import Path -from typing import TYPE_CHECKING, Dict, List, Optional, Union, cast +from typing import TYPE_CHECKING, cast from ..utils import which if TYPE_CHECKING: + from pathlib import Path from typing import Any from .._logging import RunwayLogger @@ -22,24 +22,21 @@ NPX_BIN = "npx.cmd" if platform.system().lower() == "windows" else "npx" -def format_npm_command_for_logging(command: List[str]) -> str: +def format_npm_command_for_logging(command: list[str]) -> str: """Convert npm command list to string for display to user.""" - if platform.system().lower() == "windows" and ( - command[0] == "npx.cmd" and command[1] == "-c" - ): + if platform.system().lower() == "windows" and (command[0] == "npx.cmd" and command[1] == "-c"): return f'npx.cmd -c "{" ".join(command[2:])}"' return " ".join(command) -# type hint quoted b/c pylint 2.11.1 raises unsubscriptable-object def generate_node_command( command: str, - command_opts: List[str], + command_opts: list[str], path: Path, *, - logger: Union[logging.Logger, "logging.LoggerAdapter[Any]"] = LOGGER, - package: Optional[str] = None, -) -> List[str]: + logger: logging.Logger | logging.LoggerAdapter[Any] = LOGGER, + package: str | None = None, +) -> list[str]: """Return node bin command list for subprocess execution. Args: @@ -75,12 +72,11 @@ def generate_node_command( return cmd_list -# type hint b/c pylint 2.11.1 raises unsubscriptable-object def run_module_command( - cmd_list: List[str], - env_vars: Dict[str, str], + cmd_list: list[str], + env_vars: dict[str, str], exit_on_error: bool = True, - logger: Union[logging.Logger, "logging.LoggerAdapter[Any]"] = LOGGER, + logger: logging.Logger | logging.LoggerAdapter[Any] = LOGGER, ) -> None: """Shell out to provisioner command. 
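
The TF_VAR_* mapping performed by update_env_vars_with_tf_var_values near the top of the runway/module/terraform.py diff is easiest to see in isolation. A minimal sketch with hypothetical input values: dicts render as HCL-style maps and lists as JSON, mirroring the function above.

import json


def tf_var_env(tf_vars: dict[str, object]) -> dict[str, str]:
    """Return a TF_VAR_* environment variable for each Terraform variable."""
    env: dict[str, str] = {}
    for key, val in tf_vars.items():
        if isinstance(val, dict):
            # HCL-style map: { key = "value", ... }
            inner = ", ".join(f'{k} = "{v}"' for k, v in val.items())
            env[f"TF_VAR_{key}"] = f"{{ {inner} }}"
        elif isinstance(val, list):
            # Lists are passed as JSON, which Terraform accepts.
            env[f"TF_VAR_{key}"] = json.dumps(val)
        else:
            env[f"TF_VAR_{key}"] = str(val)
    return env


print(tf_var_env({"region": "us-east-1", "tags": {"env": "prod"}, "azs": ["a", "b"]}))
# {'TF_VAR_region': 'us-east-1', 'TF_VAR_tags': '{ env = "prod" }', 'TF_VAR_azs': '["a", "b"]'}
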
@@ -107,12 +103,9 @@ def run_module_command( def use_npm_ci(path: Path) -> bool: """Return true if npm ci should be used in lieu of npm install.""" # https://docs.npmjs.com/cli/ci#description - with open(os.devnull, "w", encoding="utf-8") as fnull: + with open(os.devnull, "w", encoding="utf-8") as fnull: # noqa: PTH123 if ( - (path / "package-lock.json").is_file() - or (path / "npm-shrinkwrap.json").is_file() - ) and subprocess.call( - [NPM_BIN, "ci", "-h"], stdout=fnull, stderr=subprocess.STDOUT - ) == 0: + (path / "package-lock.json").is_file() or (path / "npm-shrinkwrap.json").is_file() + ) and subprocess.call([NPM_BIN, "ci", "-h"], stdout=fnull, stderr=subprocess.STDOUT) == 0: return True return False diff --git a/runway/s3_utils.py b/runway/s3_utils.py index e0a72abd1..ef84b1998 100644 --- a/runway/s3_utils.py +++ b/runway/s3_utils.py @@ -6,39 +6,41 @@ import os import tempfile import zipfile -from typing import TYPE_CHECKING, Any, Dict, Iterator, Optional, Sequence, cast +from pathlib import Path +from typing import TYPE_CHECKING, Any, cast import boto3 from botocore.exceptions import ClientError if TYPE_CHECKING: + from collections.abc import Iterator, Sequence + from mypy_boto3_s3.client import S3Client from mypy_boto3_s3.service_resource import S3ServiceResource - from mypy_boto3_s3.type_defs import ObjectTypeDef + from mypy_boto3_s3.type_defs import ( + ListObjectsV2RequestListObjectsV2PaginateTypeDef, + ObjectTypeDef, + ) from ._logging import RunwayLogger LOGGER = cast("RunwayLogger", logging.getLogger(__name__)) -def _get_client( - session: Optional[boto3.Session] = None, region: Optional[str] = None -) -> S3Client: +def _get_client(session: boto3.Session | None = None, region: str | None = None) -> S3Client: """Get S3 boto client.""" return session.client("s3") if session else boto3.client("s3", region_name=region) def _get_resource( - session: Optional[boto3.Session] = None, region: Optional[str] = None + session: boto3.Session | None = None, region: str | None = None ) -> S3ServiceResource: """Get S3 boto resource.""" - return ( - session.resource("s3") if session else boto3.resource("s3", region_name=region) - ) + return session.resource("s3") if session else boto3.resource("s3", region_name=region) def purge_and_delete_bucket( - bucket_name: str, region: str = "us-east-1", session: Optional[boto3.Session] = None + bucket_name: str, region: str = "us-east-1", session: boto3.Session | None = None ) -> None: """Delete all objects and versions in bucket, then delete bucket.""" purge_bucket(bucket_name, region, session) @@ -46,7 +48,7 @@ def purge_and_delete_bucket( def purge_bucket( - bucket_name: str, region: str = "us-east-1", session: Optional[boto3.Session] = None + bucket_name: str, region: str = "us-east-1", session: boto3.Session | None = None ) -> None: """Delete all objects and versions in bucket.""" if does_bucket_exist(bucket_name, region, session): @@ -58,7 +60,7 @@ def purge_bucket( def delete_bucket( - bucket_name: str, region: str = "us-east-1", session: Optional[boto3.Session] = None + bucket_name: str, region: str = "us-east-1", session: boto3.Session | None = None ) -> None: """Delete bucket.""" if does_bucket_exist(bucket_name, region, session): @@ -72,7 +74,7 @@ def delete_bucket( def does_bucket_exist( - bucket_name: str, region: str = "us-east-1", session: Optional[boto3.Session] = None + bucket_name: str, region: str = "us-east-1", session: boto3.Session | None = None ) -> bool: """Check if bucket exists in S3.""" s3_resource = _get_resource(session, 
region) @@ -84,26 +86,22 @@ def does_bucket_exist( LOGGER.info('bucket "%s" does not exist', bucket_name) return False if exc.response["Error"]["Message"] == "Forbidden": - LOGGER.exception( - 'access denied for bucket "%s" (permissions?)', bucket_name - ) + LOGGER.exception('access denied for bucket "%s" (permissions?)', bucket_name) raise return False def ensure_bucket_exists( - bucket_name: str, region: str = "us-east-1", session: Optional[boto3.Session] = None + bucket_name: str, region: str = "us-east-1", session: boto3.Session | None = None ) -> None: """Ensure S3 bucket exists.""" if not does_bucket_exist(bucket_name, region, session): LOGGER.info('creating bucket "%s" (in progress)', bucket_name) s3_client = _get_client(session, region) if region == "us-east-1": - create_bucket_opts: Dict[str, Any] = {} + create_bucket_opts: dict[str, Any] = {} else: - create_bucket_opts = { - "CreateBucketConfiguration": {"LocationConstraint": region} - } + create_bucket_opts = {"CreateBucketConfiguration": {"LocationConstraint": region}} s3_client.create_bucket(Bucket=bucket_name, **create_bucket_opts) # sometimes when creating the bucket it can take a few moments before @@ -116,9 +114,7 @@ def ensure_bucket_exists( s3_client.put_bucket_encryption( Bucket=bucket_name, ServerSideEncryptionConfiguration={ - "Rules": [ - {"ApplyServerSideEncryptionByDefault": {"SSEAlgorithm": "AES256"}} - ] + "Rules": [{"ApplyServerSideEncryptionByDefault": {"SSEAlgorithm": "AES256"}}] }, ) LOGGER.verbose('enabled encryption for bucket "%s"', bucket_name) @@ -127,7 +123,7 @@ def ensure_bucket_exists( def does_s3_object_exist( bucket: str, key: str, - session: Optional[boto3.Session] = None, + session: boto3.Session | None = None, region: str = "us-east-1", ) -> bool: """Determine if object exists on s3.""" @@ -143,18 +139,14 @@ def does_s3_object_exist( return True -def upload( - bucket: str, key: str, filename: str, session: Optional[boto3.Session] = None -) -> None: +def upload(bucket: str, key: str, filename: str, session: boto3.Session | None = None) -> None: """Upload file to S3 bucket.""" s3_client = _get_client(session) LOGGER.info("uploading %s to s3://%s/%s...", filename, bucket, key) s3_client.upload_file(Filename=filename, Bucket=bucket, Key=key) -def download( - bucket: str, key: str, file_path: str, session: Optional[boto3.Session] = None -) -> str: +def download(bucket: str, key: str, file_path: str, session: boto3.Session | None = None) -> str: """Download a file from S3 to the given path.""" s3_client = _get_client(session) @@ -164,7 +156,7 @@ def download( def download_and_extract_to_mkdtemp( - bucket: str, key: str, session: Optional[boto3.Session] = None + bucket: str, key: str, session: boto3.Session | None = None ) -> str: """Download zip archive and extract it to temporary directory.""" filedes, temp_file = tempfile.mkstemp() @@ -174,7 +166,7 @@ def download_and_extract_to_mkdtemp( output_dir = tempfile.mkdtemp() with zipfile.ZipFile(temp_file, "r") as zip_ref: zip_ref.extractall(output_dir) - os.remove(temp_file) + Path(temp_file).unlink() LOGGER.verbose("extracted %s to %s", temp_file, output_dir) return output_dir @@ -183,7 +175,7 @@ def get_matching_s3_objects( bucket: str, prefix: Sequence[str] = "", suffix: str = "", - session: Optional[boto3.Session] = None, + session: boto3.Session | None = None, ) -> Iterator[ObjectTypeDef]: """Generate objects in an S3 bucket. 
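
A self-contained sketch of the list_objects_v2 pagination pattern that get_matching_s3_objects uses (its body follows in the next hunk, where page.get("Contents", []) replaces the old try/except KeyError, since empty pages simply omit the "Contents" key). The bucket name and filters here are hypothetical, and valid AWS credentials are assumed.

from collections.abc import Iterator

import boto3


def iter_matching_keys(bucket: str, prefix: str = "", suffix: str = "") -> Iterator[str]:
    """Yield keys in an S3 bucket that match a prefix and suffix."""
    paginator = boto3.client("s3").get_paginator("list_objects_v2")
    for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
        # An empty result page has no "Contents" key at all.
        for obj in page.get("Contents", []):
            if obj["Key"].endswith(suffix):
                yield obj["Key"]


for key in iter_matching_keys("example-bucket", prefix="logs/", suffix=".json"):
    print(key)
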
@@ -199,7 +191,7 @@ def get_matching_s3_objects( s3_client = _get_client(session) paginator = s3_client.get_paginator("list_objects_v2") - kwargs = {"Bucket": bucket} + kwargs: ListObjectsV2RequestListObjectsV2PaginateTypeDef = {"Bucket": bucket} # We can pass the prefix directly to the S3 API. If the user has passed # a tuple or list of prefixes, we go through them one by one. @@ -208,12 +200,7 @@ def get_matching_s3_objects( kwargs["Prefix"] = key_prefix for page in paginator.paginate(**kwargs): - try: - contents = page["Contents"] - except KeyError: - return - - for obj in contents: + for obj in page.get("Contents", []): if "Key" in obj and obj["Key"].endswith(suffix): yield obj @@ -222,7 +209,7 @@ def get_matching_s3_keys( bucket: str, prefix: str = "", suffix: str = "", - session: Optional[boto3.Session] = None, + session: boto3.Session | None = None, ) -> Iterator[str]: """Generate the keys in an S3 bucket. diff --git a/runway/sources/git.py b/runway/sources/git.py index 4c287d1b4..c39b69a2e 100644 --- a/runway/sources/git.py +++ b/runway/sources/git.py @@ -1,11 +1,13 @@ """'Git type Path Source.""" +from __future__ import annotations + import logging import shutil import subprocess import tempfile from pathlib import Path -from typing import Any, Dict, Optional +from typing import Any from .source import Source @@ -23,7 +25,7 @@ class Git(Source): def __init__( self, *, - arguments: Optional[Dict[str, str]] = None, + arguments: dict[str, str] | None = None, location: str = "", uri: str = "", **kwargs: Any, @@ -38,6 +40,7 @@ def __init__( module resides. Leaving this as an empty string, ``/``, or ``./`` will have runway look in the root folder. uri: The uniform resource identifier that targets the remote git repository + **kwargs: Arbitrary keyword arguments. """ self.args = arguments or {} @@ -48,7 +51,7 @@ def __init__( def fetch(self) -> Path: """Retrieve the git repository from it's remote location.""" - from git.repo import Repo # pylint: disable=import-outside-toplevel + from git.repo import Repo ref = self.__determine_git_ref() dir_name = "_".join([self.sanitize_git_path(self.uri), ref]) @@ -91,9 +94,7 @@ def __determine_git_ls_remote_ref(self) -> str: def __determine_git_ref(self) -> str: """Determine the git reference code.""" - ref_config_keys = sum( - bool(self.args.get(i)) for i in ["commit", "tag", "branch"] - ) + ref_config_keys = sum(bool(self.args.get(i)) for i in ["commit", "tag", "branch"]) if ref_config_keys > 1: raise ValueError( "Fetching remote git sources failed: conflicting revisions " diff --git a/runway/sources/source.py b/runway/sources/source.py index 06daf757a..4e5c520b4 100644 --- a/runway/sources/source.py +++ b/runway/sources/source.py @@ -5,9 +5,11 @@ """ +from __future__ import annotations + import logging from pathlib import Path -from typing import Any, Union +from typing import Any LOGGER = logging.getLogger(__name__) @@ -29,12 +31,13 @@ class Source: cache_dir: Path - def __init__(self, *, cache_dir: Union[Path, str], **_: Any): + def __init__(self, *, cache_dir: Path | str, **_: Any) -> None: """Source. Args: cache_dir: The directory where the given remote resource should be cached. + **kwargs: Arbitrary keyword arguments. 
""" self.cache_dir = cache_dir if isinstance(cache_dir, Path) else Path(cache_dir) diff --git a/runway/templates/.flake8 b/runway/templates/.flake8 deleted file mode 100644 index ed384d4ba..000000000 --- a/runway/templates/.flake8 +++ /dev/null @@ -1,4 +0,0 @@ -[flake8] -exclude = - node_modules, - .serverless diff --git a/runway/templates/.pylintrc b/runway/templates/.pylintrc deleted file mode 100644 index 0b852af53..000000000 --- a/runway/templates/.pylintrc +++ /dev/null @@ -1,549 +0,0 @@ -[MASTER] - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code -extension-pkg-whitelist= - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. -jobs=1 - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Specify a configuration file. -#rcfile= - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED -confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once).You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". 
If you want to run only the classes checker, but have -# no Warning level messages displayed, use"--disable=all --enable=classes -# --disable=W" -disable=line-too-long, - import-error, - unused-import, - print-statement, - parameter-unpacking, - unpacking-in-except, - old-raise-syntax, - backtick, - long-suffix, - old-ne-operator, - old-octal-literal, - import-star-module-level, - non-ascii-bytes-literal, - invalid-unicode-literal, - raw-checker-failed, - bad-inline-option, - locally-disabled, - locally-enabled, - file-ignored, - suppressed-message, - useless-suppression, - deprecated-pragma, - apply-builtin, - basestring-builtin, - buffer-builtin, - cmp-builtin, - coerce-builtin, - execfile-builtin, - file-builtin, - long-builtin, - raw_input-builtin, - reduce-builtin, - standarderror-builtin, - unicode-builtin, - xrange-builtin, - coerce-method, - delslice-method, - getslice-method, - setslice-method, - no-absolute-import, - old-division, - dict-iter-method, - dict-view-method, - next-method-called, - metaclass-assignment, - indexing-exception, - raising-string, - reload-builtin, - oct-method, - hex-method, - nonzero-method, - cmp-method, - input-builtin, - round-builtin, - intern-builtin, - unichr-builtin, - map-builtin-not-iterating, - zip-builtin-not-iterating, - range-builtin-not-iterating, - filter-builtin-not-iterating, - using-cmp-argument, - eq-without-hash, - div-method, - idiv-method, - rdiv-method, - exception-message-attribute, - invalid-str-codec, - sys-max-int, - bad-python3-import, - deprecated-string-function, - deprecated-str-translate-call, - deprecated-itertools-function, - deprecated-types-field, - next-method-defined, - dict-items-not-iterating, - dict-keys-not-iterating, - dict-values-not-iterating, - deprecated-operator-function, - deprecated-urllib-function, - xreadlines-attribute, - deprecated-sys-function, - exception-escape, - comprehension-escape - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio).You can also give a reporter class, eg -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. 
-never-returning-functions=optparse.Values,sys.exit - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - - -[LOGGING] - -# Logging modules to check that the string format arguments are in logging -# function parameter format -logging-modules=logging - - -[BASIC] - -# Naming style matching correct argument names -argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style -#argument-rgx= - -# Naming style matching correct attribute names -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Naming style matching correct class attribute names -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style -#class-attribute-rgx= - -# Naming style matching correct class names -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming-style -#class-rgx= - -# Naming style matching correct constant names -const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. Overrides const-naming- -# style -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. 
-docstring-min-length=-1 - -# Naming style matching correct function names -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma -good-names=i, - j, - k, - ex, - Run, - _ - -# Include a hint for the correct naming format with invalid-name -include-naming-hint=no - -# Naming style matching correct inline iteration names -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style -#inlinevar-rgx= - -# Naming style matching correct method names -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style -#method-rgx= - -# Naming style matching correct module names -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming- -# style -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -property-classes=abc.abstractproperty - -# Naming style matching correct variable names -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style -#variable-rgx= - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes -max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. -spelling-store-unknown-words=no - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expectedly -# not used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. 
-init-import=no
-
-# List of qualified module names which can have objects that can redefine
-# builtins.
-redefining-builtins-modules=six.moves,past.builtins,future.builtins,io,builtins
-
-
-[FORMAT]
-
-# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
-expected-line-ending-format=
-
-# Regexp for a line that is allowed to be longer than the limit.
-ignore-long-lines=^\s*(# )?<?https?://\S+>?$
-
-# Number of spaces of indent required inside a hanging or continued line.
-indent-after-paren=4
-
-# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
-# tab).
-indent-string='    '
-
-# Maximum number of characters on a single line.
-max-line-length=100
-
-# Maximum number of lines in a module
-max-module-lines=1000
-
-# List of optional constructs for which whitespace checking is disabled. `dict-
-# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
-# `trailing-comma` allows a space between comma and closing bracket: (a, ).
-# `empty-line` allows space-only lines.
-no-space-check=trailing-comma,
-               dict-separator
-
-# Allow the body of a class to be on the same line as the declaration if body
-# contains single statement.
-single-line-class-stmt=no
-
-# Allow the body of an if to be on the same line as the test if there is no
-# else.
-single-line-if-stmt=no
-
-
-[CLASSES]
-
-# List of method names used to declare (i.e. assign) instance attributes.
-defining-attr-methods=__init__,
-                      __new__,
-                      setUp
-
-# List of member names, which should be excluded from the protected access
-# warning.
-exclude-protected=_asdict,
-                  _fields,
-                  _replace,
-                  _source,
-                  _make
-
-# List of valid names for the first argument in a class method.
-valid-classmethod-first-arg=cls
-
-# List of valid names for the first argument in a metaclass class method.
-valid-metaclass-classmethod-first-arg=mcs
-
-
-[DESIGN]
-
-# Maximum number of arguments for function / method
-max-args=5
-
-# Maximum number of attributes for a class (see R0902).
-max-attributes=7
-
-# Maximum number of boolean expressions in a if statement
-max-bool-expr=5
-
-# Maximum number of branch for function / method body
-max-branches=12
-
-# Maximum number of locals for function / method body
-max-locals=15
-
-# Maximum number of parents for a class (see R0901).
-max-parents=7
-
-# Maximum number of public methods for a class (see R0904).
-max-public-methods=20
-
-# Maximum number of return / yield for function / method body
-max-returns=6
-
-# Maximum number of statements in function / method body
-max-statements=50
-
-# Minimum number of public methods for a class (see R0903).
-min-public-methods=2
-
-
-[IMPORTS]
-
-# Allow wildcard imports from modules that define __all__.
-allow-wildcard-with-all=no
-
-# Analyse import fallback blocks. This can be used to support both Python 2 and
-# 3 compatible code, which means that the block might have code that exists
-# only in one or another interpreter, leading to false positives when analysed.
-analyse-fallback-blocks=no
-
-# Deprecated modules which should not be used, separated by a comma
-deprecated-modules=optparse,tkinter.tix
-
-# Create a graph of external dependencies in the given file (report RP0402 must
-# not be disabled)
-ext-import-graph=
-
-# Create a graph of every (i.e.
internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception" -overgeneral-exceptions=Exception diff --git a/runway/templates/cdk-csharp/README.md b/runway/templates/cdk-csharp/README.md index b1fdeb70b..10160c5c5 100644 --- a/runway/templates/cdk-csharp/README.md +++ b/runway/templates/cdk-csharp/README.md @@ -1,4 +1,3 @@ - # CDK HelloWorld Welcome to your CDK .NET project! @@ -8,13 +7,13 @@ a stack (`HelloStack`) which also uses a user-defined construct (`HelloConstruct The `cdk.json` file tells the CDK Toolkit how to execute your app. It uses the `dotnet` CLI to do this. -# Useful commands +## Useful commands -* `dotnet build src` compile this app -* `cdk ls` list all stacks in the app -* `cdk synth` emits the synthesized CloudFormation template -* `cdk deploy` deploy this stack to your default AWS account/region -* `cdk diff` compare deployed stack with current state -* `cdk docs` open CDK documentation +- `dotnet build src` compile this app +- `cdk ls` list all stacks in the app +- `cdk synth` emits the synthesized CloudFormation template +- `cdk deploy` deploy this stack to your default AWS account/region +- `cdk diff` compare deployed stack with current state +- `cdk docs` open CDK documentation Enjoy! diff --git a/runway/templates/cdk-csharp/package.json b/runway/templates/cdk-csharp/package.json index a1dbc2e70..39a11b1ae 100644 --- a/runway/templates/cdk-csharp/package.json +++ b/runway/templates/cdk-csharp/package.json @@ -1,12 +1,12 @@ { - "name": "sampleapp", - "version": "1.0.0", - "description": "Sample CDK Project", - "author": "Onica", - "homepage": "https://www.onica.com", - "private": true, - "main": "app.py", - "devDependencies": { - "aws-cdk": "^0.27.0" - } + "author": "Onica", + "description": "Sample CDK Project", + "devDependencies": { + "aws-cdk": "^0.27.0" + }, + "homepage": "https://www.onica.com", + "main": "app.py", + "name": "sampleapp", + "private": true, + "version": "1.0.0" } diff --git a/runway/templates/cdk-csharp/runway.module.yml b/runway/templates/cdk-csharp/runway.module.yml index 568f9bd3f..7fac5323c 100644 --- a/runway/templates/cdk-csharp/runway.module.yml +++ b/runway/templates/cdk-csharp/runway.module.yml @@ -1,4 +1,3 @@ ---- options: build_steps: - dotnet build src diff --git a/runway/templates/cdk-py/__init__.py b/runway/templates/cdk-py/__init__.py new file mode 100644 index 000000000..0349622bd --- /dev/null +++ b/runway/templates/cdk-py/__init__.py @@ -0,0 +1 @@ +"""PLACEHOLDER.""" diff --git a/runway/templates/cdk-py/app.py b/runway/templates/cdk-py/app.py index febe08d8e..e4582376b 100644 --- a/runway/templates/cdk-py/app.py +++ b/runway/templates/cdk-py/app.py @@ -1,5 +1,6 @@ -#!/usr/bin/env python3 """Sample app.""" + +# ruff: noqa from aws_cdk import core from hello.hello_stack import MyStack diff --git a/runway/templates/cdk-py/hello/hello_construct.py b/runway/templates/cdk-py/hello/hello_construct.py index cf2e5ff49..709d59696 100644 --- a/runway/templates/cdk-py/hello/hello_construct.py +++ 
b/runway/templates/cdk-py/hello/hello_construct.py @@ -1,5 +1,6 @@ """Sample app.""" +# ruff: noqa from aws_cdk import aws_iam as iam from aws_cdk import aws_s3 as s3 from aws_cdk import core @@ -17,7 +18,7 @@ def __init__(self, scope: core.Construct, id: str, num_buckets: int) -> None: """Instantiate class.""" super().__init__(scope, id) self._buckets = [] - for i in range(0, num_buckets): + for i in range(num_buckets): self._buckets.append(s3.Bucket(self, f"Bucket-{i}")) def grant_read(self, principal: iam.IPrincipal): diff --git a/runway/templates/cdk-py/hello/hello_stack.py b/runway/templates/cdk-py/hello/hello_stack.py index e90281fba..d970a8000 100644 --- a/runway/templates/cdk-py/hello/hello_stack.py +++ b/runway/templates/cdk-py/hello/hello_stack.py @@ -1,5 +1,6 @@ """Hello stack.""" +# ruff: noqa from aws_cdk import aws_iam as iam from aws_cdk import aws_sns as sns from aws_cdk import aws_sns_subscriptions as subs @@ -16,9 +17,7 @@ def __init__(self, scope: core.Construct, id: str, **kwargs) -> None: """Instantiate class.""" super().__init__(scope, id, **kwargs) - queue = sqs.Queue( - self, "MyFirstQueue", visibility_timeout=core.Duration.seconds(300) - ) + queue = sqs.Queue(self, "MyFirstQueue", visibility_timeout=core.Duration.seconds(300)) topic = sns.Topic(self, "MyFirstTopic", display_name="My First Topic") diff --git a/runway/templates/cdk-py/package.json b/runway/templates/cdk-py/package.json index 25b829b2f..09eff9f88 100644 --- a/runway/templates/cdk-py/package.json +++ b/runway/templates/cdk-py/package.json @@ -1,12 +1,12 @@ { - "name": "sampleapp", - "version": "1.0.0", - "description": "Sample CDK Project", "author": "Onica", - "homepage": "https://www.onica.com", - "private": true, - "main": "app.py", + "description": "Sample CDK Project", "devDependencies": { "aws-cdk": "^1.13.0" - } + }, + "homepage": "https://www.onica.com", + "main": "app.py", + "name": "sampleapp", + "private": true, + "version": "1.0.0" } diff --git a/runway/templates/cdk-py/pyproject.toml b/runway/templates/cdk-py/pyproject.toml index f92603924..fdced52e0 100644 --- a/runway/templates/cdk-py/pyproject.toml +++ b/runway/templates/cdk-py/pyproject.toml @@ -1,21 +1,21 @@ [tool.poetry] name = "runway-sample-cdk-py" version = "0.0.0" -description = "Runway Sample" authors = ["Onica Group LLC "] +description = "Runway Sample" license = "Apache-2.0" [tool.poetry.dependencies] python = "^3.9" [tool.poetry.dev-dependencies] -"aws-cdk.core" = "^1.13" "aws-cdk.aws_iam" = "^1.13" -"aws-cdk.aws_sqs" = "^1.13" +"aws-cdk.aws_s3" = "^1.13" "aws-cdk.aws_sns" = "^1.13" "aws-cdk.aws_sns_subscriptions" = "^1.13" -"aws-cdk.aws_s3" = "^1.13" +"aws-cdk.aws_sqs" = "^1.13" +"aws-cdk.core" = "^1.13" [build-system] -requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" +requires = ["poetry-core>=1.0.0"] diff --git a/runway/templates/cdk-py/runway.module.yml b/runway/templates/cdk-py/runway.module.yml index 16dedbc2b..11784bb82 100644 --- a/runway/templates/cdk-py/runway.module.yml +++ b/runway/templates/cdk-py/runway.module.yml @@ -1,4 +1,3 @@ ---- options: build_steps: - poetry install diff --git a/runway/templates/cdk-tsc/README.md b/runway/templates/cdk-tsc/README.md index da3d2c543..cceca9130 100644 --- a/runway/templates/cdk-tsc/README.md +++ b/runway/templates/cdk-tsc/README.md @@ -1,7 +1,7 @@ # Useful commands -* `npm run build` compile typescript to js -* `npm run watch` watch for changes and compile -* `npx cdk deploy` deploy this stack to your default AWS account/region -* `npx cdk diff` 
compare deployed stack with current state -* `npx cdk synth` emits the synthesized CloudFormation template +- `npm run build` compile typescript to js +- `npm run watch` watch for changes and compile +- `npx cdk deploy` deploy this stack to your default AWS account/region +- `npx cdk diff` compare deployed stack with current state +- `npx cdk synth` emits the synthesized CloudFormation template diff --git a/runway/templates/cdk-tsc/package.json b/runway/templates/cdk-tsc/package.json index 2d58e675f..164400f13 100644 --- a/runway/templates/cdk-tsc/package.json +++ b/runway/templates/cdk-tsc/package.json @@ -1,23 +1,23 @@ { - "name": "sampleapp", - "version": "0.1.0", "bin": { "newbase": "bin/sample.js" }, - "scripts": { - "build": "tsc", - "watch": "tsc -w", - "cdk": "cdk" + "dependencies": { + "@aws-cdk/core": "^1.1.0", + "source-map-support": "^0.5.9" }, "devDependencies": { "@types/node": "^10.17.5", "@types/source-map-support": "^0.5.0", - "typescript": "^3.3.3333", + "aws-cdk": "^1.1.0", "ts-node": "^8.1.0", - "aws-cdk": "^1.1.0" + "typescript": "^3.3.3333" }, - "dependencies": { - "@aws-cdk/core": "^1.1.0", - "source-map-support": "^0.5.9" - } + "name": "sampleapp", + "scripts": { + "build": "tsc", + "cdk": "cdk", + "watch": "tsc -w" + }, + "version": "0.1.0" } diff --git a/runway/templates/cdk-tsc/runway.module.yml b/runway/templates/cdk-tsc/runway.module.yml index 01f3f5109..661ba5426 100644 --- a/runway/templates/cdk-tsc/runway.module.yml +++ b/runway/templates/cdk-tsc/runway.module.yml @@ -1,4 +1,3 @@ ---- options: build_steps: - npm run build diff --git a/runway/templates/k8s-cfn-repo/README.md b/runway/templates/k8s-cfn-repo/README.md index 6252ebb4d..020f0e6ca 100644 --- a/runway/templates/k8s-cfn-repo/README.md +++ b/runway/templates/k8s-cfn-repo/README.md @@ -22,7 +22,7 @@ This repo represents a sample infrastructure deployment of EKS, featuring: ### Deployment 1. Update the VPC-id & subnet ids in [runway.yml](./runway.yml) to reflect your VPC & private subnets. -2. Deploy to the **dev** environment (`runway deploy -e dev`). This will take some time to complete. +1. Deploy to the **dev** environment (`runway deploy -e dev`). This will take some time to complete. ### Post-Deployment diff --git a/runway/templates/k8s-cfn-repo/k8s-master.cfn/k8s_hooks/auth_map.py b/runway/templates/k8s-cfn-repo/k8s-master.cfn/k8s_hooks/auth_map.py index a90a85184..24880132c 100644 --- a/runway/templates/k8s-cfn-repo/k8s-master.cfn/k8s_hooks/auth_map.py +++ b/runway/templates/k8s-cfn-repo/k8s-master.cfn/k8s_hooks/auth_map.py @@ -5,7 +5,7 @@ import logging import os from pathlib import Path -from typing import TYPE_CHECKING, Any, List +from typing import TYPE_CHECKING, Any from runway.cfngin.lookups.handlers.output import OutputLookup @@ -37,8 +37,8 @@ def get_principal_arn(context: CfnginContext) -> str: def generate( - context: CfnginContext, *, filename: str, path: List[str], stack: str, **_: Any -): + context: CfnginContext, *, filename: str, path: list[str], stack: str, **_: Any +) -> bool: """Generate an EKS auth_map for worker connection. 
Args: @@ -53,25 +53,23 @@ def generate( """ overlay_path = Path(*path) file_path = overlay_path / filename - if os.path.exists(filename): + if os.path.exists(filename): # noqa: PTH110 LOGGER.info("%s file present; skipping initial creation", file_path) return True LOGGER.info("Creating auth_map at %s", file_path) overlay_path.mkdir(parents=True, exist_ok=True) principal_arn = get_principal_arn(context) - node_instancerole_arn = OutputLookup.handle( - f"{stack}::NodeInstanceRoleArn", context=context - ) + node_instancerole_arn = OutputLookup.handle(f"{stack}::NodeInstanceRoleArn", context=context) aws_authmap_template = (Path(__file__).parent / "aws-auth-cm.yaml").read_text() file_path.write_text( - aws_authmap_template.replace( - "INSTANCEROLEARNHERE", node_instancerole_arn - ).replace("ORIGINALPRINCIPALARNHERE", principal_arn) + aws_authmap_template.replace("INSTANCEROLEARNHERE", node_instancerole_arn).replace( + "ORIGINALPRINCIPALARNHERE", principal_arn + ) ) return True -def remove(*, path: List[str], filename: str, **_: Any) -> bool: +def remove(*, path: list[str], filename: str, **_: Any) -> bool: """Remove an EKS auth_map for worker connection. For use after destroying a cluster. diff --git a/runway/templates/k8s-cfn-repo/k8s-master.cfn/k8s_hooks/aws-auth-cm.yaml b/runway/templates/k8s-cfn-repo/k8s-master.cfn/k8s_hooks/aws-auth-cm.yaml index 3d9ecdaa8..d7a47b2bb 100644 --- a/runway/templates/k8s-cfn-repo/k8s-master.cfn/k8s_hooks/aws-auth-cm.yaml +++ b/runway/templates/k8s-cfn-repo/k8s-master.cfn/k8s_hooks/aws-auth-cm.yaml @@ -1,4 +1,3 @@ ---- apiVersion: v1 kind: ConfigMap metadata: diff --git a/runway/templates/k8s-cfn-repo/k8s-master.cfn/k8s_hooks/awscli.py b/runway/templates/k8s-cfn-repo/k8s-master.cfn/k8s_hooks/awscli.py index ae5da1c9d..b01b74ec6 100644 --- a/runway/templates/k8s-cfn-repo/k8s-master.cfn/k8s_hooks/awscli.py +++ b/runway/templates/k8s-cfn-repo/k8s-master.cfn/k8s_hooks/awscli.py @@ -22,6 +22,7 @@ def aws_eks_update_kubeconfig(context: CfnginContext, **kwargs: Any) -> bool: Args: context: Context object. + **kwargs: Arbitrary keyword arguments. Returns: boolean for whether or not the hook succeeded @@ -34,9 +35,7 @@ def aws_eks_update_kubeconfig(context: CfnginContext, **kwargs: Any) -> bool: f"{kwargs['stack']}::EksClusterName", context=context ) LOGGER.info("writing kubeconfig...") - subprocess.check_output( - ["aws", "eks", "update-kubeconfig", "--name", eks_cluster_name] - ) + subprocess.check_output(["aws", "eks", "update-kubeconfig", "--name", eks_cluster_name]) LOGGER.info("kubeconfig written successfully...") # The newly-generated kubeconfig will have introduced a dependency on the @@ -45,7 +44,7 @@ def aws_eks_update_kubeconfig(context: CfnginContext, **kwargs: Any) -> bool: if not os.environ.get("PIPENV_ACTIVE") and ( not os.environ.get("VIRTUAL_ENV") and not which("aws") ): - print("", file=sys.stderr) # noqa: T201 + print(file=sys.stderr) # noqa: T201 print( # noqa: T201 "Warning: the generated kubeconfig uses the aws-cli for " "authentication, but it is not found in your environment. 
", diff --git a/runway/templates/k8s-cfn-repo/k8s-master.cfn/k8s_hooks/bootstrap.py b/runway/templates/k8s-cfn-repo/k8s-master.cfn/k8s_hooks/bootstrap.py index 2b5dc58fa..90b5ddbe3 100644 --- a/runway/templates/k8s-cfn-repo/k8s-master.cfn/k8s_hooks/bootstrap.py +++ b/runway/templates/k8s-cfn-repo/k8s-master.cfn/k8s_hooks/bootstrap.py @@ -3,7 +3,6 @@ from __future__ import annotations import logging -import os import shutil from pathlib import Path from typing import TYPE_CHECKING, Any @@ -16,13 +15,13 @@ LOGGER = logging.getLogger(__name__) -def copy_template_to_env(path: Path, env: str, region: str): +def copy_template_to_env(path: Path, env: str, region: str) -> None: """Copy k8s module template into new environment directory.""" overlays_dir = path / "overlays" template_dir = overlays_dir / "template" env_dir = overlays_dir / env if template_dir.is_dir(): - if env_dir.is_dir() or (os.path.isdir(f"{env_dir}-{region}")): + if env_dir.is_dir() or (Path(f"{env_dir}-{region}").is_dir()): LOGGER.info( 'Bootstrap of k8s module at "%s" skipped; module ' "already has a config for this environment", @@ -30,7 +29,7 @@ def copy_template_to_env(path: Path, env: str, region: str): ) else: LOGGER.info( - 'Copying overlay template at "%s" to new ' 'environment directory "%s"', + 'Copying overlay template at "%s" to new environment directory "%s"', template_dir, env_dir, ) @@ -44,10 +43,8 @@ def copy_template_to_env(path: Path, env: str, region: str): templated_file_path = env_dir / i if templated_file_path.is_file(): filedata = templated_file_path.read_text() - if "REPLACEMEENV" in filedata: - templated_file_path.write_text( - filedata.replace("REPLACEMEENV", env) - ) + if "REPLACE_ME_ENV" in filedata: + templated_file_path.write_text(filedata.replace("REPLACE_ME_ENV", env)) else: LOGGER.info( 'Skipping bootstrap of k8s module at "%s"; no template directory present', @@ -55,7 +52,7 @@ def copy_template_to_env(path: Path, env: str, region: str): ) -def create_runway_environments(*, context: CfnginContext, namespace: str, **_: Any): +def create_runway_environments(*, context: CfnginContext, namespace: str, **_: Any) -> bool: """Copy k8s module templates into new environment directories. Args: @@ -65,9 +62,7 @@ def create_runway_environments(*, context: CfnginContext, namespace: str, **_: A Returns: boolean for whether or not the hook succeeded. """ - LOGGER.info( - "Bootstrapping runway k8s modules, looking for unconfigured environments..." - ) + LOGGER.info("Bootstrapping runway k8s modules, looking for unconfigured environments...") environment = namespace diff --git a/runway/templates/k8s-cfn-repo/k8s-workers.cfn/local_lookups/bootstrap_value.py b/runway/templates/k8s-cfn-repo/k8s-workers.cfn/local_lookups/bootstrap_value.py index 3157eda85..98cb3c2aa 100644 --- a/runway/templates/k8s-cfn-repo/k8s-workers.cfn/local_lookups/bootstrap_value.py +++ b/runway/templates/k8s-cfn-repo/k8s-workers.cfn/local_lookups/bootstrap_value.py @@ -29,7 +29,7 @@ class HookArgs(BaseModel): post_bootstrap: str -class BootstrapValue(LookupHandler): +class BootstrapValue(LookupHandler["CfnginContext"]): """Return the bootstrap value on creation otherwise the post_bootstrap. .. 
rubric:: Example @@ -40,34 +40,24 @@ class BootstrapValue(LookupHandler): """ - # pylint: disable=arguments-differ @classmethod - def handle( # type: ignore - cls, - value: str, - context: CfnginContext, - *_args: Any, - provider: Provider, - **_kwargs: Any, + def handle( + cls, value: str, context: CfnginContext, *, provider: Provider, **_kwargs: Any ) -> str: """Handle lookup.""" query, raw_args = cls.parse(value) - args = HookArgs.parse_obj(raw_args) + args = HookArgs.model_validate(raw_args) stack = context.get_stack(query) if not stack: raise ValueError(f"stack {query} not defined in CFNgin config") try: - stack_des = provider.cloudformation.describe_stacks(StackName=stack.fqn)[ - "Stacks" - ][0] + stack_des = provider.cloudformation.describe_stacks(StackName=stack.fqn)["Stacks"][0] except ClientError as exc: if "does not exist" not in str(exc): raise return args.bootstrap - if provider.is_stack_completed(stack_des) or ( - provider.is_stack_in_progress(stack_des) - ): + if provider.is_stack_completed(stack_des) or (provider.is_stack_in_progress(stack_des)): return args.post_bootstrap return args.bootstrap diff --git a/runway/templates/k8s-cfn-repo/runway.yml b/runway/templates/k8s-cfn-repo/runway.yml index 7a54edd11..37e3dc16e 100644 --- a/runway/templates/k8s-cfn-repo/runway.yml +++ b/runway/templates/k8s-cfn-repo/runway.yml @@ -1,4 +1,3 @@ ---- deployments: - modules: - name: EKS Cluster diff --git a/runway/templates/k8s-cfn-repo/service-hello-world.k8s/base/deployment.yaml b/runway/templates/k8s-cfn-repo/service-hello-world.k8s/base/deployment.yaml index 7b989ea0f..9b7068ef1 100644 --- a/runway/templates/k8s-cfn-repo/service-hello-world.k8s/base/deployment.yaml +++ b/runway/templates/k8s-cfn-repo/service-hello-world.k8s/base/deployment.yaml @@ -12,9 +12,7 @@ spec: containers: - name: the-container image: monopole/hello:1 - command: ["/hello", - "--port=8080", - "--enableRiskyFeature=$(ENABLE_RISKY)"] + command: ["/hello", "--port=8080", "--enableRiskyFeature=$(ENABLE_RISKY)"] ports: - containerPort: 8080 env: diff --git a/runway/templates/k8s-flux-repo/README.md b/runway/templates/k8s-flux-repo/README.md index e5527360c..6e4b60187 100644 --- a/runway/templates/k8s-flux-repo/README.md +++ b/runway/templates/k8s-flux-repo/README.md @@ -17,82 +17,82 @@ This repo represents a sample Terraform infrastructure deployment of EKS & Flux. 1. Update the `kubectl-access-role-arn` parameter in [runway.yml](./runway.yml) to specify the IAM role to which cluster admin access should be granted. E.g., if you assume an IAM role for operating in your account `aws sts get-caller-identity --query 'Arn' --output text` will show you the assumed role principal like: - ```text - arn:aws:sts::123456789012:assumed-role/myIamRole/guy.incognito - ``` + ```text + arn:aws:sts::123456789012:assumed-role/myIamRole/guy.incognito + ``` - You can use that arn to determine the IAM role arn for runway.yml: + You can use that arn to determine the IAM role arn for runway.yml: - ```yaml - deployments: - ... - - modules: - ... - parameters: - ... - kubectl-access-role-arn: arn:aws:iam::123456789012:role/myIamRole - ``` + ```yaml + deployments: + ... + - modules: + ... + parameters: + ... + kubectl-access-role-arn: arn:aws:iam::123456789012:role/myIamRole + ``` - (to use IAM users instead, see `mapUsers` in `eks-base.tf/main.tf`) + (to use IAM users instead, see `mapUsers` in `eks-base.tf/main.tf`) -2. After updating the role ARN, deploy to the dev environment (`runway deploy -e dev`). +1. 
After updating the role ARN, deploy to the dev environment (`runway deploy -e dev`).
    This will take some time to complete.
 
-   (Terraform will prompt for confirmation; pass the `--ci` flag to prevent any prompting)
+    (Terraform will prompt for confirmation; pass the `--ci` flag to prevent any prompting)
 
 #### Part 2: Pushing to the Flux repo
 
 1. Set up and push an initial commit to the AWS CodeCommit git repository called `flux-dev`.
 
-   macOS/Linux:
-
-   ```sh
-   CC_REPO_URL=https://git-codecommit.us-west-2.amazonaws.com/v1/repos/flux-dev
-   cd flux-dev
-   git init
-   git config credential."$CC_REPO_URL".helper '!aws codecommit credential-helper $@'
-   git config credential."$CC_REPO_URL".UseHttpPath true
-   git remote add origin $CC_REPO_URL
-   git add *
-   git commit -m "initial commit"
-   git push --set-upstream origin master
-   ```
-
-   Windows:
-
-   ```powershell
-   cd $home
-   $CC_REPO_URL = "https://git-codecommit.us-west-2.amazonaws.com/v1/repos/flux-dev"
-   cd flux-dev
-   git init
-   git config credential."$CC_REPO_URL".helper '!aws codecommit credential-helper $@'
-   git config credential."$CC_REPO_URL".UseHttpPath true
-   git remote add origin $CC_REPO_URL
-   git add *
-   git commit -m "initial commit"
-   git push --set-upstream origin master
-   ```
-
-2. [Wait 5 minutes](https://fluxcd.io/legacy/flux/faq/#how-often-does-flux-check-for-new-images)...
-
-3. The CodeCommit git repo will have a `flux` tag indicated the applied state of the repo and a namespace titled `demo` will appear in the cluster.
-
-   macOS/Linux:
-
-   ```sh
-   git ls-remote
-   cd ..
-   eval $(runway envvars -e dev)
-   runway kbenv run -- get namespace
-   ```
-
-   Windows:
-
-   ```powershell
-   git ls-remote
-   cd ..
-   runway envvars -e dev | iex
-   runway kbenv run -- get namespace
-   ```
+    macOS/Linux:
+
+    ```sh
+    CC_REPO_URL=https://git-codecommit.us-west-2.amazonaws.com/v1/repos/flux-dev
+    cd flux-dev
+    git init
+    git config credential."$CC_REPO_URL".helper '!aws codecommit credential-helper $@'
+    git config credential."$CC_REPO_URL".UseHttpPath true
+    git remote add origin $CC_REPO_URL
+    git add *
+    git commit -m "initial commit"
+    git push --set-upstream origin master
+    ```
+
+    Windows:
+
+    ```powershell
+    cd $home
+    $CC_REPO_URL = "https://git-codecommit.us-west-2.amazonaws.com/v1/repos/flux-dev"
+    cd flux-dev
+    git init
+    git config credential."$CC_REPO_URL".helper '!aws codecommit credential-helper $@'
+    git config credential."$CC_REPO_URL".UseHttpPath true
+    git remote add origin $CC_REPO_URL
+    git add *
+    git commit -m "initial commit"
+    git push --set-upstream origin master
+    ```
+
+1. [Wait 5 minutes](https://fluxcd.io/legacy/flux/faq/#how-often-does-flux-check-for-new-images)...
+
+1. The CodeCommit git repo will have a `flux` tag indicating the applied state of the repo and a namespace titled `demo` will appear in the cluster.
+
+    macOS/Linux:
+
+    ```sh
+    git ls-remote
+    cd ..
+    eval $(runway envvars -e dev)
+    runway kbenv run -- get namespace
+    ```
+
+    Windows:
+
+    ```powershell
+    git ls-remote
+    cd ..
+ runway envvars -e dev | iex + runway kbenv run -- get namespace + ``` ### Post-Deployment diff --git a/runway/templates/k8s-flux-repo/flux.tf/__init__.py b/runway/templates/k8s-flux-repo/flux.tf/__init__.py new file mode 100644 index 000000000..fe2653a60 --- /dev/null +++ b/runway/templates/k8s-flux-repo/flux.tf/__init__.py @@ -0,0 +1 @@ +"""Empty init file for python import traversal.""" diff --git a/runway/templates/k8s-flux-repo/runway.yml b/runway/templates/k8s-flux-repo/runway.yml index e59010f16..889db4afc 100644 --- a/runway/templates/k8s-flux-repo/runway.yml +++ b/runway/templates/k8s-flux-repo/runway.yml @@ -1,4 +1,3 @@ ---- deployments: - modules: - path: tfstate.cfn diff --git a/runway/templates/k8s-tf-repo/README.md b/runway/templates/k8s-tf-repo/README.md index 1613538de..c397f53c6 100644 --- a/runway/templates/k8s-tf-repo/README.md +++ b/runway/templates/k8s-tf-repo/README.md @@ -25,27 +25,27 @@ This repo represents a sample Terraform infrastructure deployment of EKS, featur 1. Update the `kubectl-access-role-arn` parameter in [runway.yml](./runway.yml) to specify the IAM role to which cluster admin access should be granted. E.g., if you assume an IAM role for operating in your account `aws sts get-caller-identity --query 'Arn' --output text` will show you the assumed role principal like: - ```text - arn:aws:sts::123456789012:assumed-role/myIamRole/guy.incognito - ``` + ```text + arn:aws:sts::123456789012:assumed-role/myIamRole/guy.incognito + ``` - You can use that arn to determine the IAM role arn for runway.yml: + You can use that arn to determine the IAM role arn for runway.yml: - ```yaml - deployments: - ... - - modules: - ... - parameters: - ... - kubectl-access-role-arn: arn:aws:iam::123456789012:role/myIamRole - ``` + ```yaml + deployments: + ... + - modules: + ... + parameters: + ... + kubectl-access-role-arn: arn:aws:iam::123456789012:role/myIamRole + ``` - (to use IAM users instead, see `mapUsers` in `eks-base.tf/main.tf`) + (to use IAM users instead, see `mapUsers` in `eks-base.tf/main.tf`) -2. After updating the role ARN, deploy to the dev environment (`runway deploy -e dev`). +1. After updating the role ARN, deploy to the dev environment (`runway deploy -e dev`). This will take some time to complete. 
- (Terraform will prompt twice for confirmation; pass the `--ci` flag to prevent any prompting) + (Terraform will prompt twice for confirmation; pass the `--ci` flag to prevent any prompting) ### Post-Deployment diff --git a/runway/templates/k8s-tf-repo/eks-base.tf/__init__.py b/runway/templates/k8s-tf-repo/eks-base.tf/__init__.py new file mode 100644 index 000000000..fe2653a60 --- /dev/null +++ b/runway/templates/k8s-tf-repo/eks-base.tf/__init__.py @@ -0,0 +1 @@ +"""Empty init file for python import traversal.""" diff --git a/runway/templates/k8s-tf-repo/job-s3-echo.tf/__init__.py b/runway/templates/k8s-tf-repo/job-s3-echo.tf/__init__.py new file mode 100644 index 000000000..fe2653a60 --- /dev/null +++ b/runway/templates/k8s-tf-repo/job-s3-echo.tf/__init__.py @@ -0,0 +1 @@ +"""Empty init file for python import traversal.""" diff --git a/runway/templates/k8s-tf-repo/runway.yml b/runway/templates/k8s-tf-repo/runway.yml index 907e58c8a..0f8a490cb 100644 --- a/runway/templates/k8s-tf-repo/runway.yml +++ b/runway/templates/k8s-tf-repo/runway.yml @@ -1,4 +1,3 @@ ---- deployments: - modules: - path: tfstate.cfn diff --git a/runway/templates/k8s-tf-repo/service-hello-world.k8s/base/deployment.yaml b/runway/templates/k8s-tf-repo/service-hello-world.k8s/base/deployment.yaml index 7b989ea0f..9b7068ef1 100644 --- a/runway/templates/k8s-tf-repo/service-hello-world.k8s/base/deployment.yaml +++ b/runway/templates/k8s-tf-repo/service-hello-world.k8s/base/deployment.yaml @@ -12,9 +12,7 @@ spec: containers: - name: the-container image: monopole/hello:1 - command: ["/hello", - "--port=8080", - "--enableRiskyFeature=$(ENABLE_RISKY)"] + command: ["/hello", "--port=8080", "--enableRiskyFeature=$(ENABLE_RISKY)"] ports: - containerPort: 8080 env: diff --git a/runway/templates/sls-py/__init__.py b/runway/templates/sls-py/__init__.py index a70847153..5d3447e99 100644 --- a/runway/templates/sls-py/__init__.py +++ b/runway/templates/sls-py/__init__.py @@ -1 +1 @@ -"""Empty file for python import traversal.""" # pylint: disable=all +"""Empty file for python import traversal.""" diff --git a/runway/templates/sls-py/config-dev-us-east-1.json b/runway/templates/sls-py/config-dev-us-east-1.json index 0db3279e4..0967ef424 100644 --- a/runway/templates/sls-py/config-dev-us-east-1.json +++ b/runway/templates/sls-py/config-dev-us-east-1.json @@ -1,3 +1 @@ -{ - -} +{} diff --git a/runway/templates/sls-py/hello_world/__init__.py b/runway/templates/sls-py/hello_world/__init__.py index bfa0de3db..9359fcc30 100644 --- a/runway/templates/sls-py/hello_world/__init__.py +++ b/runway/templates/sls-py/hello_world/__init__.py @@ -3,20 +3,16 @@ from __future__ import annotations import json -from typing import Any, Dict, Union +from typing import Any -# pylint: disable=unused-argument -def handler(event: Any, context: Any) -> Dict[str, Union[int, str]]: +def handler(event: Any, context: Any) -> dict[str, int | str]: # noqa: ARG001 """Return Serverless Hello World.""" body = { "message": "Go Serverless v1.0! 
Your function executed successfully!", "input": event, } - - response = {"statusCode": 200, "body": json.dumps(body)} - - return response + return {"statusCode": 200, "body": json.dumps(body)} # Use this code if you don't use the http event with the LAMBDA-PROXY # integration diff --git a/runway/templates/sls-py/package.json b/runway/templates/sls-py/package.json index 23ff2a5dc..48de1b12c 100644 --- a/runway/templates/sls-py/package.json +++ b/runway/templates/sls-py/package.json @@ -1,12 +1,12 @@ { - "name": "sampleapp", - "version": "1.0.0", + "author": "unused", "description": "Serverless Python app", "devDependencies": { "serverless": "^1.57.0", "serverless-iam-roles-per-function": "^2.0.2", "serverless-python-requirements": "^5.0.1" }, - "author": "unused", - "license": "unused" + "license": "unused", + "name": "sampleapp", + "version": "1.0.0" } diff --git a/runway/templates/sls-py/pyproject.toml b/runway/templates/sls-py/pyproject.toml index 5ebc3f6b2..c7702b999 100644 --- a/runway/templates/sls-py/pyproject.toml +++ b/runway/templates/sls-py/pyproject.toml @@ -1,18 +1,16 @@ [tool.poetry] name = "runway-sample-sls-py" version = "0.0.0" -description = "Runway Sample" authors = ["Onica Group LLC "] +description = "Runway Sample" license = "Apache-2.0" [tool.poetry.dependencies] python = "^3.9" - pyyaml = "^5.1" [tool.poetry.dev-dependencies] - [build-system] -requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" +requires = ["poetry-core>=1.0.0"] diff --git a/runway/templates/sls-tsc/package.json b/runway/templates/sls-tsc/package.json index a86b845c7..7103b454b 100644 --- a/runway/templates/sls-tsc/package.json +++ b/runway/templates/sls-tsc/package.json @@ -1,20 +1,9 @@ { - "name": "replaceme-myslsservice", - "version": "1.0.0", - "description": "Replace Me - My Serverless Service", - "main": "index.js", - "scripts": { - "docs": "typedoc", - "lint": "eslint src --ext .js,.jsx,.ts,.tsx", - "lintfix": "npm run lint -- --fix", - "test": "jest", - "coverage": "npm test -- --coverage" - }, "author": "", - "license": "ISC", "dependencies": { "source-map-support": "^0.5.19" }, + "description": "Replace Me - My Serverless Service", "devDependencies": { "@types/aws-lambda": "^8.10.66", "@types/jest": "^26.0.19", @@ -35,7 +24,18 @@ "typescript": "^4.1.3", "webpack": "^5.10.3" }, + "license": "ISC", + "main": "index.js", + "name": "replaceme-myslsservice", "prettier": { "trailingComma": "all" - } + }, + "scripts": { + "coverage": "npm test -- --coverage", + "docs": "typedoc", + "lint": "eslint src --ext .js,.jsx,.ts,.tsx", + "lintfix": "npm run lint -- --fix", + "test": "jest" + }, + "version": "1.0.0" } diff --git a/runway/templates/static-angular/runway.yml b/runway/templates/static-angular/runway.yml index 976d7d265..09d336f55 100644 --- a/runway/templates/static-angular/runway.yml +++ b/runway/templates/static-angular/runway.yml @@ -1,15 +1,15 @@ deployments: - modules: - - path: sampleapp.web - type: static - parameters: - namespace: sampleapp-${env DEPLOY_ENVIRONMENT} - staticsite_cf_disable: ${var cf_disable.${env DEPLOY_ENVIRONMENT}::default=false, transform=bool} - options: - build_output: dist/sample-app - build_steps: - - npm install - - npx ng build + - path: sampleapp.web + type: static + parameters: + namespace: sampleapp-${env DEPLOY_ENVIRONMENT} + staticsite_cf_disable: ${var cf_disable.${env DEPLOY_ENVIRONMENT}::default=false, transform=bool} + options: + build_output: dist/sample-app + build_steps: + - npm install + - npx ng build regions: - us-east-1 
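Circling back to the `sls-py` handler rewrite above: collapsing the response into a single return does not change the Lambda-proxy contract, which requires a `statusCode` plus a JSON-encoded string `body`. A quick local check of that contract, as a sketch; the import path and test name are assumed from the template layout:

```python
import json

from hello_world import handler  # template package; import path assumed


def test_handler_returns_lambda_proxy_response() -> None:
    """The handler returns a statusCode and a JSON string body, not a dict."""
    event = {"httpMethod": "GET"}
    response = handler(event, None)
    assert response["statusCode"] == 200
    body = json.loads(response["body"])  # body must be a string for API Gateway
    assert body["input"] == event
```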
diff --git a/runway/templates/static-angular/sampleapp.web/angular.json b/runway/templates/static-angular/sampleapp.web/angular.json index 4efb6346e..df32f45c6 100644 --- a/runway/templates/static-angular/sampleapp.web/angular.json +++ b/runway/templates/static-angular/sampleapp.web/angular.json @@ -1,74 +1,70 @@ { "$schema": "./node_modules/@angular/cli/lib/config/schema.json", - "version": 1, + "defaultProject": "sample-app", "newProjectRoot": "projects", "projects": { "sample-app": { - "projectType": "application", - "schematics": {}, - "root": "", - "sourceRoot": "src", - "prefix": "app", "architect": { "build": { "builder": "@angular-devkit/build-angular:browser", - "options": { - "outputPath": "dist/sample-app", - "index": "src/index.html", - "main": "src/main.ts", - "polyfills": "src/polyfills.ts", - "tsConfig": "tsconfig.app.json", - "aot": false, - "assets": [ - "src/favicon.ico", - "src/assets" - ], - "styles": [ - "src/styles.css" - ], - "scripts": [] - }, "configurations": { "production": { + "aot": true, + "budgets": [ + { + "maximumError": "5mb", + "maximumWarning": "2mb", + "type": "initial" + }, + { + "maximumError": "10kb", + "maximumWarning": "6kb", + "type": "anyComponentStyle" + } + ], + "buildOptimizer": true, + "extractCss": true, + "extractLicenses": true, "fileReplacements": [ { "replace": "src/environments/environment.ts", "with": "src/environments/environment.prod.ts" } ], + "namedChunks": false, "optimization": true, "outputHashing": "all", "sourceMap": false, - "extractCss": true, - "namedChunks": false, - "aot": true, - "extractLicenses": true, - "vendorChunk": false, - "buildOptimizer": true, - "budgets": [ - { - "type": "initial", - "maximumWarning": "2mb", - "maximumError": "5mb" - }, - { - "type": "anyComponentStyle", - "maximumWarning": "6kb", - "maximumError": "10kb" - } - ] + "vendorChunk": false } + }, + "options": { + "aot": false, + "assets": [ + "src/favicon.ico", + "src/assets" + ], + "index": "src/index.html", + "main": "src/main.ts", + "outputPath": "dist/sample-app", + "polyfills": "src/polyfills.ts", + "scripts": [], + "styles": [ + "src/styles.css" + ], + "tsConfig": "tsconfig.app.json" } }, - "serve": { - "builder": "@angular-devkit/build-angular:dev-server", - "options": { - "browserTarget": "sample-app:build" - }, + "e2e": { + "builder": "@angular-devkit/build-angular:protractor", "configurations": { "production": { - "browserTarget": "sample-app:build:production" + "devServerTarget": "sample-app:serve:production" } + }, + "options": { + "devServerTarget": "sample-app:serve", + "protractorConfig": "e2e/protractor.conf.js" } }, "extract-i18n": { @@ -77,49 +73,54 @@ "browserTarget": "sample-app:build" } }, - "test": { - "builder": "@angular-devkit/build-angular:karma", - "options": { - "main": "src/test.ts", - "polyfills": "src/polyfills.ts", - "tsConfig": "tsconfig.spec.json", - "karmaConfig": "karma.conf.js", - "assets": [ - "src/favicon.ico", - "src/assets" - ], - "styles": [ - "src/styles.css" - ], - "scripts": [] - } - }, "lint": { "builder": "@angular-devkit/build-angular:tslint", "options": { + "exclude": [ + "**/node_modules/**" + ], "tsConfig": [ "tsconfig.app.json", "tsconfig.spec.json", "e2e/tsconfig.json" - ], - "exclude": [ - "**/node_modules/**" ] } }, - "e2e": { - "builder": "@angular-devkit/build-angular:protractor", - "options": { - "protractorConfig": "e2e/protractor.conf.js", - "devServerTarget": "sample-app:serve" - }, + "serve": { + "builder": "@angular-devkit/build-angular:dev-server", "configurations": { "production": { 
- "devServerTarget": "sample-app:serve:production" + "browserTarget": "sample-app:build:production" } + }, + "options": { + "browserTarget": "sample-app:build" + } + }, + "test": { + "builder": "@angular-devkit/build-angular:karma", + "options": { + "assets": [ + "src/favicon.ico", + "src/assets" + ], + "karmaConfig": "karma.conf.js", + "main": "src/test.ts", + "polyfills": "src/polyfills.ts", + "scripts": [], + "styles": [ + "src/styles.css" + ], + "tsConfig": "tsconfig.spec.json" } } - } - }}, - "defaultProject": "sample-app" + }, + "prefix": "app", + "projectType": "application", + "root": "", + "schematics": {}, + "sourceRoot": "src" + } + }, + "version": 1 } diff --git a/runway/templates/static-angular/sampleapp.web/package.json b/runway/templates/static-angular/sampleapp.web/package.json index 98b874fb4..0e1f54867 100644 --- a/runway/templates/static-angular/sampleapp.web/package.json +++ b/runway/templates/static-angular/sampleapp.web/package.json @@ -1,15 +1,4 @@ { - "name": "sample-app", - "version": "0.0.0", - "scripts": { - "ng": "ng", - "start": "ng serve", - "build": "ng build", - "test": "ng test", - "lint": "ng lint", - "e2e": "ng e2e" - }, - "private": true, "dependencies": { "@angular/animations": "~8.2.11", "@angular/common": "~8.2.11", @@ -28,9 +17,9 @@ "@angular/cli": "~15.2.4", "@angular/compiler-cli": "~18.0.2", "@angular/language-service": "~8.2.11", - "@types/node": "~8.9.4", "@types/jasmine": "~3.3.8", "@types/jasminewd2": "~2.0.3", + "@types/node": "~8.9.4", "codelyzer": "^5.0.0", "jasmine-core": "~3.4.0", "jasmine-spec-reporter": "~4.2.1", @@ -43,5 +32,16 @@ "ts-node": "~7.0.0", "tslint": "~5.15.0", "typescript": "~3.5.3" - } + }, + "name": "sample-app", + "private": true, + "scripts": { + "build": "ng build", + "e2e": "ng e2e", + "lint": "ng lint", + "ng": "ng", + "start": "ng serve", + "test": "ng test" + }, + "version": "0.0.0" } diff --git a/runway/templates/static-angular/sampleapp.web/tsconfig.app.json b/runway/templates/static-angular/sampleapp.web/tsconfig.app.json index 565a11a21..69edc4a83 100644 --- a/runway/templates/static-angular/sampleapp.web/tsconfig.app.json +++ b/runway/templates/static-angular/sampleapp.web/tsconfig.app.json @@ -1,18 +1,18 @@ { - "extends": "./tsconfig.json", - "compilerOptions": { - "outDir": "./out-tsc/app", - "types": [] - }, - "files": [ - "src/main.ts", - "src/polyfills.ts" - ], - "include": [ - "src/**/*.ts" - ], - "exclude": [ - "src/test.ts", - "src/**/*.spec.ts" - ] + "compilerOptions": { + "outDir": "./out-tsc/app", + "types": [] + }, + "exclude": [ + "src/test.ts", + "src/**/*.spec.ts" + ], + "extends": "./tsconfig.json", + "files": [ + "src/main.ts", + "src/polyfills.ts" + ], + "include": [ + "src/**/*.ts" + ] } diff --git a/runway/templates/static-angular/sampleapp.web/tsconfig.spec.json b/runway/templates/static-angular/sampleapp.web/tsconfig.spec.json index 6400fde7d..95793f6ec 100644 --- a/runway/templates/static-angular/sampleapp.web/tsconfig.spec.json +++ b/runway/templates/static-angular/sampleapp.web/tsconfig.spec.json @@ -1,5 +1,4 @@ { - "extends": "./tsconfig.json", "compilerOptions": { "outDir": "./out-tsc/spec", "types": [ @@ -7,6 +6,7 @@ "node" ] }, + "extends": "./tsconfig.json", "files": [ "src/test.ts", "src/polyfills.ts" diff --git a/runway/templates/static-react/runway.yml b/runway/templates/static-react/runway.yml index 439b2a0fa..de51c4e98 100644 --- a/runway/templates/static-react/runway.yml +++ b/runway/templates/static-react/runway.yml @@ -1,15 +1,15 @@ deployments: - modules: - - path: 
sampleapp.web - type: static - parameters: - namespace: sampleapp-${env DEPLOY_ENVIRONMENT} - staticsite_cf_disable: ${var cf_disable.${env DEPLOY_ENVIRONMENT}::default=false, transform=bool} - options: - build_output: build - build_steps: - - npm install - - npm run build + - path: sampleapp.web + type: static + parameters: + namespace: sampleapp-${env DEPLOY_ENVIRONMENT} + staticsite_cf_disable: ${var cf_disable.${env DEPLOY_ENVIRONMENT}::default=false, transform=bool} + options: + build_output: build + build_steps: + - npm install + - npm run build regions: - us-east-1 diff --git a/runway/templates/static-react/sampleapp.web/package.json b/runway/templates/static-react/sampleapp.web/package.json index 10b8ff226..b514bed7f 100644 --- a/runway/templates/static-react/sampleapp.web/package.json +++ b/runway/templates/static-react/sampleapp.web/package.json @@ -1,7 +1,16 @@ { - "name": "sample-app", - "version": "0.1.0", - "private": true, + "browserslist": { + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ], + "production": [ + ">0.2%", + "not dead", + "not op_mini all" + ] + }, "dependencies": { "@testing-library/jest-dom": "^4.2.4", "@testing-library/react": "^9.5.0", @@ -10,25 +19,16 @@ "react-dom": "^16.13.1", "react-scripts": "3.4.1" }, - "scripts": { - "start": "react-scripts start", - "build": "react-scripts build", - "test": "react-scripts test", - "eject": "react-scripts eject" - }, "eslintConfig": { "extends": "react-app" }, - "browserslist": { - "production": [ - ">0.2%", - "not dead", - "not op_mini all" - ], - "development": [ - "last 1 chrome version", - "last 1 firefox version", - "last 1 safari version" - ] - } + "name": "sample-app", + "private": true, + "scripts": { + "build": "react-scripts build", + "eject": "react-scripts eject", + "start": "react-scripts start", + "test": "react-scripts test" + }, + "version": "0.1.0" } diff --git a/runway/templates/static-react/sampleapp.web/public/manifest.json b/runway/templates/static-react/sampleapp.web/public/manifest.json index 080d6c77a..40bd3d842 100644 --- a/runway/templates/static-react/sampleapp.web/public/manifest.json +++ b/runway/templates/static-react/sampleapp.web/public/manifest.json @@ -1,25 +1,25 @@ { - "short_name": "React App", - "name": "Create React App Sample", - "icons": [ - { - "src": "favicon.ico", - "sizes": "64x64 32x32 24x24 16x16", - "type": "image/x-icon" - }, - { - "src": "logo192.png", - "type": "image/png", - "sizes": "192x192" - }, - { - "src": "logo512.png", - "type": "image/png", - "sizes": "512x512" - } - ], - "start_url": ".", - "display": "standalone", - "theme_color": "#000000", - "background_color": "#ffffff" + "background_color": "#ffffff", + "display": "standalone", + "icons": [ + { + "sizes": "64x64 32x32 24x24 16x16", + "src": "favicon.ico", + "type": "image/x-icon" + }, + { + "sizes": "192x192", + "src": "logo192.png", + "type": "image/png" + }, + { + "sizes": "512x512", + "src": "logo512.png", + "type": "image/png" + } + ], + "name": "Create React App Sample", + "short_name": "React App", + "start_url": ".", + "theme_color": "#000000" } diff --git a/runway/tests/handlers/base.py b/runway/tests/handlers/base.py index 965facfbf..014785955 100644 --- a/runway/tests/handlers/base.py +++ b/runway/tests/handlers/base.py @@ -3,7 +3,7 @@ from __future__ import annotations import os -from typing import TYPE_CHECKING, Any, Dict, List, Union +from typing import TYPE_CHECKING, Any if TYPE_CHECKING: from ...config.components.runway.base import 
ConfigProperty @@ -13,12 +13,12 @@ class TestHandler: """Base class for test handlers.""" @classmethod - def handle(cls, name: str, args: Union[ConfigProperty, Dict[str, Any]]) -> None: + def handle(cls, name: str, args: ConfigProperty | dict[str, Any]) -> None: """Redefine in subclass.""" - raise NotImplementedError() + raise NotImplementedError @staticmethod - def get_dirs(provided_path: str) -> List[str]: + def get_dirs(provided_path: str) -> list[str]: """Return list of directories.""" repo_dirs = next(os.walk(provided_path))[1] if ".git" in repo_dirs: diff --git a/runway/tests/handlers/cfn_lint.py b/runway/tests/handlers/cfn_lint.py index 57b3e7f00..5a53dbd4a 100644 --- a/runway/tests/handlers/cfn_lint.py +++ b/runway/tests/handlers/cfn_lint.py @@ -7,7 +7,7 @@ import runpy import sys from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Union +from typing import TYPE_CHECKING, Any import yaml @@ -26,7 +26,7 @@ class CfnLintHandler(TestHandler): """Lints CFN.""" @classmethod - def handle(cls, name: str, args: Union[ConfigProperty, Dict[str, Any]]) -> None: + def handle(cls, name: str, args: ConfigProperty | dict[str, Any]) -> None: """Perform the actual test. Relies on .cfnlintrc file to be located beside the Runway config file. @@ -42,7 +42,7 @@ def handle(cls, name: str, args: Union[ConfigProperty, Dict[str, Any]]) -> None: # prevent duplicate log messages by not passing to the root logger logging.getLogger("cfnlint").propagate = False try: - with argv(*["cfn-lint"] + args.get("cli_args", [])): + with argv(*["cfn-lint", *args.get("cli_args", [])]): runpy.run_module("cfnlint", run_name="__main__") except SystemExit as err: # this call will always result in SystemExit if err.code != 0: # ignore zero exit codes but re-raise for non-zero diff --git a/runway/tests/handlers/script.py b/runway/tests/handlers/script.py index fcbc22b31..445a03613 100644 --- a/runway/tests/handlers/script.py +++ b/runway/tests/handlers/script.py @@ -6,7 +6,7 @@ import subprocess import sys from subprocess import CalledProcessError -from typing import TYPE_CHECKING, Any, Dict, Union +from typing import TYPE_CHECKING, Any from ..._logging import PrefixAdaptor from ...tests.handlers.base import TestHandler @@ -22,7 +22,7 @@ class ScriptHandler(TestHandler): """Handle script tests. Args: - commands (List[str]): A list of commands to be executed in order. + commands: A list of commands to be executed in order. Each command is run in its own subprocess. The working directory will be the same as where the 'runway test' command was executed. 
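
Most of the hunks in these test handlers repeat one mechanical change: typing.Union, Dict, and List become PEP 604/585 spellings. Because these modules already use "from __future__ import annotations" (visible at the top of handlers/base.py), the new syntax is never evaluated at runtime and parses on Pythons older than 3.10. The noqa codes elsewhere in the diff (PTH118, ARG002, and friends) suggest Ruff is now the linter, and its UP006/UP007 rules enforce exactly this style. A minimal standalone sketch of the pattern (EchoHandler is a hypothetical stand-in, not part of Runway):

    from __future__ import annotations  # annotations stay unevaluated strings

    from typing import Any


    class EchoHandler:
        """Hypothetical stand-in for a test handler, used only to show the syntax."""

        @classmethod
        def handle(cls, name: str, args: dict[str, Any] | None = None) -> None:
            """Echo the test name and args; PEP 585/604 annotations parse fine on 3.8+."""
            print(name, args or {})


    EchoHandler.handle("sample-test", {"commands": ["echo hi"]})
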
@@ -40,7 +40,7 @@ class ScriptHandler(TestHandler): """ @classmethod - def handle(cls, name: str, args: Union[ConfigProperty, Dict[str, Any]]) -> None: + def handle(cls, name: str, args: ConfigProperty | dict[str, Any]) -> None: """Perform the actual test.""" logger = PrefixAdaptor(name, LOGGER) for cmd in args["commands"]: diff --git a/runway/tests/handlers/yaml_lint.py b/runway/tests/handlers/yaml_lint.py index 0f632891d..0bd4c4403 100644 --- a/runway/tests/handlers/yaml_lint.py +++ b/runway/tests/handlers/yaml_lint.py @@ -7,7 +7,7 @@ import logging import os import runpy -from typing import TYPE_CHECKING, Any, Dict, List, Union +from typing import TYPE_CHECKING, Any from ...tests.handlers.base import TestHandler from ...utils import argv @@ -23,32 +23,34 @@ class YamllintHandler(TestHandler): """Lints yaml.""" @staticmethod - def get_yaml_files_at_path(provided_path: str) -> List[str]: + def get_yaml_files_at_path(provided_path: str) -> list[str]: """Return list of yaml files.""" - yaml_files = glob.glob(os.path.join(provided_path, "*.yaml")) - yml_files = glob.glob(os.path.join(provided_path, "*.yml")) + yaml_files = glob.glob(os.path.join(provided_path, "*.yaml")) # noqa: PTH207, PTH118 + yml_files = glob.glob(os.path.join(provided_path, "*.yml")) # noqa: PTH118, PTH207 return yaml_files + yml_files @classmethod - def get_yamllint_options(cls, path: str) -> List[str]: + def get_yamllint_options(cls, path: str) -> list[str]: """Return yamllint option list.""" - yamllint_options: List[str] = [] + yamllint_options: list[str] = [] return yamllint_options + cls.get_dirs(path) + cls.get_yaml_files_at_path(path) @classmethod - def handle(cls, name: str, args: Union[ConfigProperty, Dict[str, Any]]) -> None: + def handle(cls, name: str, args: ConfigProperty | dict[str, Any]) -> None: # noqa: ARG003 """Perform the actual test.""" - base_dir = os.getcwd() + base_dir = os.getcwd() # noqa: PTH109 - if os.path.isfile(os.path.join(base_dir, ".yamllint")): - yamllint_config = os.path.join(base_dir, ".yamllint") - elif os.path.isfile(os.path.join(base_dir, ".yamllint.yml")): - yamllint_config = os.path.join(base_dir, ".yamllint.yml") + if os.path.isfile(os.path.join(base_dir, ".yamllint")): # noqa: PTH118, PTH113 + yamllint_config = os.path.join(base_dir, ".yamllint") # noqa: PTH118 + elif os.path.isfile(os.path.join(base_dir, ".yamllint.yml")): # noqa: PTH113, PTH118 + yamllint_config = os.path.join(base_dir, ".yamllint.yml") # noqa: PTH118 else: - yamllint_config = os.path.join( - os.path.dirname( - os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + yamllint_config = os.path.join( # noqa: PTH118 + os.path.dirname( # noqa: PTH120 + os.path.dirname( # noqa: PTH120 + os.path.dirname(os.path.abspath(__file__)) # noqa: PTH120, PTH100 + ) ), "templates", ".yamllint.yml", @@ -59,5 +61,5 @@ def handle(cls, name: str, args: Union[ConfigProperty, Dict[str, Any]]) -> None: *cls.get_yamllint_options(base_dir), ] - with argv(*["yamllint"] + yamllint_options): + with argv(*["yamllint", *yamllint_options]): runpy.run_module("yamllint", run_name="__main__") diff --git a/runway/tests/registry.py b/runway/tests/registry.py index a33f5556e..66796e7a7 100644 --- a/runway/tests/registry.py +++ b/runway/tests/registry.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Dict, Type +from typing import TYPE_CHECKING from .handlers import cfn_lint, script from .handlers import yaml_lint as yamllint @@ -10,10 +10,10 @@ if TYPE_CHECKING: from .handlers.base import TestHandler 
-TEST_HANDLERS: Dict[str, Type[TestHandler]] = {}
+TEST_HANDLERS: dict[str, type[TestHandler]] = {}
 
 
-def register_test_handler(test_type: str, handler: Type[TestHandler]) -> None:
+def register_test_handler(test_type: str, handler: type[TestHandler]) -> None:
     """Register a test handler.
 
     Args:
diff --git a/runway/type_defs.py b/runway/type_defs.py
index 6f58fd24c..504d40cfa 100644
--- a/runway/type_defs.py
+++ b/runway/type_defs.py
@@ -3,11 +3,14 @@
 from __future__ import annotations
 
 from pathlib import Path
-from typing import TypeVar, Union
+from typing import TYPE_CHECKING, TypeVar
 
 from typing_extensions import TypedDict
 
-AnyPath = Union[Path, str]
+if TYPE_CHECKING:
+    from typing_extensions import TypeAlias
+
+AnyPath: TypeAlias = "Path | str"
 AnyPathConstrained = TypeVar("AnyPathConstrained", Path, str)
 
diff --git a/runway/utils/__init__.py b/runway/utils/__init__.py
index e62a2f4e1..130269274 100644
--- a/runway/utils/__init__.py
+++ b/runway/utils/__init__.py
@@ -2,7 +2,6 @@
 
 from __future__ import annotations
 
-import datetime
 import hashlib
 import importlib
 import json
@@ -12,51 +11,34 @@
 import re
 import stat
 import sys
-from contextlib import contextmanager
-from decimal import Decimal
+from collections.abc import Iterable, Iterator, MutableMapping
+from contextlib import AbstractContextManager, contextmanager
+from functools import cached_property  # noqa: F401  # TODO (kyle): remove in next major release
 from pathlib import Path
 from subprocess import check_call
-from types import TracebackType
-from typing import (
-    ContextManager,  # deprecated in 3.9 for contextlib.AbstractContextManager
-)
-from typing import (
-    MutableMapping,  # deprecated in 3.9 for collections.abc.MutableMapping
-)
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    Dict,
-    Iterable,
-    Iterator,
-    List,
-    Optional,
-    Set,
-    Type,
-    Union,
-    cast,
-    overload,
-)
+from typing import TYPE_CHECKING, Any, Callable, cast, overload
 
 import yaml
 from pydantic import BaseModel as _BaseModel
-from typing_extensions import Literal
-
-# make this importable for util as it was before
-from ..compat import cached_property  # noqa: F401
 
 # make this importable without defining __all__ yet.
 # more things need to be moved of this file before starting an explicit __all__.
+from . import pydantic_validators  # noqa: F401
 from ._file_hash import FileHash  # noqa: F401
+from ._json_encoder import JsonEncoder  # noqa: F401
 from ._version import Version  # noqa: F401
 
 if TYPE_CHECKING:
+    from types import TracebackType
+
     from mypy_boto3_cloudformation.type_defs import OutputTypeDef
+    from typing_extensions import Literal
+
+    from ..compat import Self
 
 AWS_ENV_VARS = ("AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN")
-DOC_SITE = "https://docs.onica.com/projects/runway"
-EMBEDDED_LIB_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "embedded")
+DOC_SITE = "https://runway.readthedocs.io"
+EMBEDDED_LIB_PATH = str(Path(__file__).resolve().parent / "embedded")
 
 LOGGER = logging.getLogger(__name__)
 
@@ -81,7 +63,10 @@ def __contains__(self, name: object) -> bool:
         name: The name to check for existence in the model.
 
         """
-        return name in self.__dict__
+        if name in self.__dict__:
+            return True
+        # extra fields are no longer added to __dict__, they are placed in `model_extra`
+        return bool(self.model_extra and name in self.model_extra)
 
     def __getitem__(self, name: str) -> Any:
         """Implement evaluation of self[name].
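
The rewritten __contains__ above follows from a pydantic v2 behavior change: extra fields land in model_extra instead of the instance __dict__. A quick sketch of the behavior being accounted for (assumes pydantic v2 is installed; Demo is an illustrative model, not Runway code):

    from pydantic import BaseModel, ConfigDict


    class Demo(BaseModel):
        """Illustrative model that accepts extra fields."""

        model_config = ConfigDict(extra="allow")

        known: str = "x"


    obj = Demo(unknown="y")
    print("unknown" in obj.__dict__)  # False: extras no longer land in __dict__
    print(obj.model_extra)  # {'unknown': 'y'}
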
@@ -106,40 +91,7 @@ def __setitem__(self, name: str, value: Any) -> None: value: Value to assign to the attribute. """ - super().__setattr__(name, value) # type: ignore - - -class JsonEncoder(json.JSONEncoder): - """Encode Python objects to JSON data. - - This class can be used with ``json.dumps()`` to handle most data types - that can occur in responses from AWS. - - Usage: - >>> json.dumps(data, cls=JsonEncoder) - - """ - - def default(self, o: Any) -> Any: - """Encode types not supported by the default JSONEncoder. - - Args: - o: Object to encode. - - Returns: - JSON serializable data type. - - Raises: - TypeError: Object type could not be encoded. - - """ - if isinstance(o, Decimal): - return float(o) - if isinstance(o, (datetime.datetime, datetime.date)): - return o.isoformat() - if isinstance(o, _BaseModel): - return o.dict() - return super().default(o) + super().__setattr__(name, value) class MutableMap(MutableMapping[str, Any]): @@ -167,13 +119,13 @@ def __init__(self, **kwargs: Any) -> None: self._found_queries = MutableMap() @property - def data(self) -> Dict[str, Any]: + def data(self) -> dict[str, Any]: """Sanitized output of __dict__. Removes anything that starts with ``_``. """ - result: Dict[str, Any] = {} + result: dict[str, Any] = {} for key, val in self.__dict__.items(): if key.startswith("_"): continue @@ -182,7 +134,7 @@ def data(self) -> Dict[str, Any]: def clear_found_cache(self) -> None: """Clear _found_cache.""" - for _, val in self.__dict__.items(): + for val in self.__dict__.values(): if isinstance(val, MutableMap): val.clear_found_cache() if hasattr(self, "_found_queries"): @@ -350,20 +302,19 @@ def __str__(self) -> str: return json.dumps(self.data, default=json_serial) -class SafeHaven(ContextManager["SafeHaven"]): +class SafeHaven(AbstractContextManager["SafeHaven"]): """Context manager that caches and resets important values on exit. Caches and resets os.environ, sys.argv, sys.modules, and sys.path. """ - # pylint: disable=redefined-outer-name def __init__( self, - argv: Optional[Iterable[str]] = None, - environ: Optional[Dict[str, str]] = None, - sys_modules_exclude: Optional[Iterable[str]] = None, - sys_path: Optional[Iterable[str]] = None, + argv: Iterable[str] | None = None, + environ: dict[str, str] | None = None, + sys_modules_exclude: Iterable[str] | None = None, + sys_path: Iterable[str] | None = None, ) -> None: """Instantiate class. @@ -385,7 +336,7 @@ def __init__( self.__sys_path = list(sys.path) # more informative origin for log statements self.logger = logging.getLogger("runway." + self.__class__.__name__) - self.sys_modules_exclude: Set[str] = ( + self.sys_modules_exclude: set[str] = ( set(sys_modules_exclude) if sys_modules_exclude else set() ) self.sys_modules_exclude.add("runway") @@ -434,7 +385,7 @@ def reset_sys_path(self) -> None: self.logger.debug("resetting sys.path: %s", json.dumps(self.__sys_path)) sys.path = self.__sys_path - def __enter__(self) -> SafeHaven: + def __enter__(self) -> Self: """Enter the context manager. 
Returns: @@ -446,16 +397,16 @@ def __enter__(self) -> SafeHaven: def __exit__( self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, ) -> None: """Exit the context manager.""" self.logger.debug("leaving the safe haven...") self.reset_all() -# TODO remove after https://github.com/yaml/pyyaml/issues/234 is resolved +# TODO (kyle): remove after https://github.com/yaml/pyyaml/issues/234 is resolved class YamlDumper(yaml.Dumper): """Custom YAML Dumper. @@ -472,9 +423,9 @@ class YamlDumper(yaml.Dumper): """ - def increase_indent(self, flow: bool = False, indentless: bool = False) -> None: + def increase_indent(self, flow: bool = False, indentless: bool = False) -> None: # noqa: ARG002 """Override parent method.""" - return super().increase_indent(flow, False) # type: ignore + return super().increase_indent(flow, False) @contextmanager @@ -490,7 +441,7 @@ def argv(*args: str) -> Iterator[None]: @contextmanager -def change_dir(newdir: Union[Path, str]) -> Iterator[None]: +def change_dir(newdir: Path | str) -> Iterator[None]: """Change directory. Adapted from http://stackoverflow.com/a/24176022 @@ -519,9 +470,7 @@ def ensure_file_is_executable(path: str) -> None: SystemExit: file is not executable. """ - if platform.system() != "Windows" and ( - not stat.S_IXUSR & os.stat(path)[stat.ST_MODE] - ): + if platform.system() != "Windows" and (not stat.S_IXUSR & Path(path).stat()[stat.ST_MODE]): print(f"Error: File {path} is not executable") # noqa: T201 sys.exit(1) @@ -536,13 +485,14 @@ def ensure_string(value: Any) -> str: @contextmanager -def environ(env: Optional[Dict[str, str]] = None, **kwargs: str) -> Iterator[None]: +def environ(env: dict[str, str] | None = None, **kwargs: str) -> Iterator[None]: """Context manager for temporarily changing os.environ. The original value of os.environ is restored upon exit. Args: env: Dictionary to use when updating os.environ. + **kwargs: Arbitrary keyword arguments. """ env = env or {} @@ -566,9 +516,7 @@ def json_serial(obj: Any) -> Any: raise TypeError(f"Type {type(obj)} not serializable") -def load_object_from_string( - fqcn: str, try_reload: bool = False -) -> Union[type, Callable[..., Any]]: +def load_object_from_string(fqcn: str, try_reload: bool = False) -> type | Callable[..., Any]: """Convert "." delimited strings to a python object. Args: @@ -595,8 +543,7 @@ def load_object_from_string( if ( try_reload and sys.modules.get(module_path) - and module_path.split(".")[0] - not in sys.builtin_module_names # skip builtins + and module_path.split(".")[0] not in sys.builtin_module_names # skip builtins ): importlib.reload(sys.modules[module_path]) else: @@ -606,21 +553,19 @@ def load_object_from_string( @overload def merge_dicts( - dict1: Dict[Any, Any], dict2: Dict[Any, Any], deep_merge: bool = ... -) -> Dict[str, Any]: ... + dict1: dict[Any, Any], dict2: dict[Any, Any], deep_merge: bool = ... +) -> dict[str, Any]: ... @overload -def merge_dicts( - dict1: List[Any], dict2: List[Any], deep_merge: bool = ... -) -> List[Any]: ... +def merge_dicts(dict1: list[Any], dict2: list[Any], deep_merge: bool = ...) -> list[Any]: ... 
def merge_dicts( - dict1: Union[Dict[Any, Any], List[Any]], - dict2: Union[Dict[Any, Any], List[Any]], + dict1: dict[Any, Any] | list[Any], + dict2: dict[Any, Any] | list[Any], deep_merge: bool = True, -) -> Union[Dict[Any, Any], List[Any]]: +) -> dict[Any, Any] | list[Any]: """Merge dict2 into dict1.""" if deep_merge: if isinstance(dict1, list) and isinstance(dict2, list): @@ -630,19 +575,13 @@ def merge_dicts( return dict2 for key in dict2: - dict1[key] = ( - merge_dicts(dict1[key], dict2[key], True) - if key in dict1 - else dict2[key] - ) + dict1[key] = merge_dicts(dict1[key], dict2[key], True) if key in dict1 else dict2[key] return dict1 if isinstance(dict1, dict) and isinstance(dict2, dict): dict3 = dict1.copy() dict3.update(dict2) return dict3 - raise ValueError( - f"values of type {type(dict1)} and {type(dict2)} must be type dict" - ) + raise ValueError(f"values of type {type(dict1)} and {type(dict2)} must be type dict") def snake_case_to_kebab_case(value: str) -> str: @@ -655,7 +594,7 @@ def snake_case_to_kebab_case(value: str) -> str: return value.replace("_", "-") -def extract_boto_args_from_env(env_vars: Dict[str, str]) -> Dict[str, str]: +def extract_boto_args_from_env(env_vars: dict[str, str]) -> dict[str, str]: """Return boto3 client args dict with environment creds.""" return { i: env_vars[i.upper()] @@ -664,27 +603,25 @@ def extract_boto_args_from_env(env_vars: Dict[str, str]) -> Dict[str, str]: } -def flatten_path_lists( - env_dict: Dict[str, Any], env_root: Optional[str] = None -) -> Dict[str, Any]: +def flatten_path_lists(env_dict: dict[str, Any], env_root: str | None = None) -> dict[str, Any]: """Join paths in environment dict down to strings.""" for key, val in env_dict.items(): # Lists are presumed to be path components and will be turned back # to strings if isinstance(val, list): env_dict[key] = ( - os.path.join(env_root, os.path.join(*cast(List[str], val))) - if (env_root and not os.path.isabs(os.path.join(*cast(List[str], val)))) - else os.path.join(*cast(List[str], val)) + Path(env_root).joinpath(*cast("list[str]", val)) + if (env_root and not Path(*cast("list[str]", val)).is_absolute()) + else Path(*cast("list[str]", val)) ) return env_dict def merge_nested_environment_dicts( - env_dicts: Dict[str, Any], - env_name: Optional[str] = None, - env_root: Optional[str] = None, -) -> Dict[str, Any]: + env_dicts: dict[str, Any], + env_name: str | None = None, + env_root: str | None = None, +) -> dict[str, Any]: """Return single-level dictionary from dictionary of dictionaries.""" # If the provided dictionary is just a single "level" (no nested # environments), it applies to all environments @@ -700,13 +637,13 @@ def merge_nested_environment_dicts( return {} combined_dicts = merge_dicts( - cast(Dict[Any, Any], env_dicts.get("*", {})), - cast(Dict[Any, Any], env_dicts.get(env_name, {})), + cast("dict[Any, Any]", env_dicts.get("*", {})), + cast("dict[Any, Any]", env_dicts.get(env_name, {})), ) return flatten_path_lists(combined_dicts, env_root) -def find_cfn_output(key: str, outputs: List[OutputTypeDef]) -> Optional[str]: +def find_cfn_output(key: str, outputs: list[OutputTypeDef]) -> str | None: """Return CFN output value. 
Args: @@ -722,13 +659,13 @@ def find_cfn_output(key: str, outputs: List[OutputTypeDef]) -> Optional[str]: def get_embedded_lib_path() -> str: """Return path of embedded libraries.""" - return os.path.join(os.path.dirname(os.path.abspath(__file__)), "embedded") + return str(Path(__file__).resolve().parent / "embedded") def get_hash_for_filename(filename: str, hashfile_path: str) -> str: """Return hash for filename in the hashfile.""" filehash = "" - with open(hashfile_path, "r", encoding="utf-8") as stream: + with open(hashfile_path, encoding="utf-8") as stream: # noqa: PTH123 for _cnt, line in enumerate(stream): if line.rstrip().endswith(filename): match = re.match(r"^[A-Za-z0-9]*", line) @@ -750,7 +687,7 @@ def ignore_exit_code_0() -> Iterator[None]: raise -def fix_windows_command_list(commands: List[str]) -> List[str]: +def fix_windows_command_list(commands: list[str]) -> list[str]: """Return command list with working Windows commands. npm on windows is npm.cmd, which will blow up @@ -762,16 +699,17 @@ def fix_windows_command_list(commands: List[str]) -> List[str]: """ fully_qualified_cmd_path = which(commands[0]) if fully_qualified_cmd_path: - commands[0] = os.path.basename(fully_qualified_cmd_path) + commands[0] = Path(fully_qualified_cmd_path).name return commands def run_commands( - commands: List[Union[str, List[str], Dict[str, Union[str, List[str]]]]], - directory: Union[Path, str], - env: Optional[Dict[str, str]] = None, + commands: list[dict[str, list[str] | str] | list[str] | str], + directory: Path | str, + env: dict[str, str] | None = None, ) -> None: """Run list of commands.""" + directory = Path(directory) if env is None: env = os.environ.copy() for step in commands: @@ -780,16 +718,12 @@ def run_commands( raw_command = step elif step.get("command"): # dictionary execution_dir = ( - os.path.join(directory, cast(str, step.get("cwd", ""))) - if step.get("cwd") - else directory + directory / str(step["cwd"]) if step.get("cwd") and step["cwd"] else directory ) raw_command = step["command"] else: raise AttributeError(f"Invalid command step: {step}") - command_list = ( - raw_command.split(" ") if isinstance(raw_command, str) else raw_command - ) + command_list = raw_command.split(" ") if isinstance(raw_command, str) else raw_command if platform.system().lower() == "windows": command_list = fix_windows_command_list(command_list) @@ -835,7 +769,7 @@ def get_file_hash( __name__, ) file_hash = getattr(hashlib, algorithm)() - with open(filename, "rb") as stream: + with open(filename, "rb") as stream: # noqa: PTH123 while True: data = stream.read(65536) # 64kb chunks if not data: @@ -870,7 +804,7 @@ def sha256sum(filename: str) -> str: __name__, ) sha256 = hashlib.sha256() - with open(filename, "rb", buffering=0) as stream: + with open(filename, "rb", buffering=0) as stream: # noqa: PTH123 mem_view = memoryview(bytearray(128 * 1024)) for i in iter(lambda: stream.readinto(mem_view), 0): sha256.update(mem_view[:i]) @@ -878,7 +812,7 @@ def sha256sum(filename: str) -> str: @contextmanager -def use_embedded_pkgs(embedded_lib_path: Optional[str] = None) -> Iterator[None]: +def use_embedded_pkgs(embedded_lib_path: str | None = None) -> Iterator[None]: """Temporarily prepend embedded packages to sys.path.""" if embedded_lib_path is None: embedded_lib_path = get_embedded_lib_path() @@ -891,14 +825,14 @@ def use_embedded_pkgs(embedded_lib_path: Optional[str] = None) -> Iterator[None] sys.path = old_sys_path -def which(program: str) -> Optional[str]: +def which(program: str) -> str | None: 
"""Mimic 'which' command behavior.""" def is_exe(fpath: str) -> bool: """Determine if program exists and is executable.""" - return os.path.isfile(fpath) and os.access(fpath, os.X_OK) + return Path(fpath).is_file() and os.access(fpath, os.X_OK) - def get_extensions() -> List[str]: + def get_extensions() -> list[str]: """Get PATHEXT if the exist, otherwise use default.""" exts = ".COM;.EXE;.BAT;.CMD;.VBS;.VBE;.JS;.JSE;.WSF;.WSH;.MSC" @@ -907,7 +841,7 @@ def get_extensions() -> List[str]: return exts.split(";") - fname, file_ext = os.path.splitext(program) + fname, file_ext = os.path.splitext(program) # noqa: PTH122 fpath, fname = os.path.split(program) if not file_ext and platform.system().lower() == "windows": @@ -917,16 +851,16 @@ def get_extensions() -> List[str]: for i in fnames: if fpath: - exe_file = os.path.join(fpath, i) + exe_file = os.path.join(fpath, i) # noqa: PTH118 if is_exe(exe_file): return exe_file else: for path in ( os.environ.get("PATH", "").split(os.pathsep) if "PATH" in os.environ - else [os.getcwd()] + else [os.getcwd()] # noqa: PTH109 ): - exe_file = os.path.join(path, i) + exe_file = os.path.join(path, i) # noqa: PTH118 if is_exe(exe_file): return exe_file diff --git a/runway/utils/_file_hash.py b/runway/utils/_file_hash.py index d9f4b7c90..2f8758c81 100644 --- a/runway/utils/_file_hash.py +++ b/runway/utils/_file_hash.py @@ -3,10 +3,11 @@ from __future__ import annotations from pathlib import Path -from typing import TYPE_CHECKING, ClassVar, Iterable, Optional +from typing import TYPE_CHECKING, ClassVar if TYPE_CHECKING: import hashlib + from collections.abc import Iterable from _typeshed import StrPath @@ -26,9 +27,7 @@ class FileHash: 1024 * 10_000_000 # 10mb - number of bytes in each read operation ) - def __init__( - self, hash_alg: "hashlib._Hash", *, chunk_size: int = DEFAULT_CHUNK_SIZE - ) -> None: + def __init__(self, hash_alg: hashlib._Hash, *, chunk_size: int = DEFAULT_CHUNK_SIZE) -> None: """Instantiate class. Args: @@ -75,10 +74,8 @@ def add_file(self, file_path: StrPath) -> None: file_path: Path of the file to add. """ - with open(file_path, "rb") as stream: - # python 3.7 compatible version of `while chunk := buf.read(read_size):` - chunk = stream.read(self.chunk_size) # seed chunk with initial value - while chunk: + with Path.open(Path(file_path), "rb") as stream: + while chunk := stream.read(self.chunk_size): self._hash.update(chunk) chunk = stream.read(self.chunk_size) # read in new chunk @@ -87,7 +84,7 @@ def add_file_name( file_path: StrPath, *, end_character: str = "\0", - relative_to: Optional[StrPath] = None, + relative_to: StrPath | None = None, ) -> None: """Add file name to the hash. This includes the path. @@ -103,11 +100,7 @@ def add_file_name( """ self._hash.update( ( - str( - Path(file_path).relative_to(relative_to) - if relative_to - else Path(file_path) - ) + str(Path(file_path).relative_to(relative_to) if relative_to else Path(file_path)) + end_character ).encode() ) @@ -116,7 +109,7 @@ def add_files( self, file_paths: Iterable[StrPath], *, - relative_to: Optional[StrPath] = None, + relative_to: StrPath | None = None, ) -> None: """Add files to the hash. 
@@ -132,4 +125,4 @@ def add_files( self.add_file_name(fp, relative_to=relative_to) self.add_file(fp) # end of file contents; only necessary with multiple files - self._hash.update("\0".encode()) + self._hash.update(b"\0") diff --git a/runway/utils/_json_encoder.py b/runway/utils/_json_encoder.py new file mode 100644 index 000000000..28b4ca1f6 --- /dev/null +++ b/runway/utils/_json_encoder.py @@ -0,0 +1,49 @@ +"""Customized JSON encoder.""" + +from __future__ import annotations + +import datetime +import json +from decimal import Decimal +from pathlib import Path +from typing import Any + +from packaging.specifiers import SpecifierSet +from pydantic import BaseModel + + +class JsonEncoder(json.JSONEncoder): + """Encode Python objects to JSON data. + + This class can be used with ``json.dumps()`` to handle most data types + that can occur in responses from AWS. + + Usage: + >>> json.dumps(data, cls=JsonEncoder) + + """ + + def default(self, o: Any) -> dict[Any, Any] | float | list[Any] | str | Any: + """Encode types not supported by the default JSONEncoder. + + Args: + o: Object to encode. + + Returns: + JSON serializable data type. + + Raises: + TypeError: Object type could not be encoded. + + """ + if isinstance(o, Decimal): + return float(o) + if isinstance(o, (datetime.datetime, datetime.date)): + return o.isoformat() + if isinstance(o, BaseModel): + return o.model_dump() + if isinstance(o, (Path, SpecifierSet)): + return str(o) + if isinstance(o, (set, tuple)): + return list(o) # pyright: ignore[reportUnknownArgumentType] + return super().default(o) diff --git a/runway/utils/_version.py b/runway/utils/_version.py index 2e4c6decc..54c601245 100644 --- a/runway/utils/_version.py +++ b/runway/utils/_version.py @@ -22,8 +22,7 @@ def __repr__(self) -> str: """Return repr.""" # this usage of super is required to reproduce the intended result in # any subclasses of this class - # pylint: disable=super-with-arguments - return f"" + return f"" def __str__(self) -> str: """Return the original version string.""" diff --git a/runway/utils/pydantic_validators/__init__.py b/runway/utils/pydantic_validators/__init__.py new file mode 100644 index 000000000..e39d7763e --- /dev/null +++ b/runway/utils/pydantic_validators/__init__.py @@ -0,0 +1,5 @@ +"""Pydantic validators.""" + +from ._lax_str import LaxStr + +__all__ = ["LaxStr"] diff --git a/runway/utils/pydantic_validators/_lax_str.py b/runway/utils/pydantic_validators/_lax_str.py new file mode 100644 index 000000000..c986e8bf6 --- /dev/null +++ b/runway/utils/pydantic_validators/_lax_str.py @@ -0,0 +1,36 @@ +"""Inverse of :class:`~pydantic.types.StrictStr`.""" + +from __future__ import annotations + +from decimal import Decimal + +from pydantic.functional_validators import BeforeValidator + + +def _handler(value: object | None) -> object | None: + """Convert the provided value if able.""" + if isinstance(value, (float, int, Decimal)): + return str(value) + return value + + +LaxStr = BeforeValidator(_handler) +"""Custom :class:`~pydantic.functional_validators.BeforeValidator`. + +Inverse of :class:`~pydantic.types.StrictStr` that allows additional types to be +accepted as a :class:`str`. + +.. rubric:: Example +.. 
code-block:: python
+
+    from __future__ import annotations
+
+    from typing import Annotated
+
+    from pydantic import BaseModel
+
+    from runway.utils import LaxStr
+
+    class MyModel(BaseModel):
+        some_field: Annotated[str, LaxStr]
+        some_other_field: Annotated[str | None, LaxStr] = None
+
+"""
diff --git a/runway/variables.py b/runway/variables.py
index 75f84b27d..ce0810bd8 100644
--- a/runway/variables.py
+++ b/runway/variables.py
@@ -4,24 +4,8 @@
 
 import logging
 import re
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Dict,
-    Generic,
-    Iterable,
-    Iterator,
-    List,
-    MutableMapping,
-    MutableSequence,
-    Optional,
-    Set,
-    Type,
-    TypeVar,
-    Union,
-    cast,
-    overload,
-)
+from collections.abc import Iterable, Iterator, MutableMapping, MutableSequence
+from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast, overload
 
 from pydantic import BaseModel
 from typing_extensions import Literal
@@ -35,13 +19,13 @@
     UnresolvedVariable,
     UnresolvedVariableValue,
 )
-from .lookups.handlers.base import LookupHandler
 from .lookups.registry import RUNWAY_LOOKUP_HANDLERS
 
 if TYPE_CHECKING:
     from .cfngin.providers.aws.default import Provider
     from .config.components.runway import RunwayVariablesDefinition
     from .context import CfnginContext, RunwayContext
+    from .lookups.handlers.base import LookupHandler
 
 LOGGER = logging.getLogger(__name__)
 
@@ -53,6 +37,7 @@
 class Variable:
     """Represents a variable provided to a Runway directive."""
 
+    _value: VariableValue
     name: str
 
     def __init__(
@@ -75,11 +60,11 @@ def __init__(
         self.variable_type = variable_type
 
     @property
-    def dependencies(self) -> Set[str]:
+    def dependencies(self) -> set[str]:
         """Stack names that this variable depends on.
 
         Returns:
-            Set[str]: Stack names that this variable depends on.
+            set[str]: Stack names that this variable depends on.
 
         """
         return self._value.dependencies
@@ -108,9 +93,9 @@ def value(self) -> Any:
 
     def resolve(
         self,
-        context: Union[CfnginContext, RunwayContext],
-        provider: Optional[Provider] = None,
-        variables: Optional[RunwayVariablesDefinition] = None,
+        context: CfnginContext | RunwayContext,
+        provider: Provider | None = None,
+        variables: RunwayVariablesDefinition | None = None,
         **kwargs: Any,
     ) -> None:
         """Resolve the variable value.
@@ -119,15 +104,14 @@ def resolve(
             context: The current context object.
             provider: Subclass of the base provider.
             variables: Object containing variables passed to Runway.
+            **kwargs: Arbitrary keyword arguments.
 
         Raises:
             FailedVariableLookup
 
         """
         try:
-            self._value.resolve(
-                context, provider=provider, variables=variables, **kwargs
-            )
+            self._value.resolve(context, provider=provider, variables=variables, **kwargs)
         except FailedLookup as err:
             raise FailedVariableLookup(self, err) from err.cause
 
@@ -147,9 +131,9 @@ def __repr__(self) -> str:
 
 
 def resolve_variables(
-    variables: List[Variable],
-    context: Union[CfnginContext, RunwayContext],
-    provider: Optional[Provider] = None,
+    variables: list[Variable],
+    context: CfnginContext | RunwayContext,
+    provider: Provider | None = None,
 ) -> None:
     """Given a list of variables, resolve all of them.
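
The LaxStr validator added in _lax_str.py above can be sanity-checked in isolation: pydantic v2 deliberately refuses to coerce numbers to str, so the BeforeValidator performs the conversion first. A self-contained sketch that mirrors _handler locally rather than importing Runway (assumes pydantic v2):

    from __future__ import annotations

    from decimal import Decimal
    from typing import Annotated

    from pydantic import BaseModel
    from pydantic.functional_validators import BeforeValidator


    def _to_str(value: object) -> object:
        """Coerce numbers to str; pass anything else through untouched."""
        if isinstance(value, (float, int, Decimal)):
            return str(value)
        return value


    class Namespace(BaseModel):
        """Illustrative model; pydantic v2 alone would reject an int here."""

        some_field: Annotated[str, BeforeValidator(_to_str)]


    print(Namespace(some_field=123).some_field)  # '123'
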
@@ -174,7 +158,7 @@ class VariableValue: variable_type: VariableTypeLiteralTypeDef @property - def dependencies(self) -> Set[Any]: + def dependencies(self) -> set[Any]: """Stack names that this variable depends on.""" return set() @@ -212,9 +196,9 @@ def value(self) -> Any: def resolve( self, - context: Union[CfnginContext, RunwayContext], - provider: Optional[Provider] = None, - variables: Optional[RunwayVariablesDefinition] = None, + context: CfnginContext | RunwayContext, + provider: Provider | None = None, + variables: RunwayVariablesDefinition | None = None, **kwargs: Any, ) -> None: """Resolve the variable value. @@ -223,6 +207,7 @@ def resolve( context: The current context object. provider: Subclass of the base provider. variables: Object containing variables passed to Runway. + **kwargs: Arbitrary keyword arguments. """ @@ -245,13 +230,13 @@ def parse_obj( @overload @classmethod def parse_obj( - cls, obj: Dict[str, Any], variable_type: VariableTypeLiteralTypeDef = ... + cls, obj: dict[str, Any], variable_type: VariableTypeLiteralTypeDef = ... ) -> VariableValue: ... @overload @classmethod def parse_obj( - cls, obj: List[Any], variable_type: VariableTypeLiteralTypeDef = ... + cls, obj: list[Any], variable_type: VariableTypeLiteralTypeDef = ... ) -> VariableValueList: ... @overload @@ -264,12 +249,10 @@ def parse_obj( @classmethod def parse_obj( cls, obj: str, variable_type: VariableTypeLiteralTypeDef = ... - ) -> VariableValueConcatenation[ - Union[VariableValueLiteral[str], VariableValueLookup] - ]: ... + ) -> VariableValueConcatenation[VariableValueLiteral[str] | VariableValueLookup]: ... @classmethod - def parse_obj( + def parse_obj( # noqa: C901 cls, obj: Any, variable_type: VariableTypeLiteralTypeDef = "cfngin" ) -> VariableValue: """Parse complex variable structures using type appropriate subclasses. @@ -280,22 +263,22 @@ def parse_obj( """ if isinstance(obj, BaseModel): - return VariableValuePydanticModel(obj, variable_type=variable_type) # type: ignore + return VariableValuePydanticModel(obj, variable_type=variable_type) if isinstance(obj, dict): return VariableValueDict(obj, variable_type=variable_type) # type: ignore if isinstance(obj, list): return VariableValueList(obj, variable_type=variable_type) # type: ignore if not isinstance(obj, str): - return VariableValueLiteral(obj, variable_type=variable_type) # type: ignore - - tokens: VariableValueConcatenation[ - Union[VariableValueLiteral[str], VariableValueLookup] - ] = VariableValueConcatenation( - # pyright 1.1.138 is having issues properly inferring the type from comprehension - [ # type: ignore - VariableValueLiteral(cast(str, t), variable_type=variable_type) - for t in re.split(r"(\$\{|\}|\s+)", obj) # ${ or space or } - ] + return VariableValueLiteral(obj, variable_type=variable_type) + + tokens: VariableValueConcatenation[VariableValueLiteral[str] | VariableValueLookup] = ( + VariableValueConcatenation( + # pyright 1.1.138 is having issues properly inferring the type from comprehension + [ + VariableValueLiteral(cast(str, t), variable_type=variable_type) + for t in re.split(r"(\$\{|\}|\s+)", obj) # ${ or space or } + ] + ) ) opener = "${" @@ -352,7 +335,7 @@ class VariableValueDict(VariableValue, MutableMapping[str, VariableValue]): """A dict variable value.""" def __init__( - self, data: Dict[str, Any], variable_type: VariableTypeLiteralTypeDef = "cfngin" + self, data: dict[str, Any], variable_type: VariableTypeLiteralTypeDef = "cfngin" ) -> None: """Instantiate class. 
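
The parse_obj hunk above keeps the same tokenizer it always had: a string is split on "${", "}", and whitespace runs, and because the pattern uses a capturing group, re.split keeps those delimiters in the result so lookups can be reassembled during concatenation. A quick illustration of what that split yields:

    import re

    # capturing group => re.split keeps the delimiters in the result
    tokens = re.split(r"(\$\{|\}|\s+)", "prefix-${var name}-suffix")
    print(tokens)
    # ['prefix-', '${', 'var', ' ', 'name', '}', '-suffix']
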
@@ -361,15 +344,13 @@ def __init__( variable_type: Type of variable (cfngin|runway). """ - self._data = { - k: self.parse_obj(v, variable_type=variable_type) for k, v in data.items() - } + self._data = {k: self.parse_obj(v, variable_type=variable_type) for k, v in data.items()} self.variable_type: VariableTypeLiteralTypeDef = variable_type @property - def dependencies(self) -> Set[str]: + def dependencies(self) -> set[str]: """Stack names that this variable depends on.""" - deps: Set[str] = set() + deps: set[str] = set() for item in self.values(): deps.update(item.dependencies) return deps @@ -383,7 +364,7 @@ def resolved(self) -> bool: return accumulator @property - def simplified(self) -> Dict[str, Any]: + def simplified(self) -> dict[str, Any]: """Return a simplified version of the value. This can be used to concatenate two literals into one literal or @@ -393,15 +374,15 @@ def simplified(self) -> Dict[str, Any]: return {k: v.simplified for k, v in self.items()} @property - def value(self) -> Dict[str, Any]: + def value(self) -> dict[str, Any]: """Value of the variable. Can be resolved or unresolved.""" return {k: v.value for k, v in self.items()} def resolve( self, - context: Union[CfnginContext, RunwayContext], - provider: Optional[Provider] = None, - variables: Optional[RunwayVariablesDefinition] = None, + context: CfnginContext | RunwayContext, + provider: Provider | None = None, + variables: RunwayVariablesDefinition | None = None, **kwargs: Any, ) -> None: """Resolve the variable value. @@ -410,6 +391,7 @@ def resolve( context: The current context object. provider: Subclass of the base provider. variables: Object containing variables passed to Runway. + **kwargs: Arbitrary keyword arguments. """ for item in self.values(): @@ -433,7 +415,7 @@ def __len__(self) -> int: def __repr__(self) -> str: """Return object representation.""" - return f"Dict[{', '.join(f'{k}={v}' for k, v in self.items())}]" + return f"dict[{', '.join(f'{k}={v}' for k, v in self.items())}]" def __setitem__(self, __key: str, __value: VariableValue) -> None: """Set item by index.""" @@ -455,15 +437,15 @@ def __init__( variable_type: Type of variable (cfngin|runway). """ - self._data: List[VariableValue] = [ + self._data: list[VariableValue] = [ self.parse_obj(i, variable_type=variable_type) for i in iterable ] self.variable_type: VariableTypeLiteralTypeDef = variable_type @property - def dependencies(self) -> Set[str]: + def dependencies(self) -> set[str]: """Stack names that this variable depends on.""" - deps: Set[str] = set() + deps: set[str] = set() for item in self: deps.update(item.dependencies) return deps @@ -477,7 +459,7 @@ def resolved(self) -> bool: return accumulator @property - def simplified(self) -> List[VariableValue]: + def simplified(self) -> list[VariableValue]: """Return a simplified version of the value. This can be used to concatenate two literals into one literal or @@ -487,7 +469,7 @@ def simplified(self) -> List[VariableValue]: return [item.simplified for item in self] @property - def value(self) -> List[Any]: + def value(self) -> list[Any]: """Value of the variable. 
Can be resolved or unresolved.""" return [item.value for item in self] @@ -497,9 +479,9 @@ def insert(self, index: int, value: VariableValue) -> None: def resolve( self, - context: Union[CfnginContext, RunwayContext], - provider: Optional[Provider] = None, - variables: Optional[RunwayVariablesDefinition] = None, + context: CfnginContext | RunwayContext, + provider: Provider | None = None, + variables: RunwayVariablesDefinition | None = None, **kwargs: Any, ) -> None: """Resolve the variable value. @@ -508,37 +490,44 @@ def resolve( context: The current context object. provider: Subclass of the base provider. variables: Object containing variables passed to Runway. + **kwargs: Arbitrary keyword arguments. """ for item in self: item.resolve(context, provider=provider, variables=variables, **kwargs) - def __delitem__(self, __index: int) -> None: + @overload + def __delitem__(self, index: int) -> None: ... + + @overload + def __delitem__(self, index: slice) -> None: ... + + def __delitem__(self, index: int | slice) -> None: """Delete item by index.""" - del self._data[__index] + del self._data[index] @overload def __getitem__(self, __index: int) -> VariableValue: ... @overload - def __getitem__(self, __index: slice) -> List[VariableValue]: ... + def __getitem__(self, __index: slice) -> list[VariableValue]: ... - def __getitem__( # type: ignore - self, __index: Union[int, slice] - ) -> Union[MutableSequence[VariableValue], VariableValue]: + def __getitem__( # pyright: ignore[reportIncompatibleMethodOverride] + self, __index: int | slice + ) -> MutableSequence[VariableValue] | VariableValue: """Get item by index.""" - return self._data[__index] # type: ignore + return self._data[__index] @overload def __setitem__(self, __index: int, __value: VariableValue) -> None: ... @overload - def __setitem__(self, __index: slice, __value: List[VariableValue]) -> None: ... + def __setitem__(self, __index: slice, __value: list[VariableValue]) -> None: ... - def __setitem__( + def __setitem__( # pyright: ignore[reportIncompatibleMethodOverride] self, - __index: Union[int, slice], - __value: Union[List[VariableValue], VariableValue], + __index: int | slice, + __value: list[VariableValue] | VariableValue, ) -> None: """Set item by index.""" self._data[__index] = __value # type: ignore @@ -553,7 +542,7 @@ def __len__(self) -> int: def __repr__(self) -> str: """Object string representation.""" - return f"List[{', '.join(repr(i) for i in self._data)}]" + return f"list[{', '.join(repr(i) for i in self._data)}]" class VariableValueLiteral(Generic[_LiteralValue], VariableValue): @@ -615,9 +604,9 @@ def __init__( self.variable_type: VariableTypeLiteralTypeDef = variable_type @property - def dependencies(self) -> Set[str]: + def dependencies(self) -> set[str]: """Stack names that this variable depends on.""" - deps: Set[str] = set() + deps: set[str] = set() for item in self: deps.update(item.dependencies) return deps @@ -638,7 +627,7 @@ def simplified(self) -> VariableValue: nested concatenations. 
""" - concat: List[VariableValue] = [] + concat: list[VariableValue] = [] for item in self: if isinstance(item, VariableValueLiteral) and item.value == "": pass @@ -650,7 +639,7 @@ def simplified(self) -> VariableValue: concat[-1] = VariableValueLiteral( str(concat[-1].value) + str(item.value) # type: ignore ) - elif isinstance(item, VariableValueConcatenation): # type: ignore + elif isinstance(item, VariableValueConcatenation): concat.extend(iter(item.simplified)) else: concat.append(item.simplified) @@ -672,21 +661,19 @@ def value(self) -> Any: if len(self) == 1: return self[0].value - values: List[str] = [] + values: list[str] = [] for value in self: resolved_value = value.value - if isinstance(resolved_value, bool) or not isinstance( - resolved_value, (int, str) - ): + if isinstance(resolved_value, bool) or not isinstance(resolved_value, (int, str)): raise InvalidLookupConcatenation(value, self) values.append(str(resolved_value)) return "".join(values) def resolve( self, - context: Union[CfnginContext, RunwayContext], - provider: Optional[Provider] = None, - variables: Optional[RunwayVariablesDefinition] = None, + context: CfnginContext | RunwayContext, + provider: Provider | None = None, + variables: RunwayVariablesDefinition | None = None, **kwargs: Any, ) -> None: """Resolve the variable value. @@ -695,6 +682,7 @@ def resolve( context: The current context object. provider: Subclass of the base provider. variables: Object containing variables passed to Runway. + **kwargs: Arbitrary keyword arguments. """ for value in self: @@ -708,11 +696,9 @@ def __delitem__(self, __index: int) -> None: def __getitem__(self, __index: int) -> _VariableValue: ... @overload - def __getitem__(self, __index: slice) -> List[_VariableValue]: ... + def __getitem__(self, __index: slice) -> list[_VariableValue]: ... - def __getitem__( - self, __index: Union[int, slice] - ) -> Union[List[_VariableValue], _VariableValue]: + def __getitem__(self, __index: int | slice) -> list[_VariableValue] | _VariableValue: """Get item by index.""" return self._data[__index] @@ -720,12 +706,12 @@ def __getitem__( def __setitem__(self, __index: int, __value: _VariableValue) -> None: ... @overload - def __setitem__(self, __index: slice, __value: List[_VariableValue]) -> None: ... + def __setitem__(self, __index: slice, __value: list[_VariableValue]) -> None: ... def __setitem__( self, - __index: Union[int, slice], - __value: Union[List[_VariableValue], _VariableValue], + __index: int | slice, + __value: list[_VariableValue] | _VariableValue, ) -> None: """Set item by index.""" self._data[__index] = __value @@ -746,7 +732,7 @@ def __repr__(self) -> str: class VariableValueLookup(VariableValue): """A lookup variable value.""" - handler: Type[LookupHandler] + handler: type[LookupHandler[Any]] lookup_name: VariableValueLiteral[str] lookup_query: VariableValue @@ -755,8 +741,8 @@ class VariableValueLookup(VariableValue): def __init__( self, lookup_name: VariableValueLiteral[str], - lookup_query: Union[str, VariableValue], - handler: Optional[Type[LookupHandler]] = None, + lookup_query: str | VariableValue, + handler: type[LookupHandler[Any]] | None = None, variable_type: VariableTypeLiteralTypeDef = "cfngin", ) -> None: """Initialize class. 
@@ -790,15 +776,13 @@ def __init__( elif variable_type == "runway": handler = RUNWAY_LOOKUP_HANDLERS[lookup_name_resolved] else: - raise ValueError( - 'Variable type must be one of "cfngin" or "runway"' - ) + raise ValueError('Variable type must be one of "cfngin" or "runway"') except KeyError: raise UnknownLookupType(self) from None self.handler = handler @property - def dependencies(self) -> Set[str]: + def dependencies(self) -> set[str]: """Stack names that this variable depends on.""" if hasattr(self.handler, "dependencies"): return self.handler.dependencies(self.lookup_query) @@ -833,9 +817,9 @@ def value(self) -> Any: def resolve( self, - context: Union[CfnginContext, RunwayContext], - provider: Optional[Provider] = None, - variables: Optional[RunwayVariablesDefinition] = None, + context: CfnginContext | RunwayContext, + provider: Provider | None = None, + variables: RunwayVariablesDefinition | None = None, **kwargs: Any, ) -> None: """Resolve the variable value. @@ -844,14 +828,13 @@ def resolve( context: The current context object. provider: Subclass of the base provider. variables: Object containing variables passed to Runway. + **kwargs: Arbitrary keyword arguments. Raises: FailedLookup: A lookup failed for any reason. """ - self.lookup_query.resolve( - context=context, provider=provider, variables=variables, **kwargs - ) + self.lookup_query.resolve(context=context, provider=provider, variables=variables, **kwargs) try: result = self.handler.handle( self.lookup_query.value, @@ -871,10 +854,8 @@ def __iter__(self) -> Iterator[VariableValueLookup]: def __repr__(self) -> str: """Return object representation.""" if self._resolved: - return ( - f"Lookup[{self._data} ({self.lookup_name} {repr(self.lookup_query)})]" - ) - return f"Lookup[{self.lookup_name} {repr(self.lookup_query)}]" + return f"Lookup[{self._data} ({self.lookup_name} {self.lookup_query!r})]" + return f"Lookup[{self.lookup_name} {self.lookup_query!r}]" def __str__(self) -> str: """Object displayed as a string.""" @@ -896,16 +877,16 @@ def __init__( variable_type: Type of variable (cfngin|runway). """ - self._data: Dict[str, VariableValue] = { + self._data: dict[str, VariableValue] = { k: self.parse_obj(v, variable_type=variable_type) for k, v in data } self._model_class = type(data) self.variable_type: VariableTypeLiteralTypeDef = variable_type @property - def dependencies(self) -> Set[str]: + def dependencies(self) -> set[str]: """Stack names that this variable depends on.""" - deps: Set[str] = set() + deps: set[str] = set() for value in self._data.values(): deps.update(value.dependencies) return deps @@ -919,7 +900,7 @@ def resolved(self) -> bool: return accumulator @property - def simplified(self) -> Dict[str, Any]: + def simplified(self) -> dict[str, Any]: """Return a simplified version of the value. This can be used to concatenate two literals into one literal or @@ -936,15 +917,15 @@ def value(self) -> _PydanticModelTypeVar: into a pydantic model. """ - return self._model_class.parse_obj( + return self._model_class.model_validate( {field: value.value for field, value in self._data.items()} ) def resolve( self, - context: Union[CfnginContext, RunwayContext], - provider: Optional[Provider] = None, - variables: Optional[RunwayVariablesDefinition] = None, + context: CfnginContext | RunwayContext, + provider: Provider | None = None, + variables: RunwayVariablesDefinition | None = None, **kwargs: Any, ) -> None: """Resolve the variable value. @@ -953,6 +934,7 @@ def resolve( context: The current context object. 
provider: Subclass of the base provider. variables: Object containing variables passed to Runway. + **kwargs: Arbitrary keyword arguments. """ for item in self._data.values(): @@ -977,8 +959,7 @@ def __len__(self) -> int: def __repr__(self) -> str: """Return object representation.""" return ( - self._model_class.__name__ - + f"[{', '.join(f'{k}={v}' for k, v in self._data.items())}]" + self._model_class.__name__ + f"[{', '.join(f'{k}={v}' for k, v in self._data.items())}]" ) def __setitem__(self, __key: str, __value: VariableValue) -> None: diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 941988601..000000000 --- a/setup.cfg +++ /dev/null @@ -1,59 +0,0 @@ -[flake8] -classmethod-decorators = - classmethod, - root_validator, - validator -docstring-convention = all -exclude = - *.egg-info, - *.pyc, - *.pyi, - .demo, - .eggs, - .git, - .github, - .mypy_cache, - .runway, - .serverless, - .terraform, - .venv, - __pycache__, - artifacts, - build, - dist, - docs/.venv, - docs/build, - docs/source/apidocs, - node_modules, - npm, - typings, - quickstarts -extend-ignore = - # BaseException.message` has been deprecated false positive for custom exceptions - B306, - # No blank lines allowed after function docstring - D203, - # Multi-line docstring summary should start at the second line - D213, - # Section underline is over-indented - D215, - # First word of the first line should be properly capitalized - D403, - # Section name should end with a newline - D406, - # Missing dashed underline after section - D407, - # Section underline should be in the line following the section’s name - D408, - # Section underline should match the length of its name - D409, - # First line should end with a period, question mark, or exclamation point - D415, - # whitespace before ':' - black compatible - E203, - # line break before operator - W503 -ignore-decorators = abstractmethod|overload -max-line-length = 98 -show-source = true -statistics = true diff --git a/tests/README.md b/tests/README.md index fb66e04ee..ab5220376 100644 --- a/tests/README.md +++ b/tests/README.md @@ -34,13 +34,12 @@ The the operation of each function/method individually. - Low level tests that import individual functions and classes to invoke them directly. - Mocks should be used to isolate each function/method. - ## Running Tests Tests can be run using `make` commands from the root of the repo. 
-| Command | Description | -|-------------------------|--------------------------| +| Command | Description | +| ----------------------- | ------------------------ | | `make test` | integration & unit tests | | `make test-functional` | functional tests | | `make test-integration` | integration tests | diff --git a/tests/conftest.py b/tests/conftest.py index 82c2556b7..b73ec1d13 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,22 +1,22 @@ """Pytest configuration, fixtures, and plugins.""" -# pylint: disable=redefined-outer-name from __future__ import annotations import os from pathlib import Path -from typing import TYPE_CHECKING, Generator, Iterator +from typing import TYPE_CHECKING import pytest from .factories import cli_runner_factory if TYPE_CHECKING: + from collections.abc import Generator, Iterator + from _pytest.config import Config from _pytest.config.argparsing import Parser from _pytest.fixtures import SubRequest from click.testing import CliRunner - from pytest import TempPathFactory def pytest_configure(config: Config) -> None: @@ -50,20 +50,20 @@ def pytest_addoption(parser: Parser) -> None: ) -@pytest.fixture(scope="function") +@pytest.fixture() def cli_runner(request: SubRequest) -> CliRunner: """Initialize instance of `click.testing.CliRunner`.""" return cli_runner_factory(request) -@pytest.fixture(scope="function") +@pytest.fixture() def cli_runner_isolated(cli_runner: CliRunner) -> Generator[CliRunner, None, None]: """Initialize instance of `click.testing.CliRunner` with `isolate_filesystem()` called.""" with cli_runner.isolated_filesystem(): yield cli_runner -@pytest.fixture(scope="function") +@pytest.fixture() def cd_tmp_path(tmp_path: Path) -> Iterator[Path]: """Change directory to a temporary path. @@ -79,7 +79,7 @@ def cd_tmp_path(tmp_path: Path) -> Iterator[Path]: os.chdir(prev_dir) -@pytest.fixture(scope="function") +@pytest.fixture() def root_dir() -> Path: """Return a path object to the root directory.""" return Path(__file__).parent.parent @@ -101,6 +101,6 @@ def sanitize_environment() -> None: @pytest.fixture(scope="session") -def tfenv_dir(tmp_path_factory: TempPathFactory) -> Path: +def tfenv_dir(tmp_path_factory: pytest.TempPathFactory) -> Path: """Directory for storing tfenv between tests.""" return tmp_path_factory.mktemp(".tfenv", numbered=True) diff --git a/tests/factories.py b/tests/factories.py index a46eb9c64..0bd677251 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -1,10 +1,9 @@ """Test classes.""" -# pyright: basic, reportIncompatibleMethodOverride=none from __future__ import annotations import os # imports os -from typing import TYPE_CHECKING, Any, Dict, cast +from typing import TYPE_CHECKING, Any, cast from click.testing import CliRunner @@ -14,7 +13,7 @@ def cli_runner_factory(request: SubRequest) -> CliRunner: """Initialize instance of `click.testing.CliRunner`.""" - kwargs: Dict[str, Any] = { + kwargs: dict[str, Any] = { "env": { "CFNGIN_STACK_POLL_TIME": "1", "DEPLOY_ENVIRONMENT": "test", @@ -24,5 +23,5 @@ def cli_runner_factory(request: SubRequest) -> CliRunner: } mark = request.node.get_closest_marker("cli_runner") if mark: - kwargs.update(cast(Dict[str, Any], mark.kwargs)) + kwargs.update(cast(dict[str, Any], mark.kwargs)) return CliRunner(**kwargs) diff --git a/tests/functional/cdk/test_multistack/package.json b/tests/functional/cdk/test_multistack/package.json index 03bff770e..b2cc94c28 100644 --- a/tests/functional/cdk/test_multistack/package.json +++ b/tests/functional/cdk/test_multistack/package.json @@ -1,18 
+1,7 @@ { - "name": "test_multistack", - "version": "0.0.0", "bin": { "example": "bin/example.js" }, - "scripts": { - "build": "tsc", - "deploy": "cdk deploy", - "diff": "cdk diff", - "lint": "eslint 'bin/*.ts' 'lib/*.ts'", - "lintfix": "npm run lint -- --fix", - "synth": "cdk synth", - "watch": "tsc -w" - }, "dependencies": { "@aws-cdk/core": "^1.181.0" }, @@ -33,5 +22,16 @@ "tslint-config-prettier": "^1.18.0", "tslint-plugin-prettier": "^2.3.0", "typescript": "^4.9.4" - } + }, + "name": "test_multistack", + "scripts": { + "build": "tsc", + "deploy": "cdk deploy", + "diff": "cdk diff", + "lint": "eslint 'bin/*.ts' 'lib/*.ts'", + "lintfix": "npm run lint -- --fix", + "synth": "cdk synth", + "watch": "tsc -w" + }, + "version": "0.0.0" } diff --git a/tests/functional/cdk/test_multistack/test_runner.py b/tests/functional/cdk/test_multistack/test_runner.py index a0f7e6c59..6a9199e01 100644 --- a/tests/functional/cdk/test_multistack/test_runner.py +++ b/tests/functional/cdk/test_multistack/test_runner.py @@ -10,27 +10,28 @@ """ -# pylint: disable=redefined-outer-name from __future__ import annotations import shutil from pathlib import Path -from typing import TYPE_CHECKING, Generator +from typing import TYPE_CHECKING import pytest from runway._cli import cli if TYPE_CHECKING: + from collections.abc import Generator + from click.testing import CliRunner, Result CURRENT_DIR = Path(__file__).parent @pytest.fixture(scope="module") -def deploy_result(cli_runner: CliRunner) -> Generator[Result, None, None]: +def deploy_result(cli_runner: CliRunner) -> Result: """Execute `runway deploy` with `runway destroy` as a cleanup step.""" - yield cli_runner.invoke(cli, ["deploy"], env={"CI": "1"}) + return cli_runner.invoke(cli, ["deploy"], env={"CI": "1"}) @pytest.fixture(scope="module") diff --git a/tests/functional/cfngin/fixtures/blueprints/_bastion.py b/tests/functional/cfngin/fixtures/blueprints/_bastion.py index 0a0eaacfe..3e3312678 100644 --- a/tests/functional/cfngin/fixtures/blueprints/_bastion.py +++ b/tests/functional/cfngin/fixtures/blueprints/_bastion.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, ClassVar, Dict +from typing import TYPE_CHECKING, ClassVar from runway.cfngin.blueprints.base import Blueprint from runway.cfngin.blueprints.variables.types import ( @@ -22,7 +22,7 @@ class FakeBastion(Blueprint): """Fake Bastion.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "VpcId": {"type": EC2VPCId, "description": "Vpc Id"}, "DefaultSG": { "type": EC2SecurityGroupId, @@ -30,15 +30,15 @@ class FakeBastion(Blueprint): }, "PublicSubnets": { "type": EC2SubnetIdList, - "description": "Subnets to deploy public " "instances in.", + "description": "Subnets to deploy public instances in.", }, "PrivateSubnets": { "type": EC2SubnetIdList, - "description": "Subnets to deploy private " "instances in.", + "description": "Subnets to deploy private instances in.", }, "AvailabilityZones": { "type": CFNCommaDelimitedList, - "description": "Availability Zones to deploy " "instances in.", + "description": "Availability Zones to deploy instances in.", }, "InstanceType": { "type": CFNString, diff --git a/tests/functional/cfngin/fixtures/blueprints/_broken.py b/tests/functional/cfngin/fixtures/blueprints/_broken.py index f7fd569f3..99610b61f 100644 --- a/tests/functional/cfngin/fixtures/blueprints/_broken.py +++ b/tests/functional/cfngin/fixtures/blueprints/_broken.py @@ -2,7 +2,7 @@ from __future__ 
import annotations -from typing import TYPE_CHECKING, ClassVar, Dict +from typing import TYPE_CHECKING, ClassVar from troposphere import Ref from troposphere.cloudformation import WaitCondition, WaitConditionHandle @@ -20,7 +20,7 @@ class Broken(Blueprint): """ - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "StringVariable": {"type": str, "default": ""} } diff --git a/tests/functional/cfngin/fixtures/blueprints/_dummy.py b/tests/functional/cfngin/fixtures/blueprints/_dummy.py index 2b316f895..92d97f608 100644 --- a/tests/functional/cfngin/fixtures/blueprints/_dummy.py +++ b/tests/functional/cfngin/fixtures/blueprints/_dummy.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, ClassVar, Dict +from typing import TYPE_CHECKING, ClassVar from troposphere import Ref from troposphere.cloudformation import WaitCondition, WaitConditionHandle @@ -16,7 +16,7 @@ class Dummy(Blueprint): """Dummy blueprint.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "StringVariable": {"type": str, "default": ""} } @@ -35,7 +35,7 @@ class LongRunningDummy(Blueprint): """ - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "Count": { "type": int, "description": "The # of WaitConditionHandles to create.", diff --git a/tests/functional/cfngin/fixtures/blueprints/_lambda_function.py b/tests/functional/cfngin/fixtures/blueprints/_lambda_function.py index 91dd0351a..7b6ed5559 100644 --- a/tests/functional/cfngin/fixtures/blueprints/_lambda_function.py +++ b/tests/functional/cfngin/fixtures/blueprints/_lambda_function.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, ClassVar, Dict, Optional +from typing import TYPE_CHECKING, Any, ClassVar import awacs.awslambda import awacs.dynamodb @@ -22,7 +22,7 @@ class LambdaFunction(Blueprint): """Blueprint for creating a Lambda Function.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "AppName": {"type": str, "description": "Name of app."}, "Code": { "type": awslambda.Code, @@ -122,7 +122,7 @@ def runtime(self) -> Ref: @cached_property def lambda_function(self) -> awslambda.Function: """AWS Lambda Function.""" - optional_kwargs: Dict[str, Any] = { + optional_kwargs: dict[str, Any] = { "Environment": ( awslambda.Environment(Variables=self.variables["EnvironmentVariables"]) if self.variables["EnvironmentVariables"] @@ -146,13 +146,11 @@ def lambda_function(self) -> awslambda.Function: self.add_output(f"{func.title}Arn", func.get_att("Arn")) self.add_output("Runtime", self.runtime) for attr in awslambda.Code.props: - self.add_output( - f"Code{attr}", getattr(self.variables["Code"], attr, "null") - ) + self.add_output(f"Code{attr}", getattr(self.variables["Code"], attr, "null")) return func @cached_property - def lambda_layer(self) -> Optional[awslambda.LayerVersion]: + def lambda_layer(self) -> awslambda.LayerVersion | None: """AWS Lambda Layer.""" create_layer = bool(self.variables["LayerContent"].S3Bucket) kwargs = { @@ -179,5 +177,5 @@ def create_template(self) -> None: """Create template.""" self.template.set_version("2010-09-09") self.template.set_description("Test Lambda") - self.iam_role # pylint: disable=pointless-statement - self.lambda_function # pylint: disable=pointless-statement + 
self.iam_role # noqa: B018 + self.lambda_function # noqa: B018 diff --git a/tests/functional/cfngin/fixtures/blueprints/_vpc.py b/tests/functional/cfngin/fixtures/blueprints/_vpc.py index 6161745e2..7ad18f69f 100644 --- a/tests/functional/cfngin/fixtures/blueprints/_vpc.py +++ b/tests/functional/cfngin/fixtures/blueprints/_vpc.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, ClassVar, Dict +from typing import TYPE_CHECKING, ClassVar from troposphere.cloudformation import WaitConditionHandle @@ -16,7 +16,7 @@ class FakeVPC(Blueprint): """Fake VPC.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "AZCount": {"type": int, "default": 2}, "PrivateSubnets": { "type": CFNCommaDelimitedList, diff --git a/tests/functional/cfngin/fixtures/hooks/cleanup.py b/tests/functional/cfngin/fixtures/hooks/cleanup.py index 161bf8ce9..8da35b0f3 100644 --- a/tests/functional/cfngin/fixtures/hooks/cleanup.py +++ b/tests/functional/cfngin/fixtures/hooks/cleanup.py @@ -18,7 +18,7 @@ def local_delete( - context: CfnginContext, # pylint: disable=unused-argument + context: CfnginContext, # noqa: ARG001 *, path: StrPath, **_: Any, diff --git a/tests/functional/cfngin/fixtures/stack_policies/default.json b/tests/functional/cfngin/fixtures/stack_policies/default.json index 6a3513825..04ba8c8c1 100644 --- a/tests/functional/cfngin/fixtures/stack_policies/default.json +++ b/tests/functional/cfngin/fixtures/stack_policies/default.json @@ -1,10 +1,10 @@ { - "Statement" : [ - { - "Effect" : "Allow", - "Action" : "Update:*", - "Principal": "*", - "Resource" : "*" - } - ] + "Statement": [ + { + "Action": "Update:*", + "Effect": "Allow", + "Principal": "*", + "Resource": "*" + } + ] } diff --git a/tests/functional/cfngin/fixtures/stack_policies/none.json b/tests/functional/cfngin/fixtures/stack_policies/none.json index daf7f8424..f66cbde7d 100644 --- a/tests/functional/cfngin/fixtures/stack_policies/none.json +++ b/tests/functional/cfngin/fixtures/stack_policies/none.json @@ -1,10 +1,10 @@ { - "Statement" : [ - { - "Effect" : "Deny", - "Action" : "Update:*", - "Principal": "*", - "Resource" : "*" - } - ] + "Statement": [ + { + "Action": "Update:*", + "Effect": "Deny", + "Principal": "*", + "Resource": "*" + } + ] } diff --git a/tests/functional/cfngin/hooks/test_awslambda/sample_app/cfngin.yml b/tests/functional/cfngin/hooks/test_awslambda/sample_app/cfngin.yml index de663aa2a..828d89aa1 100644 --- a/tests/functional/cfngin/hooks/test_awslambda/sample_app/cfngin.yml +++ b/tests/functional/cfngin/hooks/test_awslambda/sample_app/cfngin.yml @@ -70,7 +70,7 @@ pre_deploy: - path: runway.cfngin.hooks.awslambda.PythonLayer data_key: awslambda.layer.xmlsec args: - << : *xmlsec_args + <<: *xmlsec_args compatible_runtimes: - python3.10 extend_gitignore: diff --git a/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker/index.py b/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker/index.py index a3a77b16e..20ce6cc87 100644 --- a/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker/index.py +++ b/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker/index.py @@ -1,11 +1,10 @@ """Lambda Function.""" -# pylint: disable=broad-except,import-outside-toplevel,unused-argument from __future__ import annotations import inspect from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any if TYPE_CHECKING: from ..type_defs import 
LambdaResponse @@ -13,7 +12,7 @@ PACKAGE_DIR = Path(__file__).parent -def handler(event: Dict[str, Any], context: object) -> LambdaResponse: +def handler(event: dict[str, Any], context: object) -> LambdaResponse: # noqa: ARG001 """Lambda Function entrypoint.""" try: import requests @@ -30,7 +29,7 @@ def handler(event: Dict[str, Any], context: object) -> LambdaResponse: "message": None, "status": "success", } - except Exception as exc: + except Exception as exc: # noqa: BLE001 return { "code": 500, "data": { diff --git a/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker_mysql/Pipfile b/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker_mysql/Pipfile index e4da7ac5a..6e2f9c074 100644 --- a/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker_mysql/Pipfile +++ b/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker_mysql/Pipfile @@ -1,9 +1,9 @@ -[[source]] -url = "https://pypi.org/simple" -verify_ssl = true -name = "pypi" +[dev-packages] [packages] mysqlclient = "==2.1.1" -[dev-packages] +[[source]] +name = "pypi" +url = "https://pypi.org/simple" +verify_ssl = true diff --git a/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker_mysql/index.py b/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker_mysql/index.py index c3695e606..39c76aea1 100644 --- a/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker_mysql/index.py +++ b/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker_mysql/index.py @@ -1,11 +1,10 @@ """Lambda Function.""" -# pylint: disable=broad-except,import-error,import-outside-toplevel,unused-argument from __future__ import annotations import inspect from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any if TYPE_CHECKING: from ..type_defs import LambdaResponse @@ -13,7 +12,7 @@ PACKAGE_DIR = Path(__file__).parent -def handler(event: Dict[str, Any], context: object) -> LambdaResponse: +def handler(event: dict[str, Any], context: object) -> LambdaResponse: # noqa: ARG001 """Lambda Function entrypoint.""" try: import MySQLdb # type: ignore @@ -25,12 +24,12 @@ def handler(event: Dict[str, Any], context: object) -> LambdaResponse: str(path.relative_to(PACKAGE_DIR)) for path in sorted(PACKAGE_DIR.rglob("*"), reverse=True) ], - "mysqlclient": [i[0] for i in inspect.getmembers(MySQLdb)], # type: ignore + "mysqlclient": [i[0] for i in inspect.getmembers(MySQLdb)], }, "message": None, "status": "success", } - except Exception as exc: + except Exception as exc: # noqa: BLE001 return { "code": 500, "data": { diff --git a/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker_xmlsec/index.py b/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker_xmlsec/index.py index 83ecfd1b7..7f9962bbf 100644 --- a/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker_xmlsec/index.py +++ b/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker_xmlsec/index.py @@ -1,11 +1,10 @@ """Lambda Function.""" -# pylint: disable=broad-except,import-error,import-outside-toplevel,unused-argument from __future__ import annotations import inspect from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any if TYPE_CHECKING: from ..type_defs import LambdaResponse @@ -13,7 +12,7 @@ PACKAGE_DIR = Path(__file__).parent -def handler(event: Dict[str, Any], context: object) -> LambdaResponse: +def handler(event: dict[str, Any], context: object) -> LambdaResponse: 
# noqa: ARG001 """Lambda Function entrypoint.""" try: import lxml # type: ignore @@ -26,13 +25,13 @@ def handler(event: Dict[str, Any], context: object) -> LambdaResponse: str(path.relative_to(PACKAGE_DIR)) for path in sorted(PACKAGE_DIR.rglob("*"), reverse=True) ], - "lxml": [i[0] for i in inspect.getmembers(lxml)], # type: ignore - "xmlsec": [i[0] for i in inspect.getmembers(xmlsec)], # type: ignore + "lxml": [i[0] for i in inspect.getmembers(lxml)], + "xmlsec": [i[0] for i in inspect.getmembers(xmlsec)], }, "message": None, "status": "success", } - except Exception as exc: + except Exception as exc: # noqa: BLE001 return { "code": 500, "data": { diff --git a/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker_xmlsec/pyproject.toml b/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker_xmlsec/pyproject.toml index 14c5bee02..e971e267f 100644 --- a/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker_xmlsec/pyproject.toml +++ b/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/docker_xmlsec/pyproject.toml @@ -1,17 +1,16 @@ [tool.poetry] name = "test-awslambda-xmlsec" version = "0.0.0" -description = "Test for awslambda hook." -license = "Apache-2.0" authors = [ "Onica Group LLC ", ] +description = "Test for awslambda hook." +license = "Apache-2.0" [tool.poetry.dependencies] python = ">=3.7, <4" - xmlsec = "*" [build-system] -requires = ["poetry_core>=1.0.3"] build-backend = "poetry.core.masonry.api" +requires = ["poetry_core>=1.0.3"] diff --git a/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/local/index.py b/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/local/index.py index cc4b8371f..58885c3b1 100644 --- a/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/local/index.py +++ b/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/local/index.py @@ -1,10 +1,9 @@ """Lambda Function.""" -# pylint: disable=broad-except,unused-argument from __future__ import annotations from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any if TYPE_CHECKING: from ..type_defs import LambdaResponse @@ -12,7 +11,7 @@ PACKAGE_DIR = Path(__file__).parent -def handler(event: Dict[str, Any], context: object) -> LambdaResponse: +def handler(event: dict[str, Any], context: object) -> LambdaResponse: # noqa: ARG001 """Lambda Function entrypoint.""" try: return { @@ -26,7 +25,7 @@ def handler(event: Dict[str, Any], context: object) -> LambdaResponse: "message": None, "status": "success", } - except Exception as exc: + except Exception as exc: # noqa: BLE001 return { "code": 500, "data": {}, diff --git a/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/local_xmlsec_layer/index.py b/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/local_xmlsec_layer/index.py index 02ca8e3b6..7a820dc7d 100644 --- a/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/local_xmlsec_layer/index.py +++ b/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/local_xmlsec_layer/index.py @@ -1,11 +1,10 @@ """Lambda Function using a Lambda Layer for xmlsec.""" -# pylint: disable=broad-except,import-error,import-outside-toplevel,unused-argument from __future__ import annotations import inspect from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any if TYPE_CHECKING: from ..type_defs import LambdaResponse @@ -14,7 +13,7 @@ OPT_DIR = Path("/opt") -def handler(event: Dict[str, Any], context: object) -> LambdaResponse: 
+def handler(event: dict[str, Any], context: object) -> LambdaResponse: # noqa: ARG001 """Lambda Function entrypoint.""" try: import lxml # type: ignore @@ -27,8 +26,8 @@ def handler(event: Dict[str, Any], context: object) -> LambdaResponse: str(path.relative_to(PACKAGE_DIR)) for path in sorted(PACKAGE_DIR.rglob("*"), reverse=True) ], - "lxml": [i[0] for i in inspect.getmembers(lxml)], # type: ignore - "xmlsec": [i[0] for i in inspect.getmembers(xmlsec)], # type: ignore + "lxml": [i[0] for i in inspect.getmembers(lxml)], + "xmlsec": [i[0] for i in inspect.getmembers(xmlsec)], "opt_dir_contents": [ str(path) for path in sorted(OPT_DIR.rglob("*"), reverse=True) ], @@ -36,7 +35,7 @@ def handler(event: Dict[str, Any], context: object) -> LambdaResponse: "message": None, "status": "success", } - except Exception as exc: + except Exception as exc: # noqa: BLE001 return { "code": 500, "data": { diff --git a/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/type_defs.py b/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/type_defs.py index 47019fe16..7fbcfe13b 100644 --- a/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/type_defs.py +++ b/tests/functional/cfngin/hooks/test_awslambda/sample_app/src/type_defs.py @@ -2,13 +2,7 @@ from __future__ import annotations -import sys -from typing import Any, Dict, Optional - -if sys.version_info < (3, 8): - from typing_extensions import Literal, TypedDict # type: ignore -else: - from typing import Literal, TypedDict # type: ignore +from typing import Any, Literal, TypedDict class _LambdaResponseOptional(TypedDict, total=False): @@ -21,8 +15,8 @@ class _LambdaResponseRequired(TypedDict): """Required fields for a Lambda Response.""" code: int - data: Dict[str, Any] - message: Optional[str] + data: dict[str, Any] + message: str | None status: Literal["error", "success"] diff --git a/tests/functional/cfngin/hooks/test_awslambda/test_runner.py b/tests/functional/cfngin/hooks/test_awslambda/test_runner.py index 32f654c1d..e308c04aa 100644 --- a/tests/functional/cfngin/hooks/test_awslambda/test_runner.py +++ b/tests/functional/cfngin/hooks/test_awslambda/test_runner.py @@ -1,31 +1,32 @@ """Test AWS Lambda hook.""" -# pylint: disable=no-self-argument -# pylint: disable=redefined-outer-name,unexpected-keyword-arg,unused-argument from __future__ import annotations import json import shutil from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Generator, Optional +from typing import TYPE_CHECKING, Any -import boto3 import pytest -from pydantic import root_validator +from pydantic import model_validator from runway._cli import cli from runway.compat import cached_property from runway.utils import BaseModel if TYPE_CHECKING: + from collections.abc import Generator + + import boto3 from click.testing import CliRunner, Result from mypy_boto3_cloudformation.client import CloudFormationClient from mypy_boto3_cloudformation.type_defs import StackTypeDef from mypy_boto3_lambda.client import LambdaClient - from sample_app.src.type_defs import LambdaResponse from runway.context import RunwayContext + from .sample_app.src.type_defs import LambdaResponse + AWS_REGION = "us-east-1" PYTHON_RUNTIME = "python3.10" STACK_PREFIX = "test-awslambda" @@ -54,22 +55,23 @@ class AwslambdaStackOutputs(BaseModel): """Outputs of a Stack used for testing the awslambda hook.""" - CodeImageUri: Optional[str] = None + CodeImageUri: str | None = None CodeS3Bucket: str CodeS3Key: str - CodeS3ObjectVersion: Optional[str] = None - CodeZipFile: 
Optional[str] = None + CodeS3ObjectVersion: str | None = None + CodeZipFile: str | None = None LambdaFunction: str LambdaFunctionArn: str LambdaRole: str - LayerContentS3Bucket: Optional[str] = None - LayerContentS3Key: Optional[str] = None - LayerContentS3ObjectVersion: Optional[str] = None - LayerVersion: Optional[str] = None + LayerContentS3Bucket: str | None = None + LayerContentS3Key: str | None = None + LayerContentS3ObjectVersion: str | None = None + LayerVersion: str | None = None Runtime: str - @root_validator(allow_reuse=True, pre=True) - def _convert_null_to_none(cls, values: Dict[str, Any]) -> Dict[str, Any]: + @model_validator(mode="before") + @classmethod + def _convert_null_to_none(cls, values: dict[str, Any]) -> dict[str, Any]: """Convert ``null`` to ``NoneType``.""" def _handle_null(v: Any) -> Any: @@ -101,7 +103,7 @@ def client(self) -> LambdaClient: @cached_property def outputs(self) -> AwslambdaStackOutputs: """Stack outputs.""" - return AwslambdaStackOutputs.parse_obj( + return AwslambdaStackOutputs.model_validate( { output["OutputKey"]: output["OutputValue"] for output in self.stack.get("Outputs", []) @@ -119,12 +121,12 @@ def stack(self) -> StackTypeDef: ) return stacks[0] - def invoke(self, *, payload: Optional[str] = None) -> LambdaResponse: + def invoke(self, *, payload: str | None = None) -> LambdaResponse: """Invoke the Lambda Function.""" response = self.client.invoke( FunctionName=self.outputs.LambdaFunction, InvocationType="RequestResponse", - **{"Payload": payload} if payload else {}, + **{"Payload": payload} if payload else {}, # pyright: ignore[reportArgumentType] ) if "Payload" in response: return json.load(response["Payload"]) @@ -166,15 +168,11 @@ def test_deploy_exit_code(deploy_result: Result) -> None: def test_deploy_log_messages(deploy_result: Result) -> None: """Test deploy log messages.""" - build_skipped = [ - line for line in deploy_result.stdout.split("\n") if "build skipped" in line - ] + build_skipped = [line for line in deploy_result.stdout.split("\n") if "build skipped" in line] assert not build_skipped, "\n".join(build_skipped) -def test_docker( - deploy_result: Result, namespace: str, runway_context: RunwayContext -) -> None: +def test_docker(deploy_result: Result, namespace: str, runway_context: RunwayContext) -> None: """Test function built with Docker.""" tester = AwslambdaTester( runway_context.get_session(region=AWS_REGION), @@ -193,9 +191,7 @@ def test_docker( assert "certifi/__init__.py" in response["data"]["dir_contents"] -def test_local( - deploy_result: Result, namespace: str, runway_context: RunwayContext -) -> None: +def test_local(deploy_result: Result, namespace: str, runway_context: RunwayContext) -> None: """Test function built with local python.""" tester = AwslambdaTester( runway_context.get_session(region=AWS_REGION), @@ -208,9 +204,7 @@ def test_local( assert response["data"]["dir_contents"] == ["index.py"] -def test_mysql( - deploy_result: Result, namespace: str, runway_context: RunwayContext -) -> None: +def test_mysql(deploy_result: Result, namespace: str, runway_context: RunwayContext) -> None: """Test function built from Dockerfile for mysql.""" tester = AwslambdaTester( runway_context.get_session(region=AWS_REGION), @@ -225,9 +219,7 @@ def test_mysql( assert "Pipfile" not in response["data"]["dir_contents"] -def test_xmlsec( - deploy_result: Result, namespace: str, runway_context: RunwayContext -) -> None: +def test_xmlsec(deploy_result: Result, namespace: str, runway_context: RunwayContext) -> None: """Test 
function built from Dockerfile for xmlsec.""" tester = AwslambdaTester( runway_context.get_session(region=AWS_REGION), @@ -244,9 +236,7 @@ def test_xmlsec( assert "poetry.lock" not in response["data"]["dir_contents"] -def test_xmlsec_layer( - deploy_result: Result, namespace: str, runway_context: RunwayContext -) -> None: +def test_xmlsec_layer(deploy_result: Result, namespace: str, runway_context: RunwayContext) -> None: """Test layer built from Dockerfile for xmlsec.""" tester = AwslambdaTester( runway_context.get_session(region=AWS_REGION), @@ -262,7 +252,7 @@ def test_xmlsec_layer( assert response["data"]["dir_contents"] == ["index.py"] -def test_plan(cli_runner: CliRunner, deploy_result: Result) -> None: +def test_plan(cli_runner: CliRunner, deploy_result: Result) -> None: # noqa: ARG001 """Test ``runway plan`` - this was not possible with old hook. deploy_result required so cleanup does not start before this runs. @@ -273,9 +263,7 @@ def test_plan(cli_runner: CliRunner, deploy_result: Result) -> None: (DOCKER_XMLSEC_DIR / "poetry.lock").unlink(missing_ok=True) plan_results = cli_runner.invoke(cli, ["plan"], env=ENV_VARS) assert plan_results.exit_code == 0, plan_results.output - matches = [ - line for line in plan_results.stdout.split("\n") if line.endswith(":no changes") - ] + matches = [line for line in plan_results.stdout.split("\n") if line.endswith(":no changes")] a_list = [4, 5] # count needs to be updated if number of test stacks change assert len(matches) in a_list, "\n".join(matches) diff --git a/tests/functional/cfngin/test_assume_role/test_runner.py b/tests/functional/cfngin/test_assume_role/test_runner.py index 1d366cbff..4199bc403 100644 --- a/tests/functional/cfngin/test_assume_role/test_runner.py +++ b/tests/functional/cfngin/test_assume_role/test_runner.py @@ -1,11 +1,10 @@ """Test Runway assume role.""" -# pylint: disable=redefined-outer-name from __future__ import annotations import shutil from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Generator +from typing import TYPE_CHECKING, Any import boto3 import pytest @@ -15,6 +14,8 @@ from runway._cli import cli if TYPE_CHECKING: + from collections.abc import Generator + from click.testing import CliRunner, Result AWS_REGION = "us-east-1" @@ -27,9 +28,7 @@ def assert_session_belongs_to_account(session: boto3.Session, account_id: str) - @pytest.fixture(scope="module") -def assumed_session( - main_session: boto3.Session, variables: Dict[str, Any] -) -> boto3.Session: +def assumed_session(main_session: boto3.Session, variables: dict[str, Any]) -> boto3.Session: """boto3 session for assumed account.""" role_arn = variables["runner_role"]["test-alt"] sts_client = main_session.client("sts") @@ -52,15 +51,15 @@ def main_session() -> boto3.Session: @pytest.fixture(scope="module") -def variables() -> Dict[str, Any]: +def variables() -> dict[str, Any]: """Contents of runway.variables.yml.""" return yaml.safe_load((CURRENT_DIR / "runway.variables.yml").read_bytes()) @pytest.fixture(scope="module") -def deploy_result(cli_runner: CliRunner) -> Generator[Result, None, None]: +def deploy_result(cli_runner: CliRunner) -> Result: """Execute `runway deploy` with `runway destroy` as a cleanup step.""" - yield cli_runner.invoke(cli, ["deploy", "--debug"], env={"CI": "1"}) + return cli_runner.invoke(cli, ["deploy", "--debug"], env={"CI": "1"}) @pytest.fixture(scope="module") @@ -77,7 +76,7 @@ def test_deploy_exit_code(deploy_result: Result) -> None: def test_does_not_exist_in_main_account( - main_session: boto3.Session, 
namespace: str, variables: Dict[str, Any] + main_session: boto3.Session, namespace: str, variables: dict[str, Any] ) -> None: """Test that the deployed stack does not exist in the main test account.""" assert_session_belongs_to_account(main_session, variables["account_id"]["test"]) @@ -89,12 +88,10 @@ def test_does_not_exist_in_main_account( def test_exists_in_assumed_account( - assumed_session: boto3.Session, namespace: str, variables: Dict[str, Any] + assumed_session: boto3.Session, namespace: str, variables: dict[str, Any] ) -> None: """Test that the deployed stack exists in the assumed account.""" - assert_session_belongs_to_account( - assumed_session, variables["account_id"]["test-alt"] - ) + assert_session_belongs_to_account(assumed_session, variables["account_id"]["test-alt"]) assert assumed_session.client("cloudformation").describe_stacks( StackName=f"{namespace}-test-assume-role" )["Stacks"] diff --git a/tests/functional/cfngin/test_aws_lambda_hook/cfngin.yml b/tests/functional/cfngin/test_aws_lambda_hook/cfngin.yml index ef62f7c0f..6b4b31bc3 100644 --- a/tests/functional/cfngin/test_aws_lambda_hook/cfngin.yml +++ b/tests/functional/cfngin/test_aws_lambda_hook/cfngin.yml @@ -21,12 +21,12 @@ pre_deploy: exclude: - '*.pyc' nondockerize: - path: ./lambda_src/nondockerize_src - runtime: python3.8 - include: - - '*.py' - exclude: - - '*.pyc' + path: ./lambda_src/nondockerize_src + runtime: python3.8 + include: + - '*.py' + exclude: + - '*.pyc' stacks: - name: test-dockerize @@ -46,17 +46,17 @@ stacks: post_deploy: - path: hooks.awslambda_test.invoke - required: True + required: true args: function_name: ${cfn ${namespace}-test-dockerize.LambdaFunction} - path: hooks.awslambda_test.invoke - required: True + required: true args: function_name: ${cfn ${namespace}-test-nondockerize.LambdaFunction} post_destroy: - path: hooks.cleanup.s3_delete_prefix - required: True + required: true args: bucket_name: ${cfngin_bucket} prefix: lambda_functions/${namespace}/ diff --git a/tests/functional/cfngin/test_aws_lambda_hook/lambda_src/dockerize_src/Pipfile b/tests/functional/cfngin/test_aws_lambda_hook/lambda_src/dockerize_src/Pipfile index cd5c3de34..bef4f4bba 100644 --- a/tests/functional/cfngin/test_aws_lambda_hook/lambda_src/dockerize_src/Pipfile +++ b/tests/functional/cfngin/test_aws_lambda_hook/lambda_src/dockerize_src/Pipfile @@ -1,9 +1,9 @@ -[[source]] -url = "https://pypi.org/simple" -verify_ssl = true -name = "pypi" +[dev-packages] [packages] numpy = "*" -[dev-packages] +[[source]] +name = "pypi" +url = "https://pypi.org/simple" +verify_ssl = true diff --git a/tests/functional/cfngin/test_aws_lambda_hook/lambda_src/dockerize_src/dockerize.py b/tests/functional/cfngin/test_aws_lambda_hook/lambda_src/dockerize_src/dockerize.py index b3b02c1db..3216d890c 100644 --- a/tests/functional/cfngin/test_aws_lambda_hook/lambda_src/dockerize_src/dockerize.py +++ b/tests/functional/cfngin/test_aws_lambda_hook/lambda_src/dockerize_src/dockerize.py @@ -1,15 +1,17 @@ -"""Test handler.""" +"""Test handler.""" # noqa: INP001 -# flake8: noqa -# pylint: disable=unused-argument -import lib +from __future__ import annotations +from typing import Any -def handler(event, context): +import lib # type: ignore + + +def handler(event: Any, context: Any) -> dict[str, int | str]: # noqa: ARG001 """Handle lambda.""" try: if lib.RESPONSE_OBJ.shape == (3, 5): - return {"statusCode": 200, "body": str(lib.RESPONSE_OBJ.shape)} + return {"statusCode": 200, "body": str(lib.RESPONSE_OBJ.shape)} # type: ignore raise ValueError - 
except: # pylint: disable=bare-except + except: # noqa: E722 return {"statusCode": 500, "body": "fail"} diff --git a/tests/functional/cfngin/test_aws_lambda_hook/lambda_src/nondockerize_src/Pipfile b/tests/functional/cfngin/test_aws_lambda_hook/lambda_src/nondockerize_src/Pipfile index cd5c3de34..bef4f4bba 100644 --- a/tests/functional/cfngin/test_aws_lambda_hook/lambda_src/nondockerize_src/Pipfile +++ b/tests/functional/cfngin/test_aws_lambda_hook/lambda_src/nondockerize_src/Pipfile @@ -1,9 +1,9 @@ -[[source]] -url = "https://pypi.org/simple" -verify_ssl = true -name = "pypi" +[dev-packages] [packages] numpy = "*" -[dev-packages] +[[source]] +name = "pypi" +url = "https://pypi.org/simple" +verify_ssl = true diff --git a/tests/functional/cfngin/test_aws_lambda_hook/lambda_src/nondockerize_src/nondockerize.py b/tests/functional/cfngin/test_aws_lambda_hook/lambda_src/nondockerize_src/nondockerize.py index b3b02c1db..3216d890c 100644 --- a/tests/functional/cfngin/test_aws_lambda_hook/lambda_src/nondockerize_src/nondockerize.py +++ b/tests/functional/cfngin/test_aws_lambda_hook/lambda_src/nondockerize_src/nondockerize.py @@ -1,15 +1,17 @@ -"""Test handler.""" +"""Test handler.""" # noqa: INP001 -# flake8: noqa -# pylint: disable=unused-argument -import lib +from __future__ import annotations +from typing import Any -def handler(event, context): +import lib # type: ignore + + +def handler(event: Any, context: Any) -> dict[str, int | str]: # noqa: ARG001 """Handle lambda.""" try: if lib.RESPONSE_OBJ.shape == (3, 5): - return {"statusCode": 200, "body": str(lib.RESPONSE_OBJ.shape)} + return {"statusCode": 200, "body": str(lib.RESPONSE_OBJ.shape)} # type: ignore raise ValueError - except: # pylint: disable=bare-except + except: # noqa: E722 return {"statusCode": 500, "body": "fail"} diff --git a/tests/functional/cfngin/test_aws_lambda_hook/test_runner.py b/tests/functional/cfngin/test_aws_lambda_hook/test_runner.py index 2c988061b..5547b8a78 100644 --- a/tests/functional/cfngin/test_aws_lambda_hook/test_runner.py +++ b/tests/functional/cfngin/test_aws_lambda_hook/test_runner.py @@ -1,17 +1,18 @@ """Test AWS Lambda hook.""" -# pylint: disable=redefined-outer-name from __future__ import annotations import shutil from pathlib import Path -from typing import TYPE_CHECKING, Generator +from typing import TYPE_CHECKING import pytest from runway._cli import cli if TYPE_CHECKING: + from collections.abc import Generator + from click.testing import CliRunner, Result CURRENT_DIR = Path(__file__).parent @@ -61,8 +62,7 @@ def test_deploy_log_messages_pipenv(deploy_result: Result) -> None: def test_deploy_log_messages_upload(deploy_result: Result, namespace: str) -> None: """Test deploy log messages.""" assert ( - f"uploading object: lambda_functions/{namespace}/lambda-dockerize-" - in deploy_result.stdout + f"uploading object: lambda_functions/{namespace}/lambda-dockerize-" in deploy_result.stdout ) assert ( f"uploading object: lambda_functions/{namespace}/lambda-nondockerize-" diff --git a/tests/functional/cfngin/test_destroy_removed/00-bootstrap.yaml b/tests/functional/cfngin/test_destroy_removed/00-bootstrap.yaml index 6c3e17ed5..efed99c10 100644 --- a/tests/functional/cfngin/test_destroy_removed/00-bootstrap.yaml +++ b/tests/functional/cfngin/test_destroy_removed/00-bootstrap.yaml @@ -16,22 +16,22 @@ stacks: post_destroy: - path: hooks.cleanup.s3_delete_prefix - required: True + required: true args: bucket_name: ${cfngin_bucket} prefix: persistent_graphs/${namespace}/ - path: hooks.cleanup.s3_delete_prefix 
- required: True + required: true args: bucket_name: ${cfngin_bucket} prefix: stack_templates/${namespace}-bastion/ - path: hooks.cleanup.s3_delete_prefix - required: True + required: true args: bucket_name: ${cfngin_bucket} prefix: stack_templates/${namespace}-other/ - path: hooks.cleanup.s3_delete_prefix - required: True + required: true args: bucket_name: ${cfngin_bucket} prefix: stack_templates/${namespace}-vpc/ diff --git a/tests/functional/cfngin/test_destroy_removed/test_runner.py b/tests/functional/cfngin/test_destroy_removed/test_runner.py index 2bc183a53..24414f841 100644 --- a/tests/functional/cfngin/test_destroy_removed/test_runner.py +++ b/tests/functional/cfngin/test_destroy_removed/test_runner.py @@ -1,17 +1,18 @@ """Test destroy stack removed from persistent graph.""" -# pylint: disable=redefined-outer-name from __future__ import annotations import shutil from pathlib import Path -from typing import TYPE_CHECKING, Generator +from typing import TYPE_CHECKING import pytest from runway._cli import cli if TYPE_CHECKING: + from collections.abc import Generator + from click.testing import CliRunner, Result CURRENT_DIR = Path(__file__).parent @@ -50,8 +51,7 @@ def test_deploy_log_messages(deploy_result: Result, namespace: str) -> None: ) assert ( '00-bootstrap:locked persistent graph "runway-testing-lab-cfngin-bucket-us-east-1' - f'/persistent_graphs/{namespace}/test.json" with lock ID "' - in deploy_result.stdout + f'/persistent_graphs/{namespace}/test.json" with lock ID "' in deploy_result.stdout ) assert ( '00-bootstrap:unlocked persistent graph "runway-testing-lab-cfngin-bucket-us-east-1' @@ -59,8 +59,7 @@ def test_deploy_log_messages(deploy_result: Result, namespace: str) -> None: ) assert ( '01-removed:locked persistent graph "runway-testing-lab-cfngin-bucket-us-east-1' - f'/persistent_graphs/{namespace}/test.json" with lock ID "' - in deploy_result.stdout + f'/persistent_graphs/{namespace}/test.json" with lock ID "' in deploy_result.stdout ) assert ( f"{namespace}-other:removed from the CFNgin config file; it is being destroyed" @@ -85,7 +84,4 @@ def test_destroy_exit_code(destroy_result: Result) -> None: @pytest.mark.order(after="test_destroy_exit_code") def test_destroy_log_messages(destroy_result: Result) -> None: """Test destroy log messages.""" - assert ( - "persistent graph deleted; does not need to be unlocked" - in destroy_result.stdout - ) + assert "persistent graph deleted; does not need to be unlocked" in destroy_result.stdout diff --git a/tests/functional/cfngin/test_duplicate_stack/test_runner.py b/tests/functional/cfngin/test_duplicate_stack/test_runner.py index a870c6c51..7bf61c63a 100644 --- a/tests/functional/cfngin/test_duplicate_stack/test_runner.py +++ b/tests/functional/cfngin/test_duplicate_stack/test_runner.py @@ -1,17 +1,18 @@ """Test duplicate stack names.""" -# pylint: disable=redefined-outer-name from __future__ import annotations import shutil from pathlib import Path -from typing import TYPE_CHECKING, Generator +from typing import TYPE_CHECKING import pytest from runway._cli import cli if TYPE_CHECKING: + from collections.abc import Generator + from click.testing import CliRunner, Result CURRENT_DIR = Path(__file__).parent @@ -33,11 +34,7 @@ def test_deploy_exit_code(deploy_result: Result) -> None: @pytest.mark.order(after="test_deploy_exit_code") def test_deploy_log_messages(deploy_result: Result) -> None: """Test deploy log messages.""" - expected_lines = [ - "[runway] 1 validation error for CfnginConfigDefinitionModel", - "stacks", - " 
Duplicate stack vpc found at index 0 (type=value_error)", - ] + expected_lines = ["[runway] 1 validation error for CFNgin Config File", "stacks"] expected = "\n".join(expected_lines) assert expected in deploy_result.stdout, ( "stdout does not match expected\n\nEXPECTED:\n" diff --git a/tests/functional/cfngin/test_locked_stack/test_runner.py b/tests/functional/cfngin/test_locked_stack/test_runner.py index 7ea182023..6e57045de 100644 --- a/tests/functional/cfngin/test_locked_stack/test_runner.py +++ b/tests/functional/cfngin/test_locked_stack/test_runner.py @@ -1,17 +1,18 @@ """Test locked stack.""" -# pylint: disable=redefined-outer-name from __future__ import annotations import shutil from pathlib import Path -from typing import TYPE_CHECKING, Generator +from typing import TYPE_CHECKING import pytest from runway._cli import cli if TYPE_CHECKING: + from collections.abc import Generator + from click.testing import CliRunner, Result CURRENT_DIR = Path(__file__).parent diff --git a/tests/functional/cfngin/test_parallel/test_runner.py b/tests/functional/cfngin/test_parallel/test_runner.py index 43d4d3bb0..6ec32268a 100644 --- a/tests/functional/cfngin/test_parallel/test_runner.py +++ b/tests/functional/cfngin/test_parallel/test_runner.py @@ -1,27 +1,28 @@ """Test parallel deployment.""" -# pylint: disable=redefined-outer-name from __future__ import annotations import platform import shutil from pathlib import Path -from typing import TYPE_CHECKING, Generator +from typing import TYPE_CHECKING import pytest from runway._cli import cli if TYPE_CHECKING: + from collections.abc import Generator + from click.testing import CliRunner, Result CURRENT_DIR = Path(__file__).parent @pytest.fixture(scope="module") -def deploy_result(cli_runner: CliRunner) -> Generator[Result, None, None]: +def deploy_result(cli_runner: CliRunner) -> Result: """Execute `runway deploy` with `runway destroy` as a cleanup step.""" - yield cli_runner.invoke(cli, ["deploy"], env={"CI": "1"}) + return cli_runner.invoke(cli, ["deploy"], env={"CI": "1"}) @pytest.fixture(scope="module") @@ -34,18 +35,14 @@ def destroy_result(cli_runner: CliRunner) -> Generator[Result, None, None]: @pytest.mark.order("first") -@pytest.mark.skipif( - platform.system() != "Linux", reason="only runs consistently on Linux" -) +@pytest.mark.skipif(platform.system() != "Linux", reason="only runs consistently on Linux") def test_deploy_exit_code(deploy_result: Result) -> None: """Test deploy exit code.""" assert deploy_result.exit_code == 0 @pytest.mark.order(after="test_deploy_exit_code") -@pytest.mark.skipif( - platform.system() != "Linux", reason="only runs consistently on Linux" -) +@pytest.mark.skipif(platform.system() != "Linux", reason="only runs consistently on Linux") def test_deploy_log_messages(deploy_result: Result) -> None: """Test deploy log messages.""" assert ( @@ -55,9 +52,7 @@ def test_deploy_log_messages(deploy_result: Result) -> None: @pytest.mark.order("last") -@pytest.mark.skipif( - platform.system() != "Linux", reason="only runs consistently on Linux" -) +@pytest.mark.skipif(platform.system() != "Linux", reason="only runs consistently on Linux") def test_destroy_exit_code(destroy_result: Result) -> None: """Test destroy exit code.""" assert destroy_result.exit_code == 0 diff --git a/tests/functional/cfngin/test_raw_cfn/templates/dummy.yml b/tests/functional/cfngin/test_raw_cfn/templates/dummy.yml index cbebd07c2..e92b8d44f 100644 --- a/tests/functional/cfngin/test_raw_cfn/templates/dummy.yml +++ 
b/tests/functional/cfngin/test_raw_cfn/templates/dummy.yml @@ -16,10 +16,8 @@ Parameters: Conditions: - DeployOne: - !Or [ !Equals [ !Ref WaitConditionCount, 1 ], !Equals [ !Ref WaitConditionCount, 2 ] ] - DeployTwo: - !Equals [ !Ref WaitConditionCount, 2 ] + DeployOne: !Or [!Equals [!Ref WaitConditionCount, 1], !Equals [!Ref WaitConditionCount, 2]] + DeployTwo: !Equals [!Ref WaitConditionCount, 2] Resources: diff --git a/tests/functional/cfngin/test_raw_cfn/test_runner.py b/tests/functional/cfngin/test_raw_cfn/test_runner.py index c559ddfaf..51ac53696 100644 --- a/tests/functional/cfngin/test_raw_cfn/test_runner.py +++ b/tests/functional/cfngin/test_raw_cfn/test_runner.py @@ -1,17 +1,18 @@ """Test using raw CloudFormation template.""" -# pylint: disable=redefined-outer-name from __future__ import annotations import shutil from pathlib import Path -from typing import TYPE_CHECKING, Generator +from typing import TYPE_CHECKING import pytest from runway._cli import cli if TYPE_CHECKING: + from collections.abc import Generator + from click.testing import CliRunner, Result CURRENT_DIR = Path(__file__).parent diff --git a/tests/functional/cfngin/test_recreate_failed/test_runner.py b/tests/functional/cfngin/test_recreate_failed/test_runner.py index 3d304f98e..4f0e72753 100644 --- a/tests/functional/cfngin/test_recreate_failed/test_runner.py +++ b/tests/functional/cfngin/test_recreate_failed/test_runner.py @@ -1,17 +1,18 @@ """Test recreation of a failed deployment.""" -# pylint: disable=redefined-outer-name from __future__ import annotations import shutil from pathlib import Path -from typing import TYPE_CHECKING, Generator +from typing import TYPE_CHECKING import pytest from runway._cli import cli if TYPE_CHECKING: + from collections.abc import Generator + from click.testing import CliRunner, Result CURRENT_DIR = Path(__file__).parent @@ -21,10 +22,7 @@ def deploy_bad_result(cli_runner: CliRunner) -> Generator[Result, None, None]: """Execute `runway deploy` with `runway destroy` as a cleanup step.""" yield cli_runner.invoke(cli, ["deploy", "--tag", "bad"], env={"CI": "1"}) - assert ( - cli_runner.invoke(cli, ["destroy", "--tag", "good"], env={"CI": "1"}).exit_code - == 0 - ) + assert cli_runner.invoke(cli, ["destroy", "--tag", "good"], env={"CI": "1"}).exit_code == 0 shutil.rmtree(CURRENT_DIR / ".runway", ignore_errors=True) @@ -60,8 +58,7 @@ def test_deploy_bad_log_messages(deploy_bad_result: Result, namespace: str) -> N # output may or may not have a "rolled back" or "failed (creating new stack)" msg # depends on API throttling assert ( - "[runway] The following steps failed: recreate-failed" - in deploy_bad_result.stdout + "[runway] The following steps failed: recreate-failed" in deploy_bad_result.stdout ), f"stdout does not match expected\n\nSTDOUT:\n{deploy_bad_result.stdout}" diff --git a/tests/functional/cfngin/test_rollback_dependant/test_runner.py b/tests/functional/cfngin/test_rollback_dependant/test_runner.py index cce4da118..0a397a5bd 100644 --- a/tests/functional/cfngin/test_rollback_dependant/test_runner.py +++ b/tests/functional/cfngin/test_rollback_dependant/test_runner.py @@ -1,17 +1,18 @@ """Test failed stack with dependency.""" -# pylint: disable=redefined-outer-name from __future__ import annotations import shutil from pathlib import Path -from typing import TYPE_CHECKING, Generator +from typing import TYPE_CHECKING import pytest from runway._cli import cli if TYPE_CHECKING: + from collections.abc import Generator + from click.testing import CliRunner, Result CURRENT_DIR = 
Path(__file__).parent diff --git a/tests/functional/cfngin/test_simple_build/test_runner.py b/tests/functional/cfngin/test_simple_build/test_runner.py index f7b6fdfdc..5aaae4b56 100644 --- a/tests/functional/cfngin/test_simple_build/test_runner.py +++ b/tests/functional/cfngin/test_simple_build/test_runner.py @@ -1,11 +1,10 @@ """Run a simple test of CFNgin deploy and destroy.""" -# pylint: disable=redefined-outer-name from __future__ import annotations import shutil from pathlib import Path -from typing import TYPE_CHECKING, Generator +from typing import TYPE_CHECKING import pytest @@ -13,6 +12,8 @@ from runway.config import CfnginConfig if TYPE_CHECKING: + from collections.abc import Generator + from _pytest.fixtures import SubRequest from click.testing import CliRunner, Result @@ -27,9 +28,7 @@ def cfngin_config( request: SubRequest, runway_config: RunwayConfig, runway_context: RunwayContext ) -> CfnginConfig: """Find and return the CFNgin config.""" - runway_config.deployments[0].resolve( - runway_context, variables=runway_config.variables - ) + runway_config.deployments[0].resolve(runway_context, variables=runway_config.variables) return CfnginConfig.parse_file( path=request.path.parent / "simple-build.cfn" / "cfngin.yml", parameters=runway_config.deployments[0].parameters, @@ -163,9 +162,7 @@ def test_stacks_not_exists(cfngin_context: CfnginContext) -> None: client = cfngin_context.get_session(region="us-east-1").client("cloudformation") assert cfngin_context.stacks, "no stacks found in context/config" for stack in cfngin_context.stacks: - try: + with pytest.raises(client.exceptions.ClientError, match="does not exist"): assert not client.describe_stacks(StackName=stack.fqn)[ "Stacks" ], f"stack exists: {stack.fqn}" - except client.exceptions.ClientError as exc: - assert "does not exist" in str(exc) diff --git a/tests/functional/cfngin/test_simple_diff/blueprints.py b/tests/functional/cfngin/test_simple_diff/blueprints.py index 99150ca6a..a7479a7a0 100644 --- a/tests/functional/cfngin/test_simple_diff/blueprints.py +++ b/tests/functional/cfngin/test_simple_diff/blueprints.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, ClassVar, Dict +from typing import TYPE_CHECKING, ClassVar from troposphere.cloudformation import WaitConditionHandle @@ -16,7 +16,7 @@ class DiffTester(Blueprint): """Diff tester.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "InstanceType": { "type": CFNString, "description": "NAT EC2 instance type.", @@ -24,8 +24,7 @@ class DiffTester(Blueprint): }, "WaitConditionCount": { "type": int, - "description": "Number of WaitConditionHandle resources " - "to add to the template", + "description": "Number of WaitConditionHandle resources to add to the template", }, } diff --git a/tests/functional/cfngin/test_simple_diff/test_runner.py b/tests/functional/cfngin/test_simple_diff/test_runner.py index 3b828e8ec..1aac871bd 100644 --- a/tests/functional/cfngin/test_simple_diff/test_runner.py +++ b/tests/functional/cfngin/test_simple_diff/test_runner.py @@ -1,15 +1,16 @@ """Run a simple test of `runway plan` for CFNgin.""" -# pylint: disable=redefined-outer-name,unused-argument from __future__ import annotations -from typing import TYPE_CHECKING, Generator +from typing import TYPE_CHECKING import pytest from runway._cli import cli if TYPE_CHECKING: + from collections.abc import Generator + from click.testing import CliRunner, Result from runway.config import 
RunwayConfig @@ -24,11 +25,9 @@ def initial_deploy(cli_runner: CliRunner) -> Generator[None, None, None]: @pytest.fixture(scope="module") -def plan_result(cli_runner: CliRunner, initial_deploy: None) -> Result: +def plan_result(cli_runner: CliRunner, initial_deploy: None) -> Result: # noqa: ARG001 """Execute `runway plan`.""" - return cli_runner.invoke( - cli, ["plan"], env={"CI": "1", "DEPLOY_ENVIRONMENT": "test2"} - ) + return cli_runner.invoke(cli, ["plan"], env={"CI": "1", "DEPLOY_ENVIRONMENT": "test2"}) @pytest.mark.order("first") diff --git a/tests/functional/conftest.py b/tests/functional/conftest.py index 68ddf6e6d..5abacb0ea 100644 --- a/tests/functional/conftest.py +++ b/tests/functional/conftest.py @@ -1,15 +1,13 @@ """Pytest configuration, fixtures, and plugins.""" -# pylint: disable=redefined-outer-name from __future__ import annotations import os from pathlib import Path -from typing import TYPE_CHECKING, Any, Generator +from typing import TYPE_CHECKING, Any +from unittest.mock import patch import pytest -from click.testing import CliRunner -from mock import patch from runway.config import CfnginConfig, RunwayConfig from runway.context import CfnginContext, RunwayContext @@ -19,12 +17,14 @@ from ..factories import cli_runner_factory if TYPE_CHECKING: + from collections.abc import Generator + from _pytest.config import Config from _pytest.fixtures import SubRequest + from click.testing import CliRunner -# pylint: disable=unused-argument -def pytest_ignore_collect(path: Any, config: Config) -> bool: +def pytest_ignore_collect(path: Any, config: Config) -> bool: # noqa: ARG001 """Determine if this directory should have its tests collected.""" return not config.option.functional @@ -58,9 +58,7 @@ def cfngin_config( request: SubRequest, runway_config: RunwayConfig, runway_context: RunwayContext ) -> CfnginConfig: """Find and return the CFNgin config.""" - runway_config.deployments[0].resolve( - runway_context, variables=runway_config.variables - ) + runway_config.deployments[0].resolve(runway_context, variables=runway_config.variables) return CfnginConfig.parse_file( path=request.path.parent / "cfngin.yml", parameters=runway_config.deployments[0].parameters, @@ -84,7 +82,7 @@ def cfngin_context( @pytest.fixture(scope="module") -def cli_runner(cd_test_dir: Path, request: SubRequest) -> CliRunner: +def cli_runner(cd_test_dir: Path, request: SubRequest) -> CliRunner: # noqa: ARG001 """Initialize instance of `click.testing.CliRunner`.""" return cli_runner_factory(request) diff --git a/tests/functional/serverless/test_promotezip/package.json b/tests/functional/serverless/test_promotezip/package.json index 5a74a853d..413f27c15 100644 --- a/tests/functional/serverless/test_promotezip/package.json +++ b/tests/functional/serverless/test_promotezip/package.json @@ -1,19 +1,19 @@ { - "name": "test_promotezip", - "version": "1.0.0", - "description": "", - "main": "index.js", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "keywords": [], "author": "", - "license": "ISC", + "description": "", "devDependencies": { "@typescript-eslint/eslint-plugin": "^5.48.1", "@typescript-eslint/parser": "^5.48.1", "eslint": "^8.31.0", "serverless": "~3.30.1", "typescript": "^4.9.4" - } + }, + "keywords": [], + "license": "ISC", + "main": "index.js", + "name": "test_promotezip", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "version": "1.0.0" } diff --git a/tests/functional/serverless/test_promotezip/test_runner.py 
b/tests/functional/serverless/test_promotezip/test_runner.py
index 58f680dd1..2b16ebdc3 100644
--- a/tests/functional/serverless/test_promotezip/test_runner.py
+++ b/tests/functional/serverless/test_promotezip/test_runner.py
@@ -1,26 +1,27 @@
 """Test promote zip between environments."""
 
-# pylint: disable=redefined-outer-name
 from __future__ import annotations
 
 import shutil
 from pathlib import Path
-from typing import TYPE_CHECKING, Generator
+from typing import TYPE_CHECKING
 
 import pytest
 
 from runway._cli import cli
 
 if TYPE_CHECKING:
+    from collections.abc import Generator
+
     from click.testing import CliRunner, Result
 
 CURRENT_DIR = Path(__file__).parent
 
 
 @pytest.fixture(scope="module")
-def deploy_promotezip_result(cli_runner: CliRunner) -> Generator[Result, None, None]:
+def deploy_promotezip_result(cli_runner: CliRunner) -> Result:
     """Execute `runway deploy` with `runway destroy` as a cleanup step."""
-    yield cli_runner.invoke(
+    return cli_runner.invoke(
         cli,
         ["deploy", "--tag", "sls"],
         env={"DEPLOY_ENVIRONMENT": "promotezip", "CI": "1"},
@@ -28,15 +29,15 @@
 
 
 @pytest.fixture(scope="module")
-def deploy_result(cli_runner: CliRunner) -> Generator[Result, None, None]:
+def deploy_result(cli_runner: CliRunner) -> Result:
     """Execute `runway deploy` with `runway destroy` as a cleanup step."""
-    yield cli_runner.invoke(cli, ["deploy"], env={"CI": "1"})
+    return cli_runner.invoke(cli, ["deploy"], env={"CI": "1"})
 
 
 @pytest.fixture(scope="module")
-def destroy_promotezip_result(cli_runner: CliRunner) -> Generator[Result, None, None]:
+def destroy_promotezip_result(cli_runner: CliRunner) -> Result:
     """Execute `runway destroy`."""
-    yield cli_runner.invoke(
+    return cli_runner.invoke(
         cli,
         ["destroy", "--tag", "sls"],
         env={"DEPLOY_ENVIRONMENT": "promotezip", "CI": "1"},
@@ -70,15 +71,13 @@ def test_deploy_promotezip_exit_code(deploy_promotezip_result: Result) -> None:
 def test_deploy_promotezip_log_messages(deploy_promotezip_result: Result) -> None:
     """Test deploy log messages."""
     assert (
-        "test_promotezip:found existing package for helloWorld0"
-        in deploy_promotezip_result.stdout
+        "test_promotezip:found existing package for helloWorld0" in deploy_promotezip_result.stdout
     ), f"expected not in stdout:\n{deploy_promotezip_result.stdout}"
     assert (
         "downloading s3://" in deploy_promotezip_result.stdout
     ), f"expected not in stdout:\n{deploy_promotezip_result.stdout}"
     assert (
-        "est_promotezip:found existing package for helloWorld1"
-        in deploy_promotezip_result.stdout
+        "test_promotezip:found existing package for helloWorld1" in deploy_promotezip_result.stdout
     ), f"expected not in stdout:\n{deploy_promotezip_result.stdout}"
diff --git a/tests/functional/sources/git/test_runner.py b/tests/functional/sources/git/test_runner.py
index 46d8c6a64..a68e9d8ab 100644
--- a/tests/functional/sources/git/test_runner.py
+++ b/tests/functional/sources/git/test_runner.py
@@ -1,26 +1,27 @@
 """Test Runway module from git repo."""
 
-# pylint: disable=redefined-outer-name
 from __future__ import annotations
 
 import shutil
 from pathlib import Path
-from typing import TYPE_CHECKING, Generator
+from typing import TYPE_CHECKING
 
 import pytest
 
 from runway._cli import cli
 
 if TYPE_CHECKING:
+    from collections.abc import Generator
+
     from click.testing import CliRunner, Result
 
 CURRENT_DIR = Path(__file__).parent
 
 
 @pytest.fixture(scope="module")
-def deploy_result(cli_runner: CliRunner) -> Generator[Result, None, None]:
+def deploy_result(cli_runner: 
CliRunner) -> Result: """Execute `runway deploy` with `runway destroy` as a cleanup step.""" - yield cli_runner.invoke(cli, ["deploy"], env={"CI": "1"}) + return cli_runner.invoke(cli, ["deploy"], env={"CI": "1"}) @pytest.fixture(scope="module") diff --git a/tests/functional/staticsite/test_simple_build/runway.yml b/tests/functional/staticsite/test_simple_build/runway.yml index 7edf53770..bc80d1521 100644 --- a/tests/functional/staticsite/test_simple_build/runway.yml +++ b/tests/functional/staticsite/test_simple_build/runway.yml @@ -1,12 +1,12 @@ deployments: - modules: - - name: test-simple-build - path: ./ - type: static - parameters: - namespace: ${env RUNWAY_TEST_NAMESPACE::default=${env USER::default=user}-local} - staticsite_cf_disable: true - options: - build_output: site + - name: test-simple-build + path: ./ + type: static + parameters: + namespace: ${env RUNWAY_TEST_NAMESPACE::default=${env USER::default=user}-local} + staticsite_cf_disable: true + options: + build_output: site regions: - us-east-1 diff --git a/tests/functional/staticsite/test_simple_build/test_runner.py b/tests/functional/staticsite/test_simple_build/test_runner.py index 59c26bb64..2a1bd97de 100644 --- a/tests/functional/staticsite/test_simple_build/test_runner.py +++ b/tests/functional/staticsite/test_simple_build/test_runner.py @@ -1,26 +1,27 @@ """Test staticsite.""" -# pylint: disable=redefined-outer-name from __future__ import annotations import shutil from pathlib import Path -from typing import TYPE_CHECKING, Generator +from typing import TYPE_CHECKING import pytest from runway._cli import cli if TYPE_CHECKING: + from collections.abc import Generator + from click.testing import CliRunner, Result CURRENT_DIR = Path(__file__).parent @pytest.fixture(scope="module") -def deploy_result(cli_runner: CliRunner) -> Generator[Result, None, None]: +def deploy_result(cli_runner: CliRunner) -> Result: """Execute `runway deploy` with `runway destroy` as a cleanup step.""" - yield cli_runner.invoke(cli, ["deploy"], env={"CI": "1"}) + return cli_runner.invoke(cli, ["deploy"], env={"CI": "1"}) @pytest.fixture(scope="module") diff --git a/tests/functional/terraform/conftest.py b/tests/functional/terraform/conftest.py index 1117f5a55..32355317a 100644 --- a/tests/functional/terraform/conftest.py +++ b/tests/functional/terraform/conftest.py @@ -1,15 +1,16 @@ """Pytest configuration, fixtures, and plugins.""" -# pylint: disable=redefined-outer-name from __future__ import annotations import shutil from pathlib import Path -from typing import TYPE_CHECKING, Generator +from typing import TYPE_CHECKING import pytest if TYPE_CHECKING: + from collections.abc import Generator + from _pytest.fixtures import SubRequest @@ -19,10 +20,8 @@ def fixture_dir() -> Path: return Path(__file__).parent / "fixtures" -@pytest.fixture(scope="function") -def local_backend( - fixture_dir: Path, request: SubRequest -) -> Generator[Path, None, None]: +@pytest.fixture() +def local_backend(fixture_dir: Path, request: SubRequest) -> Generator[Path, None, None]: """Copy local_backend.tf into the test directory.""" file_name = "local_backend.tf" og_file = fixture_dir / file_name @@ -32,7 +31,7 @@ def local_backend( new_file.unlink() -@pytest.fixture(scope="function") +@pytest.fixture() def no_backend(fixture_dir: Path, request: SubRequest) -> Generator[Path, None, None]: """Copy no_backend.tf into the test directory.""" file_name = "no_backend.tf" @@ -43,7 +42,7 @@ def no_backend(fixture_dir: Path, request: SubRequest) -> Generator[Path, None, 
new_file.unlink() -@pytest.fixture(scope="function") +@pytest.fixture() def s3_backend(fixture_dir: Path, request: SubRequest) -> Generator[Path, None, None]: """Copy s3_backend.tf into the test directory.""" file_name = "s3_backend.tf" diff --git a/tests/functional/terraform/test_backend_local_2_s3/test_runner.py b/tests/functional/terraform/test_backend_local_2_s3/test_runner.py index ff3fdbd95..29084e52f 100644 --- a/tests/functional/terraform/test_backend_local_2_s3/test_runner.py +++ b/tests/functional/terraform/test_backend_local_2_s3/test_runner.py @@ -1,12 +1,11 @@ """Test migrating local backend to s3.""" -# pylint: disable=redefined-outer-name,unused-argument from __future__ import annotations import locale import shutil from pathlib import Path -from typing import TYPE_CHECKING, Iterator, cast +from typing import TYPE_CHECKING, cast import pytest @@ -14,6 +13,8 @@ from runway.env_mgr.tfenv import TF_VERSION_FILENAME if TYPE_CHECKING: + from collections.abc import Iterator + from _pytest.fixtures import SubRequest from click.testing import CliRunner, Result @@ -25,9 +26,7 @@ def tf_state_bucket(cli_runner: CliRunner) -> Iterator[None]: """Create Terraform state bucket and table.""" cli_runner.invoke(cli, ["deploy", "--tag", "bootstrap"], env={"CI": "1"}) yield - destroy_result = cli_runner.invoke( - cli, ["destroy", "--tag", "cleanup"], env={"CI": "1"} - ) + destroy_result = cli_runner.invoke(cli, ["destroy", "--tag", "cleanup"], env={"CI": "1"}) assert destroy_result.exit_code == 0 @@ -44,20 +43,20 @@ def tf_version(request: SubRequest) -> Iterator[str]: encoding=locale.getpreferredencoding(do_setlocale=False), ) yield cast(str, request.param) - file_path.unlink(missing_ok=True) # pylint: disable=unexpected-keyword-arg + file_path.unlink(missing_ok=True) -@pytest.fixture(scope="function") +@pytest.fixture() def deploy_local_backend_result( - cli_runner: CliRunner, local_backend: Path -) -> Iterator[Result]: + cli_runner: CliRunner, local_backend: Path # noqa: ARG001 +) -> Result: """Execute `runway deploy` with `runway destroy` as a cleanup step.""" - yield cli_runner.invoke(cli, ["deploy", "--tag", "local"], env={"CI": "1"}) + return cli_runner.invoke(cli, ["deploy", "--tag", "local"], env={"CI": "1"}) -@pytest.fixture(scope="function") +@pytest.fixture() def deploy_s3_backend_result( - cli_runner: CliRunner, s3_backend: Path + cli_runner: CliRunner, s3_backend: Path # noqa: ARG001 ) -> Iterator[Result]: """Execute `runway deploy` with `runway destroy` as a cleanup step.""" yield cli_runner.invoke(cli, ["deploy", "--tag", "test"], env={"CI": "1"}) @@ -65,10 +64,7 @@ def deploy_s3_backend_result( shutil.rmtree(CURRENT_DIR / ".runway", ignore_errors=True) shutil.rmtree(CURRENT_DIR / ".terraform", ignore_errors=True) shutil.rmtree(CURRENT_DIR / "terraform.tfstate.d", ignore_errors=True) - (CURRENT_DIR / "local_backend").unlink( # pylint: disable=unexpected-keyword-arg - missing_ok=True - ) - # pylint: disable=unexpected-keyword-arg + (CURRENT_DIR / "local_backend").unlink(missing_ok=True) (CURRENT_DIR / ".terraform.lock.hcl").unlink(missing_ok=True) diff --git a/tests/functional/terraform/test_backend_no_2_local/test_runner.py b/tests/functional/terraform/test_backend_no_2_local/test_runner.py index 0f9c0afbe..e2c7f0c0d 100644 --- a/tests/functional/terraform/test_backend_no_2_local/test_runner.py +++ b/tests/functional/terraform/test_backend_no_2_local/test_runner.py @@ -1,12 +1,11 @@ """Test migration from no backend to local backend.""" -# pylint: 
disable=redefined-outer-name,unused-argument from __future__ import annotations import locale import shutil from pathlib import Path -from typing import TYPE_CHECKING, Generator, cast +from typing import TYPE_CHECKING, cast import pytest @@ -14,6 +13,8 @@ from runway.env_mgr.tfenv import TF_VERSION_FILENAME if TYPE_CHECKING: + from collections.abc import Generator + from _pytest.fixtures import SubRequest from click.testing import CliRunner, Result @@ -33,14 +34,14 @@ def tf_version(request: SubRequest) -> Generator[str, None, None]: encoding=locale.getpreferredencoding(do_setlocale=False), ) yield cast(str, request.param) - file_path.unlink(missing_ok=True) # pylint: disable=unexpected-keyword-arg + file_path.unlink(missing_ok=True) -@pytest.fixture(scope="function") +@pytest.fixture() def deploy_local_backend_result( cli_runner: CliRunner, - local_backend: Path, - tf_version: str, + local_backend: Path, # noqa: ARG001 + tf_version: str, # noqa: ARG001 ) -> Generator[Result, None, None]: """Execute `runway deploy` with `runway destroy` as a cleanup step.""" assert (CURRENT_DIR / "terraform.tfstate.d").exists() @@ -49,18 +50,17 @@ def deploy_local_backend_result( shutil.rmtree(CURRENT_DIR / ".runway", ignore_errors=True) shutil.rmtree(CURRENT_DIR / ".terraform", ignore_errors=True) shutil.rmtree(CURRENT_DIR / "terraform.tfstate.d", ignore_errors=True) - # pylint: disable=unexpected-keyword-arg (CURRENT_DIR / ".terraform.lock.hcl").unlink(missing_ok=True) -@pytest.fixture(scope="function") +@pytest.fixture() def deploy_no_backend_result( cli_runner: CliRunner, - no_backend: Path, - tf_version: str, -) -> Generator[Result, None, None]: + no_backend: Path, # noqa: ARG001 + tf_version: str, # noqa: ARG001 +) -> Result: """Execute `runway deploy` with `runway destroy` as a cleanup step.""" - yield cli_runner.invoke(cli, ["deploy"], env={"CI": "1"}) + return cli_runner.invoke(cli, ["deploy"], env={"CI": "1"}) def test_deploy_no_backend_result(deploy_no_backend_result: Result) -> None: diff --git a/tests/functional/terraform/test_base/test_runner.py b/tests/functional/terraform/test_base/test_runner.py index 73618bf60..9e9354970 100644 --- a/tests/functional/terraform/test_base/test_runner.py +++ b/tests/functional/terraform/test_base/test_runner.py @@ -5,13 +5,12 @@ """ -# pylint: disable=redefined-outer-name from __future__ import annotations import locale import shutil from pathlib import Path -from typing import TYPE_CHECKING, Generator, cast +from typing import TYPE_CHECKING, cast import pytest @@ -19,6 +18,8 @@ from runway.env_mgr.tfenv import TF_VERSION_FILENAME if TYPE_CHECKING: + from collections.abc import Generator + from _pytest.fixtures import SubRequest from click.testing import CliRunner, Result @@ -38,12 +39,12 @@ def tf_version(request: SubRequest) -> Generator[str, None, None]: encoding=locale.getpreferredencoding(do_setlocale=False), ) yield cast(str, request.param) - file_path.unlink(missing_ok=True) # pylint: disable=unexpected-keyword-arg + file_path.unlink(missing_ok=True) -@pytest.fixture(scope="function") +@pytest.fixture() def deploy_result( - cli_runner: CliRunner, no_backend: Path # pylint: disable=unused-argument + cli_runner: CliRunner, no_backend: Path # noqa: ARG001 ) -> Generator[Result, None, None]: """Execute `runway deploy` with `runway destroy` as a cleanup step.""" yield cli_runner.invoke(cli, ["deploy"], env={"CI": "1"}) @@ -52,7 +53,6 @@ def deploy_result( shutil.rmtree(CURRENT_DIR / ".runway", ignore_errors=True) shutil.rmtree(CURRENT_DIR / ".terraform", 
ignore_errors=True) shutil.rmtree(CURRENT_DIR / "terraform.tfstate.d", ignore_errors=True) - # pylint: disable=unexpected-keyword-arg (CURRENT_DIR / ".terraform.lock.hcl").unlink(missing_ok=True) assert destroy_result.exit_code == 0 diff --git a/tests/integration/cli/commands/kbenv/conftest.py b/tests/integration/cli/commands/kbenv/conftest.py new file mode 100644 index 000000000..4eb0abf43 --- /dev/null +++ b/tests/integration/cli/commands/kbenv/conftest.py @@ -0,0 +1,24 @@ +"""Pytest fixtures and plugins.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import pytest + +if TYPE_CHECKING: + from pathlib import Path + + from pytest_mock import MockFixture + + +@pytest.fixture(autouse=True) +def versions_dir(cd_tmp_path: Path, mocker: MockFixture) -> Path: + """Patches KBEnvManager.versions_dir.""" + path = cd_tmp_path / "versions" + path.mkdir(exist_ok=True) + mocker.patch("runway._cli.commands._kbenv._install.KBEnvManager.versions_dir", path) + mocker.patch("runway._cli.commands._kbenv._list.KBEnvManager.versions_dir", path) + mocker.patch("runway._cli.commands._kbenv._run.KBEnvManager.versions_dir", path) + mocker.patch("runway._cli.commands._kbenv._uninstall.KBEnvManager.versions_dir", path) + return path diff --git a/tests/integration/cli/commands/kbenv/test_install.py b/tests/integration/cli/commands/kbenv/test_install.py index c39be8277..d5d37464b 100644 --- a/tests/integration/cli/commands/kbenv/test_install.py +++ b/tests/integration/cli/commands/kbenv/test_install.py @@ -1,30 +1,21 @@ """Test ``runway kbenv install`` command.""" -# pylint: disable=unused-argument from __future__ import annotations import logging from pathlib import Path from typing import TYPE_CHECKING -import pytest from click.testing import CliRunner from runway._cli import cli -from runway.env_mgr.kbenv import KB_VERSION_FILENAME, KBEnvManager +from runway.env_mgr.kbenv import KB_VERSION_FILENAME if TYPE_CHECKING: - from pytest import LogCaptureFixture - from pytest_mock import MockerFixture + import pytest -@pytest.fixture(autouse=True, scope="function") -def patch_versions_dir(mocker: MockerFixture, tmp_path: Path) -> None: - """Patch TFEnvManager.versions_dir.""" - mocker.patch.object(KBEnvManager, "versions_dir", tmp_path) - - -def test_kbenv_install(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: +def test_kbenv_install(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway kbenv install`` reading version from a file. For best results, remove any existing installs. @@ -41,12 +32,11 @@ def test_kbenv_install(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: def test_kbenv_install_no_version_file( - cd_tmp_path: Path, caplog: LogCaptureFixture + caplog: pytest.LogCaptureFixture, cli_runner: CliRunner ) -> None: """Test ``runway kbenv install`` no version file.""" caplog.set_level(logging.WARNING, logger="runway") - runner = CliRunner() - result = runner.invoke(cli, ["kbenv", "install"]) + result = cli_runner.invoke(cli, ["kbenv", "install"]) assert result.exit_code == 1 assert ( @@ -55,7 +45,7 @@ def test_kbenv_install_no_version_file( ) -def test_kbenv_install_version(caplog: LogCaptureFixture) -> None: +def test_kbenv_install_version(caplog: pytest.LogCaptureFixture) -> None: """Test ``runway kbenv install ``. For best results, remove any existing installs. 
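The new conftest.py above (and the matching tfenv one later in this diff) replaces the per-module ``patch_versions_dir`` fixtures with a single package-level autouse fixture. A minimal sketch of that pattern, using a hypothetical ``tools`` package and ``ToolManager`` class in place of Runway's real modules:

# conftest.py -- sketch only; "tools" and "ToolManager" are placeholder names
from __future__ import annotations

from typing import TYPE_CHECKING

import pytest

if TYPE_CHECKING:
    from pathlib import Path

    from pytest_mock import MockerFixture


@pytest.fixture(autouse=True)
def versions_dir(tmp_path: Path, mocker: MockerFixture) -> Path:
    """Redirect ToolManager.versions_dir to an isolated temp directory."""
    path = tmp_path / "versions"
    path.mkdir(exist_ok=True)
    # Replace the class attribute so no test touches the real install
    # location; mocker restores the original value after each test.
    mocker.patch("tools._install.ToolManager.versions_dir", path)
    mocker.patch("tools._list.ToolManager.versions_dir", path)
    return path

Because the fixture is autouse and lives in conftest.py, every test module in the package picks it up automatically; tests that need the path simply request ``versions_dir`` as a parameter, which is what lets the kbenv and tfenv test modules in this diff drop their local patching fixtures.
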
diff --git a/tests/integration/cli/commands/kbenv/test_list.py b/tests/integration/cli/commands/kbenv/test_list.py index 122c225b8..39ea3d155 100644 --- a/tests/integration/cli/commands/kbenv/test_list.py +++ b/tests/integration/cli/commands/kbenv/test_list.py @@ -5,44 +5,40 @@ import logging from typing import TYPE_CHECKING -from click.testing import CliRunner - from runway._cli import cli -from runway.env_mgr.kbenv import KBEnvManager if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture - from pytest_mock import MockerFixture + import pytest + from click.testing import CliRunner def test_kbenv_list( - caplog: LogCaptureFixture, mocker: MockerFixture, tmp_path: Path + caplog: pytest.LogCaptureFixture, cli_runner: CliRunner, versions_dir: Path ) -> None: """Test ``runway kbenv list``.""" - caplog.set_level(logging.INFO, logger="runway.cli.commands.kbenv") - mocker.patch.object(KBEnvManager, "versions_dir", tmp_path) - version_dirs = [tmp_path / "v1.14.0", tmp_path / "v1.21.0"] + caplog.set_level(logging.INFO, logger="runway._cli.commands._kbenv") + version_dirs = [versions_dir / "v1.14.0", versions_dir / "v1.21.0"] for v_dir in version_dirs: v_dir.mkdir() - (tmp_path / "something.txt").touch() - runner = CliRunner() - result = runner.invoke(cli, ["kbenv", "list"]) + (versions_dir / "something.txt").touch() + result = cli_runner.invoke(cli, ["kbenv", "list"]) assert result.exit_code == 0 assert caplog.messages == ["kubectl versions installed:"] - assert result.stdout == "\n".join( - ["[runway] kubectl versions installed:", "v1.14.0", "v1.21.0", ""] - ) + assert {i.strip() for i in result.output.split("\n")} == { + "[runway] kubectl versions installed:", + "v1.14.0", + "v1.21.0", + "", + } def test_kbenv_list_none( - caplog: LogCaptureFixture, mocker: MockerFixture, tmp_path: Path + caplog: pytest.LogCaptureFixture, cli_runner: CliRunner, versions_dir: Path ) -> None: """Test ``runway kbenv list`` no versions installed.""" - caplog.set_level(logging.WARNING, logger="runway.cli.commands.kbenv") - mocker.patch.object(KBEnvManager, "versions_dir", tmp_path) - runner = CliRunner() - result = runner.invoke(cli, ["kbenv", "list"]) + caplog.set_level(logging.WARNING, logger="runway._cli.commands._kbenv") + result = cli_runner.invoke(cli, ["kbenv", "list"]) assert result.exit_code == 0 - assert caplog.messages == [f"no versions of kubectl installed at path {tmp_path}"] + assert caplog.messages == [f"no versions of kubectl installed at path {versions_dir}"] diff --git a/tests/integration/cli/commands/kbenv/test_run.py b/tests/integration/cli/commands/kbenv/test_run.py index 2baaa39e8..d5624375a 100644 --- a/tests/integration/cli/commands/kbenv/test_run.py +++ b/tests/integration/cli/commands/kbenv/test_run.py @@ -1,29 +1,27 @@ """Test ``runway kbenv run`` command.""" -# pylint: disable=unused-argument from __future__ import annotations import logging from typing import TYPE_CHECKING -from click.testing import CliRunner - from runway._cli import cli from runway.env_mgr.kbenv import KB_VERSION_FILENAME if TYPE_CHECKING: from pathlib import Path - from pytest import CaptureFixture, LogCaptureFixture + import pytest + from click.testing import CliRunner def test_kbenv_run_no_version_file( - cd_tmp_path: Path, caplog: LogCaptureFixture + caplog: pytest.LogCaptureFixture, + cli_runner: CliRunner, ) -> None: """Test ``runway kbenv run -- --help`` no version file.""" caplog.set_level(logging.WARNING, logger="runway") - runner = CliRunner() - result = runner.invoke(cli, ["kbenv", 
"run", "--", "--help"]) + result = cli_runner.invoke(cli, ["kbenv", "run", "--", "--help"]) assert result.exit_code == 1 assert ( @@ -32,7 +30,9 @@ def test_kbenv_run_no_version_file( ) -def test_kbenv_run_separator(cd_tmp_path: Path, capfd: CaptureFixture[str]) -> None: +def test_kbenv_run_separator( + capfd: pytest.CaptureFixture[str], cli_runner: CliRunner, tmp_path: Path +) -> None: """Test ``runway kbenv run -- --help``. Parsing of command using ``--`` as a separator between options and args. @@ -41,24 +41,24 @@ def test_kbenv_run_separator(cd_tmp_path: Path, capfd: CaptureFixture[str]) -> N pass options shared with Runway such as ``--help``. """ - (cd_tmp_path / KB_VERSION_FILENAME).write_text("v1.14.0") - runner = CliRunner() - result = runner.invoke(cli, ["kbenv", "run", "--", "--help"]) + (tmp_path / KB_VERSION_FILENAME).write_text("v1.14.0") + result = cli_runner.invoke(cli, ["kbenv", "run", "--", "--help"]) captured = capfd.readouterr() # capfd required for subprocess assert result.exit_code == 0 assert "runway" not in captured.out assert "kubectl --help" in captured.out -def test_kbenv_run_version(cd_tmp_path: Path, capfd: CaptureFixture[str]) -> None: +def test_kbenv_run_version( + capfd: pytest.CaptureFixture[str], cli_runner: CliRunner, tmp_path: Path +) -> None: """Test ``runway kbenv run version``. Parsing of bare command. """ - (cd_tmp_path / KB_VERSION_FILENAME).write_text("v1.14.0") - runner = CliRunner() - result = runner.invoke(cli, ["kbenv", "run", "version", "--client"]) + (tmp_path / KB_VERSION_FILENAME).write_text("v1.14.0") + result = cli_runner.invoke(cli, ["kbenv", "run", "version", "--client"]) captured = capfd.readouterr() # capfd required for subprocess assert result.exit_code == 0 assert "v1.14.0" in captured.out diff --git a/tests/integration/cli/commands/kbenv/test_uninstall.py b/tests/integration/cli/commands/kbenv/test_uninstall.py index a85feeb14..089e652ed 100644 --- a/tests/integration/cli/commands/kbenv/test_uninstall.py +++ b/tests/integration/cli/commands/kbenv/test_uninstall.py @@ -1,50 +1,41 @@ """Test ``runway kbenv uninstall`` command.""" -# pylint: disable=unused-argument from __future__ import annotations import logging -from pathlib import Path from typing import TYPE_CHECKING -import pytest -from click.testing import CliRunner - from runway._cli import cli -from runway.env_mgr.kbenv import KB_VERSION_FILENAME, KBEnvManager +from runway.env_mgr.kbenv import KB_VERSION_FILENAME if TYPE_CHECKING: - from pytest import LogCaptureFixture - from pytest_mock import MockerFixture - -LOGGER = "runway.cli.commands.kbenv" + from pathlib import Path + import pytest + from click.testing import CliRunner -@pytest.fixture(autouse=True, scope="function") -def patch_versions_dir(mocker: MockerFixture, tmp_path: Path) -> None: - """Patch KBEnvManager.versions_dir.""" - mocker.patch.object(KBEnvManager, "versions_dir", tmp_path) +LOGGER = "runway.cli.commands.kbenv" -def test_kbenv_uninstall(cd_tmp_path: Path) -> None: +def test_kbenv_uninstall(cli_runner: CliRunner, versions_dir: Path) -> None: """Test ``runway kbenv uninstall``.""" version = "v1.21.0" - version_dir = cd_tmp_path / version + version_dir = versions_dir / version version_dir.mkdir() - runner = CliRunner() - result = runner.invoke(cli, ["kbenv", "uninstall", version]) + result = cli_runner.invoke(cli, ["kbenv", "uninstall", version]) assert result.exit_code == 0 assert not version_dir.exists() -def test_kbenv_uninstall_all(caplog: LogCaptureFixture, cd_tmp_path: Path) -> None: +def 
test_kbenv_uninstall_all( + caplog: pytest.LogCaptureFixture, cli_runner: CliRunner, versions_dir: Path +) -> None: """Test ``runway kbenv uninstall --all``.""" caplog.set_level(logging.INFO, logger=LOGGER) - version_dirs = [cd_tmp_path / "v1.14.0", cd_tmp_path / "v1.21.0"] + version_dirs = [versions_dir / "v1.14.0", versions_dir / "v1.21.0"] for v in version_dirs: v.mkdir() - runner = CliRunner() - result = runner.invoke(cli, ["kbenv", "uninstall", "--all"]) + result = cli_runner.invoke(cli, ["kbenv", "uninstall", "--all"]) assert result.exit_code == 0 assert "uninstalling all versions of kubectl..." in caplog.messages assert "all versions of kubectl have been uninstalled" in caplog.messages @@ -52,15 +43,14 @@ def test_kbenv_uninstall_all(caplog: LogCaptureFixture, cd_tmp_path: Path) -> No def test_kbenv_uninstall_all_takes_precedence( - caplog: LogCaptureFixture, cd_tmp_path: Path + caplog: pytest.LogCaptureFixture, cli_runner: CliRunner, versions_dir: Path ) -> None: """Test ``runway kbenv uninstall --all`` takes precedence over arg.""" caplog.set_level(logging.INFO, logger=LOGGER) - version_dirs = [cd_tmp_path / "v1.14.0", cd_tmp_path / "v1.21.0"] + version_dirs = [versions_dir / "v1.14.0", versions_dir / "v1.21.0"] for v in version_dirs: v.mkdir() - runner = CliRunner() - result = runner.invoke(cli, ["kbenv", "uninstall", "0.13.0", "--all"]) + result = cli_runner.invoke(cli, ["kbenv", "uninstall", "0.13.0", "--all"]) assert result.exit_code == 0 assert "uninstalling all versions of kubectl..." in caplog.messages assert "all versions of kubectl have been uninstalled" in caplog.messages @@ -68,54 +58,52 @@ def test_kbenv_uninstall_all_takes_precedence( def test_kbenv_uninstall_all_none_installed( - caplog: LogCaptureFixture, cd_tmp_path: Path + caplog: pytest.LogCaptureFixture, cli_runner: CliRunner ) -> None: """Test ``runway kbenv uninstall --all`` none installed.""" caplog.set_level(logging.INFO, logger=LOGGER) - runner = CliRunner() - result = runner.invoke(cli, ["kbenv", "uninstall", "--all"]) + result = cli_runner.invoke(cli, ["kbenv", "uninstall", "--all"]) assert result.exit_code == 0 assert "uninstalling all versions of kubectl..." 
in caplog.messages assert "all versions of kubectl have been uninstalled" in caplog.messages -def test_kbenv_uninstall_arg_takes_precedence(cd_tmp_path: Path) -> None: +def test_kbenv_uninstall_arg_takes_precedence( + cd_tmp_path: Path, cli_runner: CliRunner, versions_dir: Path +) -> None: """Test ``runway kbenv uninstall`` arg takes precedence over file.""" version = "v1.21.0" - version_dir = cd_tmp_path / version + version_dir = versions_dir / version version_dir.mkdir() (cd_tmp_path / KB_VERSION_FILENAME).write_text("v1.14.0") - runner = CliRunner() - result = runner.invoke(cli, ["kbenv", "uninstall", version]) + result = cli_runner.invoke(cli, ["kbenv", "uninstall", version]) assert result.exit_code == 0 assert not version_dir.exists() def test_kbenv_uninstall_no_version( - caplog: LogCaptureFixture, cd_tmp_path: Path + caplog: pytest.LogCaptureFixture, cli_runner: CliRunner ) -> None: """Test ``runway kbenv uninstall`` no version.""" caplog.set_level(logging.ERROR, logger=LOGGER) - runner = CliRunner() - result = runner.invoke(cli, ["kbenv", "uninstall"]) + result = cli_runner.invoke(cli, ["kbenv", "uninstall"]) assert result.exit_code != 0 assert "version not specified" in caplog.messages -def test_kbenv_uninstall_not_installed(cd_tmp_path: Path) -> None: +def test_kbenv_uninstall_not_installed(cli_runner: CliRunner) -> None: """Test ``runway kbenv uninstall`` not installed.""" - runner = CliRunner() - result = runner.invoke(cli, ["kbenv", "uninstall", "1.21.0"]) - assert result.exit_code != 0 + assert cli_runner.invoke(cli, ["kbenv", "uninstall", "1.21.0"]).exit_code != 0 -def test_kbenv_uninstall_version_file(cd_tmp_path: Path) -> None: +def test_kbenv_uninstall_version_file( + cd_tmp_path: Path, cli_runner: CliRunner, versions_dir: Path +) -> None: """Test ``runway kbenv uninstall`` version file.""" version = "v1.21.0" - version_dir = cd_tmp_path / version + version_dir = versions_dir / version version_dir.mkdir() (cd_tmp_path / KB_VERSION_FILENAME).write_text(version) - runner = CliRunner() - result = runner.invoke(cli, ["kbenv", "uninstall"]) + result = cli_runner.invoke(cli, ["kbenv", "uninstall"]) assert result.exit_code == 0 assert not version_dir.exists() diff --git a/tests/integration/cli/commands/test_deploy.py b/tests/integration/cli/commands/test_deploy.py index 98944adf7..54757a367 100644 --- a/tests/integration/cli/commands/test_deploy.py +++ b/tests/integration/cli/commands/test_deploy.py @@ -9,9 +9,9 @@ import logging from typing import TYPE_CHECKING +from unittest.mock import Mock from click.testing import CliRunner -from mock import Mock from runway._cli import cli from runway.config import RunwayConfig @@ -21,7 +21,7 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture + import pytest from pytest_mock import MockerFixture from ...conftest import CpConfigTypeDef @@ -32,7 +32,7 @@ def test_deploy( cd_tmp_path: Path, cp_config: CpConfigTypeDef, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, ) -> None: """Test deploy.""" @@ -86,7 +86,7 @@ def test_deploy_options_deploy_environment( def test_deploy_options_tag( - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cd_tmp_path: Path, cp_config: CpConfigTypeDef, mocker: MockerFixture, @@ -96,9 +96,7 @@ def test_deploy_options_tag( mock_runway = mocker.patch(f"{MODULE}.Runway", Mock(spec=Runway, spec_set=True)) cp_config("tagged_modules", cd_tmp_path) runner = CliRunner() - result0 = runner.invoke( - cli, ["deploy", "--tag", 
"app:test-app", "--tag", "tier:iac"] - ) + result0 = runner.invoke(cli, ["deploy", "--tag", "app:test-app", "--tag", "tier:iac"]) assert result0.exit_code == 0 deployment = mock_runway.return_value.deploy.call_args.args[0][0] assert len(deployment.modules) == 1 diff --git a/tests/integration/cli/commands/test_destroy.py b/tests/integration/cli/commands/test_destroy.py index 5fe113772..deb8a38e4 100644 --- a/tests/integration/cli/commands/test_destroy.py +++ b/tests/integration/cli/commands/test_destroy.py @@ -9,9 +9,9 @@ import logging from typing import TYPE_CHECKING +from unittest.mock import Mock from click.testing import CliRunner -from mock import Mock from runway._cli import cli from runway.config import RunwayConfig @@ -21,7 +21,7 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture, MonkeyPatch + import pytest from pytest_mock import MockerFixture from ...conftest import CpConfigTypeDef @@ -29,9 +29,7 @@ MODULE = "runway._cli.commands._destroy" -def test_destroy( - cd_tmp_path: Path, cp_config: CpConfigTypeDef, mocker: MockerFixture -) -> None: +def test_destroy(cd_tmp_path: Path, cp_config: CpConfigTypeDef, mocker: MockerFixture) -> None: """Test destroy.""" mock_runway = mocker.patch(f"{MODULE}.Runway", Mock(spec=Runway)) cp_config("min_required", cd_tmp_path) @@ -44,7 +42,7 @@ def test_destroy( assert isinstance(mock_runway.call_args.args[1], RunwayContext) mock_runway.reverse_deployments.assert_called_once() - assert len(mock_runway.reverse_deployments.call_args.args[0]) == 1 # type: ignore + assert len(mock_runway.reverse_deployments.call_args.args[0]) == 1 inst = mock_runway.return_value inst.destroy.assert_called_once_with(mock_runway.reverse_deployments.return_value) @@ -83,9 +81,7 @@ def test_destroy_options_deploy_environment( mock_runway = mocker.patch(f"{MODULE}.Runway", Mock(spec=Runway)) cp_config("min_required", cd_tmp_path) runner = CliRunner() - assert ( - runner.invoke(cli, ["destroy", "-e", "e-option"], input="y\ny\n").exit_code == 0 - ) + assert runner.invoke(cli, ["destroy", "-e", "e-option"], input="y\ny\n").exit_code == 0 assert mock_runway.call_args.args[1].env.name == "e-option" assert ( @@ -100,10 +96,10 @@ def test_destroy_options_deploy_environment( def test_destroy_options_tag( - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cd_tmp_path: Path, cp_config: CpConfigTypeDef, - monkeypatch: MonkeyPatch, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test destroy option --tag.""" caplog.set_level(logging.ERROR, logger="runway.cli.commands.destroy") @@ -121,10 +117,7 @@ def test_destroy_options_tag( assert len(deployment.modules) == 1 assert deployment.modules[0].name == "sampleapp-01.cfn" - assert ( - runner.invoke(cli, ["destroy", "--tag", "app:test-app"], input="y\n").exit_code - == 0 - ) + assert runner.invoke(cli, ["destroy", "--tag", "app:test-app"], input="y\n").exit_code == 0 deployment = mock_destroy.call_args.args[0][0] assert len(deployment.modules) == 3 assert deployment.modules[0].name == "parallel_parent" @@ -133,14 +126,12 @@ def test_destroy_options_tag( assert deployment.modules[1].name == "sampleapp-02.cfn" assert deployment.modules[2].name == "sampleapp-01.cfn" - assert ( - runner.invoke(cli, ["destroy", "--tag", "no-match"], input="y\n").exit_code == 1 - ) + assert runner.invoke(cli, ["destroy", "--tag", "no-match"], input="y\n").exit_code == 1 assert "No modules found with the provided tag(s): no-match" in caplog.messages def test_destroy_select_deployment( - cd_tmp_path: Path, cp_config: 
CpConfigTypeDef, monkeypatch: MonkeyPatch + cd_tmp_path: Path, cp_config: CpConfigTypeDef, monkeypatch: pytest.MonkeyPatch ) -> None: """Test destroy select from two deployments.""" cp_config("min_required_multi", cd_tmp_path) @@ -158,7 +149,7 @@ def test_destroy_select_deployment( def test_destroy_select_deployment_all( - cd_tmp_path: Path, cp_config: CpConfigTypeDef, monkeypatch: MonkeyPatch + cd_tmp_path: Path, cp_config: CpConfigTypeDef, monkeypatch: pytest.MonkeyPatch ) -> None: """Test destroy select all deployments.""" cp_config("min_required_multi", cd_tmp_path) @@ -176,7 +167,7 @@ def test_destroy_select_deployment_all( def test_destroy_select_module( - cd_tmp_path: Path, cp_config: CpConfigTypeDef, monkeypatch: MonkeyPatch + cd_tmp_path: Path, cp_config: CpConfigTypeDef, monkeypatch: pytest.MonkeyPatch ) -> None: """Test destroy select from two modules.""" cp_config("min_required_multi", cd_tmp_path) @@ -192,7 +183,7 @@ def test_destroy_select_module( def test_destroy_select_module_all( - cd_tmp_path: Path, cp_config: CpConfigTypeDef, monkeypatch: MonkeyPatch + cd_tmp_path: Path, cp_config: CpConfigTypeDef, monkeypatch: pytest.MonkeyPatch ) -> None: """Test destroy select all modules.""" cp_config("min_required_multi", cd_tmp_path) @@ -209,7 +200,7 @@ def test_destroy_select_module_all( def test_destroy_select_module_child_modules( - cd_tmp_path: Path, cp_config: CpConfigTypeDef, monkeypatch: MonkeyPatch + cd_tmp_path: Path, cp_config: CpConfigTypeDef, monkeypatch: pytest.MonkeyPatch ) -> None: """Test destroy select child module.""" cp_config("simple_child_modules.1", cd_tmp_path) @@ -225,7 +216,7 @@ def test_destroy_select_module_child_modules( def test_destroy_select_module_child_modules_all( - cd_tmp_path: Path, cp_config: CpConfigTypeDef, monkeypatch: MonkeyPatch + cd_tmp_path: Path, cp_config: CpConfigTypeDef, monkeypatch: pytest.MonkeyPatch ) -> None: """Test destroy select all child module.""" cp_config("simple_child_modules.1", cd_tmp_path) diff --git a/tests/integration/cli/commands/test_dismantle.py b/tests/integration/cli/commands/test_dismantle.py index 43572b54e..3ce8eab54 100644 --- a/tests/integration/cli/commands/test_dismantle.py +++ b/tests/integration/cli/commands/test_dismantle.py @@ -4,9 +4,9 @@ import logging from typing import TYPE_CHECKING +from unittest.mock import Mock from click.testing import CliRunner -from mock import Mock from runway._cli import cli from runway._cli.commands import destroy @@ -14,16 +14,16 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture, MonkeyPatch + import pytest from ...conftest import CpConfigTypeDef def test_dismantle( - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cd_tmp_path: Path, cp_config: CpConfigTypeDef, - monkeypatch: MonkeyPatch, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test dismantle.""" cp_config("min_required", cd_tmp_path) diff --git a/tests/integration/cli/commands/test_docs.py b/tests/integration/cli/commands/test_docs.py index fae9717a4..b98c5d56e 100644 --- a/tests/integration/cli/commands/test_docs.py +++ b/tests/integration/cli/commands/test_docs.py @@ -3,16 +3,16 @@ from __future__ import annotations from typing import TYPE_CHECKING +from unittest.mock import patch from click.testing import CliRunner -from mock import patch from runway._cli import cli if TYPE_CHECKING: - from mock import MagicMock + from unittest.mock import MagicMock -DOCS_URL = "https://docs.onica.com/projects/runway/" +DOCS_URL = "https://runway.readthedocs.io/" 
@patch("click.launch") @@ -22,10 +22,5 @@ def test_docs(mock_launch: MagicMock) -> None: assert runner.invoke(cli, ["docs"], env={}).exit_code == 0 mock_launch.assert_called_once_with(DOCS_URL) - assert ( - runner.invoke( - cli, ["docs"], env={"LD_LIBRARY_PATH_ORIG": "something"} - ).exit_code - == 0 - ) + assert runner.invoke(cli, ["docs"], env={"LD_LIBRARY_PATH_ORIG": "something"}).exit_code == 0 assert mock_launch.call_count == 2 diff --git a/tests/integration/cli/commands/test_envvars.py b/tests/integration/cli/commands/test_envvars.py index 41c50694e..a64c69bdf 100644 --- a/tests/integration/cli/commands/test_envvars.py +++ b/tests/integration/cli/commands/test_envvars.py @@ -4,16 +4,16 @@ import logging from typing import TYPE_CHECKING +from unittest.mock import Mock from click.testing import CliRunner -from mock import Mock from runway._cli import cli if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture, MonkeyPatch + import pytest from ...conftest import CpConfigTypeDef @@ -30,7 +30,7 @@ def test_envvars( - cd_tmp_path: Path, cp_config: CpConfigTypeDef, monkeypatch: MonkeyPatch + cd_tmp_path: Path, cp_config: CpConfigTypeDef, monkeypatch: pytest.MonkeyPatch ) -> None: """Test envvars.""" monkeypatch.setattr("platform.system", Mock(return_value="Darwin")) @@ -42,7 +42,7 @@ def test_envvars( def test_envvar_windows( - cd_tmp_path: Path, cp_config: CpConfigTypeDef, monkeypatch: MonkeyPatch + cd_tmp_path: Path, cp_config: CpConfigTypeDef, monkeypatch: pytest.MonkeyPatch ) -> None: """Test envvars for Windows.""" monkeypatch.setattr("platform.system", Mock(return_value="Windows")) @@ -58,7 +58,7 @@ def test_envvar_windows( assert result1.output == POSIX_OUTPUT -def test_envvars_no_config(caplog: LogCaptureFixture, cd_tmp_path: Path) -> None: +def test_envvars_no_config(caplog: pytest.LogCaptureFixture, cd_tmp_path: Path) -> None: """Test envvars with no config in the directory or parent.""" caplog.set_level(logging.ERROR, logger="runway") runner = CliRunner() @@ -72,7 +72,7 @@ def test_envvars_no_config(caplog: LogCaptureFixture, cd_tmp_path: Path) -> None def test_envvars_no_env_vars( - caplog: LogCaptureFixture, cd_tmp_path: Path, cp_config: CpConfigTypeDef + caplog: pytest.LogCaptureFixture, cd_tmp_path: Path, cp_config: CpConfigTypeDef ) -> None: """Test envvars with no env_vars in the config.""" caplog.set_level(logging.ERROR, logger="runway") diff --git a/tests/integration/cli/commands/test_gen_sample.py b/tests/integration/cli/commands/test_gen_sample.py index 734f89b70..8596dda4d 100644 --- a/tests/integration/cli/commands/test_gen_sample.py +++ b/tests/integration/cli/commands/test_gen_sample.py @@ -13,10 +13,8 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture - -def test_cdk_csharp(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: +def test_cdk_csharp(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway gen-sample cdk-csharp`` command.""" caplog.set_level(logging.INFO, logger="runway.cli") runner = CliRunner() @@ -49,7 +47,7 @@ def test_cdk_csharp(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: ] -def test_cdk_py(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: +def test_cdk_py(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway gen-sample cdk-py`` command.""" caplog.set_level(logging.INFO, logger="runway.cli") runner = CliRunner() @@ -81,7 +79,7 @@ def test_cdk_py(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: ] -def 
test_cdk_tsc(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: +def test_cdk_tsc(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway gen-sample cdk-tsc`` command.""" caplog.set_level(logging.INFO, logger="runway.cli") runner = CliRunner() @@ -113,7 +111,7 @@ def test_cdk_tsc(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: ] -def test_cfn(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: +def test_cfn(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway gen-sample cfn`` command.""" caplog.set_level(logging.INFO, logger="runway.cli") runner = CliRunner() @@ -131,7 +129,7 @@ def test_cfn(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: assert caplog.messages == [f"Sample CloudFormation module created at {module}"] -def test_cfngin(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: +def test_cfngin(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway gen-sample cfngin`` command.""" caplog.set_level(logging.INFO, logger="runway.cli") runner = CliRunner() @@ -154,7 +152,7 @@ def test_cfngin(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: assert caplog.messages == [f"Sample CFNgin module created at {module}"] -def test_k8s_cfn_repo(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: +def test_k8s_cfn_repo(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway gen-sample k8s-cfn-repo`` command.""" caplog.set_level(logging.INFO, logger="runway.cli") runner = CliRunner() @@ -205,7 +203,7 @@ def test_k8s_cfn_repo(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: ] -def test_k8s_tf_repo(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: +def test_k8s_tf_repo(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway gen-sample k8s-tf-repo`` command.""" caplog.set_level(logging.INFO, logger="runway.cli") runner = CliRunner() @@ -251,7 +249,7 @@ def test_k8s_tf_repo(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: ] -def test_sls_py(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: +def test_sls_py(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway gen-sample sls-py`` command.""" caplog.set_level(logging.INFO, logger="runway.cli") runner = CliRunner() @@ -282,7 +280,7 @@ def test_sls_py(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: ] -def test_sls_tsc(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: +def test_sls_tsc(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway gen-sample sls-tsc`` command.""" caplog.set_level(logging.INFO, logger="runway.cli") runner = CliRunner() @@ -315,7 +313,7 @@ def test_sls_tsc(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: ] -def test_static_angular(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: +def test_static_angular(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway gen-sample static-angular`` command.""" caplog.set_level(logging.INFO, logger="runway.cli") runner = CliRunner() @@ -369,7 +367,7 @@ def test_static_angular(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: ] -def test_static_react(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: +def test_static_react(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway gen-sample static-react`` command.""" caplog.set_level(logging.INFO, logger="runway.cli") runner = CliRunner() @@ -409,7 +407,7 @@ def test_static_react(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: ] -def 
test_tf(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: +def test_tf(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway gen-sample tf`` command.""" caplog.set_level(logging.INFO, logger="runway.cli") runner = CliRunner() @@ -450,7 +448,7 @@ def test_tf(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: ], ) def test_dir_exists( - command: str, dir_name: str, caplog: LogCaptureFixture, cd_tmp_path: Path + command: str, dir_name: str, caplog: pytest.LogCaptureFixture, cd_tmp_path: Path ) -> None: """Test ``runway gen-sample`` commands when directory exists.""" caplog.set_level(logging.ERROR, logger="runway.cli.gen_sample") diff --git a/tests/integration/cli/commands/test_init.py b/tests/integration/cli/commands/test_init.py index ed5e889a6..ccc9010d8 100644 --- a/tests/integration/cli/commands/test_init.py +++ b/tests/integration/cli/commands/test_init.py @@ -9,9 +9,9 @@ import logging from typing import TYPE_CHECKING +from unittest.mock import Mock from click.testing import CliRunner -from mock import Mock from pydantic import ValidationError from runway._cli import cli @@ -23,7 +23,7 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture + import pytest from pytest_mock import MockerFixture from ...conftest import CpConfigTypeDef @@ -34,7 +34,7 @@ def test_init( cd_tmp_path: Path, cp_config: CpConfigTypeDef, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, ) -> None: """Test init.""" @@ -64,13 +64,12 @@ def test_init_handle_validation_error( f"{MODULE}.Runway", spec=Runway, spec_set=True, - init=Mock(side_effect=ValidationError([], Mock())), # type: ignore + init=Mock(side_effect=ValidationError), ) cp_config("min_required", cd_tmp_path) runner = CliRunner() result = runner.invoke(cli, ["init"]) assert result.exit_code == 1 - assert "ValidationError" in result.output def test_init_handle_config_not_found( @@ -117,16 +116,14 @@ def test_init_options_deploy_environment( assert mock_runway.call_args.args[1].env.name == "e-option" assert ( - runner.invoke( - cli, ["init", "--deploy-environment", "deploy-environment-option"] - ).exit_code + runner.invoke(cli, ["init", "--deploy-environment", "deploy-environment-option"]).exit_code == 0 ) assert mock_runway.call_args.args[1].env.name == "deploy-environment-option" def test_init_options_tag( - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cd_tmp_path: Path, cp_config: CpConfigTypeDef, mocker: MockerFixture, diff --git a/tests/integration/cli/commands/test_new.py b/tests/integration/cli/commands/test_new.py index e25f7f12e..a56271a78 100644 --- a/tests/integration/cli/commands/test_new.py +++ b/tests/integration/cli/commands/test_new.py @@ -13,10 +13,10 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture + import pytest -def test_new(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: +def test_new(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway new`` command.""" caplog.set_level(logging.INFO, logger="runway.cli") runner = CliRunner() @@ -32,17 +32,15 @@ def test_new(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: assert caplog.messages == [ "runway.yml generated", "See addition getting started information at " - "https://docs.onica.com/projects/runway/page/getting_started.html", + "https://runway.readthedocs.io/page/getting_started.html", ] -def test_new_file_exists(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: +def 
test_new_file_exists(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway new`` command with existing file.""" caplog.set_level(logging.ERROR, logger="runway.cli") (cd_tmp_path / "runway.yml").touch() runner = CliRunner() result = runner.invoke(cli, ["new"]) assert result.exit_code == 1 - assert caplog.messages == [ - "There is already a runway.yml file in the current directory" - ] + assert caplog.messages == ["There is already a runway.yml file in the current directory"] diff --git a/tests/integration/cli/commands/test_plan.py b/tests/integration/cli/commands/test_plan.py index f9344211b..3dca1d8da 100644 --- a/tests/integration/cli/commands/test_plan.py +++ b/tests/integration/cli/commands/test_plan.py @@ -9,9 +9,9 @@ import logging from typing import TYPE_CHECKING +from unittest.mock import Mock from click.testing import CliRunner -from mock import Mock from runway._cli import cli from runway.config import RunwayConfig @@ -21,7 +21,7 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture + import pytest from pytest_mock import MockerFixture from ...conftest import CpConfigTypeDef @@ -29,9 +29,7 @@ MODULE = "runway._cli.commands._plan" -def test_plan( - cd_tmp_path: Path, cp_config: CpConfigTypeDef, mocker: MockerFixture -) -> None: +def test_plan(cd_tmp_path: Path, cp_config: CpConfigTypeDef, mocker: MockerFixture) -> None: """Test plan.""" mock_runway = mocker.patch(f"{MODULE}.Runway", Mock(spec=Runway, spec_set=True)) cp_config("min_required", cd_tmp_path) @@ -73,16 +71,14 @@ def test_plan_options_deploy_environment( assert mock_runway.call_args.args[1].env.name == "e-option" assert ( - runner.invoke( - cli, ["plan", "--deploy-environment", "deploy-environment-option"] - ).exit_code + runner.invoke(cli, ["plan", "--deploy-environment", "deploy-environment-option"]).exit_code == 0 ) assert mock_runway.call_args.args[1].env.name == "deploy-environment-option" def test_plan_options_tag( - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cd_tmp_path: Path, cp_config: CpConfigTypeDef, mocker: MockerFixture, @@ -92,12 +88,7 @@ def test_plan_options_tag( mock_runway = mocker.patch(f"{MODULE}.Runway", Mock(spec=Runway, spec_set=True)) cp_config("tagged_modules", cd_tmp_path) runner = CliRunner() - assert ( - runner.invoke( - cli, ["plan", "--tag", "app:test-app", "--tag", "tier:iac"] - ).exit_code - == 0 - ) + assert runner.invoke(cli, ["plan", "--tag", "app:test-app", "--tag", "tier:iac"]).exit_code == 0 deployment = mock_runway.return_value.plan.call_args.args[0][0] assert len(deployment.modules) == 1 assert deployment.modules[0].name == "sampleapp-01.cfn" diff --git a/tests/integration/cli/commands/test_preflight.py b/tests/integration/cli/commands/test_preflight.py index bb307bbd3..ebe95952f 100644 --- a/tests/integration/cli/commands/test_preflight.py +++ b/tests/integration/cli/commands/test_preflight.py @@ -4,9 +4,9 @@ import logging from typing import TYPE_CHECKING +from unittest.mock import Mock from click.testing import CliRunner -from mock import Mock from runway._cli import cli from runway._cli.commands import test @@ -14,16 +14,16 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture, MonkeyPatch + import pytest from ...conftest import CpConfigTypeDef def test_preflight( - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cd_tmp_path: Path, cp_config: CpConfigTypeDef, - monkeypatch: MonkeyPatch, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test ``runway 
preflight``.""" cp_config("min_required", cd_tmp_path) diff --git a/tests/integration/cli/commands/test_run_python.py b/tests/integration/cli/commands/test_run_python.py index 3fa7aee53..beb5a57fa 100644 --- a/tests/integration/cli/commands/test_run_python.py +++ b/tests/integration/cli/commands/test_run_python.py @@ -14,9 +14,7 @@ def test_run_python(cd_tmp_path: Path) -> None: """Test ``runway run-python hello_world.py``.""" - (cd_tmp_path / "hello_world.py").write_text( - "if __name__ == '__main__': print('hello world')" - ) + (cd_tmp_path / "hello_world.py").write_text("if __name__ == '__main__': print('hello world')") runner = CliRunner() result = runner.invoke(cli, ["run-python", "hello_world.py"]) assert result.exit_code == 0 diff --git a/tests/integration/cli/commands/test_schema_cfngin.py b/tests/integration/cli/commands/test_schema_cfngin.py index dfb90df2c..7164b4115 100644 --- a/tests/integration/cli/commands/test_schema_cfngin.py +++ b/tests/integration/cli/commands/test_schema_cfngin.py @@ -2,6 +2,7 @@ from __future__ import annotations +import json from typing import TYPE_CHECKING from click.testing import CliRunner @@ -17,14 +18,20 @@ def test_schema_cfngin() -> None: """Test ``runway schema cfngin``.""" result = CliRunner().invoke(cli, ["schema", "cfngin"]) assert result.exit_code == 0 - assert result.output == CfnginConfigDefinitionModel.schema_json(indent=4) + "\n" + assert ( + result.output + == json.dumps(CfnginConfigDefinitionModel.model_json_schema(), indent=4) + "\n" + ) def test_schema_cfngin_indent() -> None: """Test ``runway schema cfngin --indent 2``.""" result = CliRunner().invoke(cli, ["schema", "cfngin", "--indent", "2"]) assert result.exit_code == 0 - assert result.output == CfnginConfigDefinitionModel.schema_json(indent=2) + "\n" + assert ( + result.output + == json.dumps(CfnginConfigDefinitionModel.model_json_schema(), indent=2) + "\n" + ) def test_schema_cfngin_output(cd_tmp_path: Path) -> None: @@ -36,5 +43,5 @@ def test_schema_cfngin_output(cd_tmp_path: Path) -> None: assert file_path.is_file() assert ( file_path.read_text() - == CfnginConfigDefinitionModel.schema_json(indent=4) + "\n" + == json.dumps(CfnginConfigDefinitionModel.model_json_schema(), indent=4) + "\n" ) diff --git a/tests/integration/cli/commands/test_schema_runway.py b/tests/integration/cli/commands/test_schema_runway.py index 10695a62b..612ed413b 100644 --- a/tests/integration/cli/commands/test_schema_runway.py +++ b/tests/integration/cli/commands/test_schema_runway.py @@ -2,6 +2,7 @@ from __future__ import annotations +import json from typing import TYPE_CHECKING from click.testing import CliRunner @@ -17,14 +18,20 @@ def test_schema_runway() -> None: """Test ``runway schema runway``.""" result = CliRunner().invoke(cli, ["schema", "runway"]) assert result.exit_code == 0 - assert result.output == RunwayConfigDefinitionModel.schema_json(indent=4) + "\n" + assert ( + result.output + == json.dumps(RunwayConfigDefinitionModel.model_json_schema(), indent=4) + "\n" + ) def test_schema_runway_indent() -> None: """Test ``runway schema runway --indent 2``.""" result = CliRunner().invoke(cli, ["schema", "runway", "--indent", "2"]) assert result.exit_code == 0 - assert result.output == RunwayConfigDefinitionModel.schema_json(indent=2) + "\n" + assert ( + result.output + == json.dumps(RunwayConfigDefinitionModel.model_json_schema(), indent=2) + "\n" + ) def test_schema_runway_output(cd_tmp_path: Path) -> None: @@ -36,5 +43,5 @@ def test_schema_runway_output(cd_tmp_path: Path) -> None: assert 
file_path.is_file() assert ( file_path.read_text() - == RunwayConfigDefinitionModel.schema_json(indent=4) + "\n" + == json.dumps(RunwayConfigDefinitionModel.model_json_schema(), indent=4) + "\n" ) diff --git a/tests/integration/cli/commands/test_takeoff.py b/tests/integration/cli/commands/test_takeoff.py index f78ee1b89..a53761a54 100644 --- a/tests/integration/cli/commands/test_takeoff.py +++ b/tests/integration/cli/commands/test_takeoff.py @@ -4,9 +4,9 @@ import logging from typing import TYPE_CHECKING +from unittest.mock import Mock from click.testing import CliRunner -from mock import Mock from runway._cli import cli from runway._cli.commands import deploy @@ -14,16 +14,16 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture, MonkeyPatch + import pytest from ...conftest import CpConfigTypeDef def test_takeoff( - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cd_tmp_path: Path, cp_config: CpConfigTypeDef, - monkeypatch: MonkeyPatch, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test takeoff.""" cp_config("min_required", cd_tmp_path) diff --git a/tests/integration/cli/commands/test_taxi.py b/tests/integration/cli/commands/test_taxi.py index 782596f93..035d7f9e3 100644 --- a/tests/integration/cli/commands/test_taxi.py +++ b/tests/integration/cli/commands/test_taxi.py @@ -4,9 +4,9 @@ import logging from typing import TYPE_CHECKING +from unittest.mock import Mock from click.testing import CliRunner -from mock import Mock from runway._cli import cli from runway._cli.commands import plan @@ -14,16 +14,16 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture, MonkeyPatch + import pytest from ...conftest import CpConfigTypeDef def test_taxi( - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cd_tmp_path: Path, cp_config: CpConfigTypeDef, - monkeypatch: MonkeyPatch, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test taxi.""" cp_config("min_required", cd_tmp_path) diff --git a/tests/integration/cli/commands/test_test.py b/tests/integration/cli/commands/test_test.py index 3dd451b0f..ae2156b25 100644 --- a/tests/integration/cli/commands/test_test.py +++ b/tests/integration/cli/commands/test_test.py @@ -37,40 +37,28 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import CaptureFixture, LogCaptureFixture + import pytest # def test_test_invalid_type(cd_tmp_path, capfd, caplog): def test_test_invalid_type( cd_tmp_path: Path, -) -> None: # TODO update after catching error +) -> None: # TODO (kyle): update after catching error """Test ``runway test`` with two tests; one invalid.""" # caplog.set_level(logging.INFO, logger="runway.core") runway_yml = cd_tmp_path / "runway.yml" runway_yml.write_text( - yaml.safe_dump( - {"deployments": [], "tests": [INVALID_TYPE.copy(), SUCCESS.copy()]} - ) + yaml.safe_dump({"deployments": [], "tests": [INVALID_TYPE.copy(), SUCCESS.copy()]}) ) runner = CliRunner() result = runner.invoke(cli, ["test"]) assert result.exit_code == 1 - assert result.exception.errors()[0]["loc"] == ("tests", 0, "type") + assert result.exception.errors()[0]["loc"] == ("tests", 0, "type") # type: ignore - # captured = capfd.readouterr() - # logs = "\n".join(caplog.messages) - # print(captured) - # assert "found 2 test(s)" in logs - # assert "invalid-type:running test (in progress)" in logs - # assert 'invalid-type:unable to find handler of type "invalid"' in logs - # assert "success:running test (in progress)" in logs - # assert "Hello world" in captured.out - # assert "success:running 
test (pass)" in logs - -def test_test_not_defined(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: +def test_test_not_defined(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway test`` with no tests defined.""" caplog.set_level(logging.ERROR) runway_yml = cd_tmp_path / "runway.yml" @@ -83,14 +71,12 @@ def test_test_not_defined(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: def test_test_single_successful( - cd_tmp_path: Path, capfd: CaptureFixture[str], caplog: LogCaptureFixture + cd_tmp_path: Path, capfd: pytest.CaptureFixture[str], caplog: pytest.LogCaptureFixture ) -> None: """Test ``runway test`` with a single, successful test.""" caplog.set_level(logging.INFO, logger="runway.core") runway_yml = cd_tmp_path / "runway.yml" - runway_yml.write_text( - yaml.safe_dump({"deployments": [], "tests": [SUCCESS.copy()]}) - ) + runway_yml.write_text(yaml.safe_dump({"deployments": [], "tests": [SUCCESS.copy()]})) runner = CliRunner() result = runner.invoke(cli, ["test"]) @@ -105,7 +91,7 @@ def test_test_single_successful( def test_test_two_test( - cd_tmp_path: Path, capfd: CaptureFixture[str], caplog: LogCaptureFixture + cd_tmp_path: Path, capfd: pytest.CaptureFixture[str], caplog: pytest.LogCaptureFixture ) -> None: """Test ``runway test`` with two tests; one failing.""" caplog.set_level(logging.INFO, logger="runway.core") @@ -130,15 +116,13 @@ def test_test_two_test( def test_test_two_test_required( - cd_tmp_path: Path, capfd: CaptureFixture[str], caplog: LogCaptureFixture + cd_tmp_path: Path, capfd: pytest.CaptureFixture[str], caplog: pytest.LogCaptureFixture ) -> None: """Test ``runway test`` with two tests; one failing required.""" caplog.set_level(logging.INFO) runway_yml = cd_tmp_path / "runway.yml" runway_yml.write_text( - yaml.safe_dump( - {"deployments": [], "tests": [FAIL_REQUIRED.copy(), SUCCESS.copy()]} - ) + yaml.safe_dump({"deployments": [], "tests": [FAIL_REQUIRED.copy(), SUCCESS.copy()]}) ) runner = CliRunner() diff --git a/tests/integration/cli/commands/test_whichenv.py b/tests/integration/cli/commands/test_whichenv.py index d90aa47b9..71a29a83f 100644 --- a/tests/integration/cli/commands/test_whichenv.py +++ b/tests/integration/cli/commands/test_whichenv.py @@ -13,29 +13,25 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture + import pytest -def test_whichenv(caplog: LogCaptureFixture, cd_tmp_path: Path) -> None: +def test_whichenv(caplog: pytest.LogCaptureFixture, cd_tmp_path: Path) -> None: """Test ``runway whichenv``.""" caplog.set_level(logging.DEBUG, logger="runway") runway_yml = cd_tmp_path / "runway.yml" - runway_yml.write_text( - yaml.safe_dump({"deployments": [], "ignore_git_branch": True}) - ) + runway_yml.write_text(yaml.safe_dump({"deployments": [], "ignore_git_branch": True})) runner = CliRunner() result = runner.invoke(cli, ["whichenv"], env={}) assert result.exit_code == 0 assert result.output == cd_tmp_path.name + "\n" -def test_whichenv_debug(caplog: LogCaptureFixture, cd_tmp_path: Path) -> None: +def test_whichenv_debug(caplog: pytest.LogCaptureFixture, cd_tmp_path: Path) -> None: """Test ``runway whichenv`` debug.""" caplog.set_level(logging.DEBUG, logger="runway") runway_yml = cd_tmp_path / "runway.yml" - runway_yml.write_text( - yaml.safe_dump({"deployments": [], "ignore_git_branch": True}) - ) + runway_yml.write_text(yaml.safe_dump({"deployments": [], "ignore_git_branch": True})) runner = CliRunner() result = runner.invoke(cli, ["whichenv", "--debug"]) assert result.exit_code == 0 @@ 
-43,13 +39,11 @@ def test_whichenv_debug(caplog: LogCaptureFixture, cd_tmp_path: Path) -> None: assert "set dependency log level to debug" not in caplog.messages -def test_whichenv_debug_debug(caplog: LogCaptureFixture, cd_tmp_path: Path) -> None: +def test_whichenv_debug_debug(caplog: pytest.LogCaptureFixture, cd_tmp_path: Path) -> None: """Test ``runway whichenv`` debug.""" caplog.set_level(logging.DEBUG, logger="runway") runway_yml = cd_tmp_path / "runway.yml" - runway_yml.write_text( - yaml.safe_dump({"deployments": [], "ignore_git_branch": True}) - ) + runway_yml.write_text(yaml.safe_dump({"deployments": [], "ignore_git_branch": True})) runner = CliRunner() result = runner.invoke(cli, ["whichenv"], env={"DEBUG": "2"}) assert result.exit_code == 0 @@ -60,12 +54,8 @@ def test_whichenv_debug_debug(caplog: LogCaptureFixture, cd_tmp_path: Path) -> N def test_whichenv_invalid_debug_environ(cd_tmp_path: Path) -> None: """Test ``runway whichenv`` with invalid debug environ.""" runway_yml = cd_tmp_path / "runway.yml" - runway_yml.write_text( - yaml.safe_dump({"deployments": [], "ignore_git_branch": True}) - ) + runway_yml.write_text(yaml.safe_dump({"deployments": [], "ignore_git_branch": True})) runner = CliRunner() result = runner.invoke(cli, ["whichenv"], env={"DEBUG": "invalid"}) assert result.exit_code == 2 - assert ( - "Invalid value for '--debug': 'invalid' is not a valid integer" in result.output - ) + assert "Invalid value for '--debug': 'invalid' is not a valid integer" in result.output diff --git a/tests/integration/cli/commands/tfenv/conftest.py b/tests/integration/cli/commands/tfenv/conftest.py new file mode 100644 index 000000000..086a598d0 --- /dev/null +++ b/tests/integration/cli/commands/tfenv/conftest.py @@ -0,0 +1,24 @@ +"""Pytest fixtures and plugins.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import pytest + +if TYPE_CHECKING: + from pathlib import Path + + from pytest_mock import MockFixture + + +@pytest.fixture(autouse=True) +def versions_dir(cd_tmp_path: Path, mocker: MockFixture) -> Path: + """Patches TFEnvManager.versions_dir.""" + path = cd_tmp_path / "versions" + path.mkdir(exist_ok=True) + mocker.patch("runway._cli.commands._tfenv._install.TFEnvManager.versions_dir", path) + mocker.patch("runway._cli.commands._tfenv._list.TFEnvManager.versions_dir", path) + mocker.patch("runway._cli.commands._tfenv._run.TFEnvManager.versions_dir", path) + mocker.patch("runway._cli.commands._tfenv._uninstall.TFEnvManager.versions_dir", path) + return path diff --git a/tests/integration/cli/commands/tfenv/test_install.py b/tests/integration/cli/commands/tfenv/test_install.py index f803a316b..44712b058 100644 --- a/tests/integration/cli/commands/tfenv/test_install.py +++ b/tests/integration/cli/commands/tfenv/test_install.py @@ -1,30 +1,20 @@ """Test ``runway tfenv install`` command.""" -# pylint: disable=unused-argument from __future__ import annotations import logging from pathlib import Path from typing import TYPE_CHECKING -import pytest from click.testing import CliRunner from runway._cli import cli -from runway.env_mgr.tfenv import TFEnvManager if TYPE_CHECKING: - from pytest import LogCaptureFixture - from pytest_mock import MockerFixture + import pytest -@pytest.fixture(autouse=True, scope="function") -def patch_versions_dir(mocker: MockerFixture, tmp_path: Path) -> None: - """Patch TFEnvManager.versions_dir.""" - mocker.patch.object(TFEnvManager, "versions_dir", tmp_path) - - -def test_tfenv_install(cd_tmp_path: Path, caplog: 
LogCaptureFixture) -> None: +def test_tfenv_install(cd_tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway tfenv install`` reading version from a file. For best results, remove any existing installs. @@ -41,18 +31,16 @@ def test_tfenv_install(cd_tmp_path: Path, caplog: LogCaptureFixture) -> None: def test_tfenv_install_no_version_file( - cd_tmp_path: Path, caplog: LogCaptureFixture + cli_runner: CliRunner, caplog: pytest.LogCaptureFixture ) -> None: """Test ``runway tfenv install`` no version file.""" caplog.set_level(logging.ERROR, logger="runway") - runner = CliRunner() - result = runner.invoke(cli, ["tfenv", "install"]) - assert result.exit_code == 1 + assert cli_runner.invoke(cli, ["tfenv", "install"]).exit_code == 1 assert "unable to find a .terraform-version file" in "\n".join(caplog.messages) -def test_tfenv_install_version(caplog: LogCaptureFixture) -> None: +def test_tfenv_install_version(caplog: pytest.LogCaptureFixture) -> None: """Test ``runway tfenv install ``. For best results, remove any existing installs. @@ -63,5 +51,5 @@ def test_tfenv_install_version(caplog: LogCaptureFixture) -> None: result = runner.invoke(cli, ["tfenv", "install", "0.12.1"]) assert result.exit_code == 0 - kb_bin = Path(caplog.messages[-1].replace("terraform path: ", "")) - assert kb_bin.exists() + tf_bin = Path(caplog.messages[-1].replace("terraform path: ", "")) + assert tf_bin.exists() diff --git a/tests/integration/cli/commands/tfenv/test_list.py b/tests/integration/cli/commands/tfenv/test_list.py index 161d99820..e6aaf6a06 100644 --- a/tests/integration/cli/commands/tfenv/test_list.py +++ b/tests/integration/cli/commands/tfenv/test_list.py @@ -13,12 +13,12 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture + import pytest from pytest_mock import MockerFixture def test_tfenv_list( - caplog: LogCaptureFixture, mocker: MockerFixture, tmp_path: Path + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, tmp_path: Path ) -> None: """Test ``runway tfenv list``.""" caplog.set_level(logging.INFO, logger="runway.cli.commands.tfenv") @@ -31,13 +31,11 @@ def test_tfenv_list( result = runner.invoke(cli, ["tfenv", "list"]) assert result.exit_code == 0 assert caplog.messages == ["Terraform versions installed:"] - assert result.stdout == "\n".join( - ["[runway] Terraform versions installed:", "0.13.0", "1.0.0", ""] - ) + assert result.stdout == "[runway] Terraform versions installed:\n0.13.0\n1.0.0\n" def test_tfenv_list_none( - caplog: LogCaptureFixture, mocker: MockerFixture, tmp_path: Path + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, tmp_path: Path ) -> None: """Test ``runway tfenv list`` no versions installed.""" caplog.set_level(logging.WARNING, logger="runway.cli.commands.tfenv") diff --git a/tests/integration/cli/commands/tfenv/test_run.py b/tests/integration/cli/commands/tfenv/test_run.py index 9de265278..e8ab9b1b4 100644 --- a/tests/integration/cli/commands/tfenv/test_run.py +++ b/tests/integration/cli/commands/tfenv/test_run.py @@ -1,33 +1,29 @@ """Test ``runway tfenv run`` command.""" -# pylint: disable=unused-argument from __future__ import annotations import logging from typing import TYPE_CHECKING -from click.testing import CliRunner - from runway._cli import cli if TYPE_CHECKING: from pathlib import Path - from pytest import CaptureFixture, LogCaptureFixture + import pytest + from click.testing import CliRunner -def test_tfenv_run_no_version_file( - cd_tmp_path: Path, caplog: LogCaptureFixture -) -> None: +def 
test_tfenv_run_no_version_file(cli_runner: CliRunner, caplog: pytest.LogCaptureFixture) -> None: """Test ``runway tfenv run -- --help`` no version file.""" caplog.set_level(logging.ERROR, logger="runway") - runner = CliRunner() - result = runner.invoke(cli, ["tfenv", "run", "--", "--help"]) - assert result.exit_code == 1 + assert cli_runner.invoke(cli, ["tfenv", "run", "--", "--help"]).exit_code == 1 assert "unable to find a .terraform-version file" in "\n".join(caplog.messages) -def test_tfenv_run_separator(cd_tmp_path: Path, capfd: CaptureFixture[str]) -> None: +def test_tfenv_run_separator( + cli_runner: CliRunner, capfd: pytest.CaptureFixture[str], tmp_path: Path +) -> None: """Test ``runway tfenv run -- --help``. Parsing of command using ``--`` as a separator between options and args. @@ -36,25 +32,25 @@ def test_tfenv_run_separator(cd_tmp_path: Path, capfd: CaptureFixture[str]) -> N pass options shared with Runway such as ``--help``. """ - (cd_tmp_path / ".terraform-version").write_text("0.12.0") - runner = CliRunner() - result = runner.invoke(cli, ["tfenv", "run", "--", "--help"]) + (tmp_path / ".terraform-version").write_text("0.12.0") + result = cli_runner.invoke(cli, ["tfenv", "run", "--", "--help"]) captured = capfd.readouterr() # capfd required for subprocess assert result.exit_code == 0 assert "runway" not in captured.out assert "terraform [-version] [-help] [args]" in captured.out -def test_tfenv_run_version(cd_tmp_path: Path, capfd: CaptureFixture[str]) -> None: +def test_tfenv_run_version( + cli_runner: CliRunner, capfd: pytest.CaptureFixture[str], tmp_path: Path +) -> None: """Test ``runway tfenv run --version``. Parsing of bare command. """ version = "0.12.0" - (cd_tmp_path / ".terraform-version").write_text(version) - runner = CliRunner() - result = runner.invoke(cli, ["tfenv", "run", "--version"]) + (tmp_path / ".terraform-version").write_text(version) + result = cli_runner.invoke(cli, ["tfenv", "run", "--version"]) captured = capfd.readouterr() # capfd required for subprocess assert result.exit_code == 0 assert f"Terraform v{version}" in captured.out diff --git a/tests/integration/cli/commands/tfenv/test_uninstall.py b/tests/integration/cli/commands/tfenv/test_uninstall.py index ddfdf06f9..f658a0af9 100644 --- a/tests/integration/cli/commands/tfenv/test_uninstall.py +++ b/tests/integration/cli/commands/tfenv/test_uninstall.py @@ -1,50 +1,41 @@ """Test ``runway tfenv uninstall`` command.""" -# pylint: disable=unused-argument from __future__ import annotations import logging -from pathlib import Path from typing import TYPE_CHECKING -import pytest -from click.testing import CliRunner - from runway._cli import cli -from runway.env_mgr.tfenv import TF_VERSION_FILENAME, TFEnvManager +from runway.env_mgr.tfenv import TF_VERSION_FILENAME if TYPE_CHECKING: - from pytest import LogCaptureFixture - from pytest_mock import MockerFixture - -LOGGER = "runway.cli.commands.tfenv" + from pathlib import Path + import pytest + from click.testing import CliRunner -@pytest.fixture(autouse=True, scope="function") -def patch_versions_dir(mocker: MockerFixture, tmp_path: Path) -> None: - """Patch TFEnvManager.versions_dir.""" - mocker.patch.object(TFEnvManager, "versions_dir", tmp_path) +LOGGER = "runway.cli.commands.tfenv" -def test_tfenv_uninstall(cd_tmp_path: Path) -> None: +def test_tfenv_uninstall(cli_runner: CliRunner, versions_dir: Path) -> None: """Test ``runway tfenv uninstall``.""" version = "1.0.0" - version_dir = cd_tmp_path / version + version_dir = versions_dir / version 
version_dir.mkdir() - runner = CliRunner() - result = runner.invoke(cli, ["tfenv", "uninstall", "1.0.0"]) + result = cli_runner.invoke(cli, ["tfenv", "uninstall", "1.0.0"]) assert result.exit_code == 0 assert not version_dir.exists() -def test_tfenv_uninstall_all(caplog: LogCaptureFixture, cd_tmp_path: Path) -> None: +def test_tfenv_uninstall_all( + caplog: pytest.LogCaptureFixture, cli_runner: CliRunner, versions_dir: Path +) -> None: """Test ``runway tfenv uninstall --all``.""" caplog.set_level(logging.INFO, logger=LOGGER) - version_dirs = [cd_tmp_path / "0.12.0", cd_tmp_path / "1.0.0"] + version_dirs = [versions_dir / "0.12.0", versions_dir / "1.0.0"] for v in version_dirs: v.mkdir() - runner = CliRunner() - result = runner.invoke(cli, ["tfenv", "uninstall", "--all"]) + result = cli_runner.invoke(cli, ["tfenv", "uninstall", "--all"]) assert result.exit_code == 0 assert "uninstalling all versions of Terraform..." in caplog.messages assert "all versions of Terraform have been uninstalled" in caplog.messages @@ -52,15 +43,14 @@ def test_tfenv_uninstall_all(caplog: LogCaptureFixture, cd_tmp_path: Path) -> No def test_tfenv_uninstall_all_takes_precedence( - caplog: LogCaptureFixture, cd_tmp_path: Path + caplog: pytest.LogCaptureFixture, cli_runner: CliRunner, versions_dir: Path ) -> None: """Test ``runway tfenv uninstall --all`` takes precedence over arg.""" caplog.set_level(logging.INFO, logger=LOGGER) - version_dirs = [cd_tmp_path / "0.12.0", cd_tmp_path / "1.0.0"] + version_dirs = [versions_dir / "0.12.0", versions_dir / "1.0.0"] for v in version_dirs: v.mkdir() - runner = CliRunner() - result = runner.invoke(cli, ["tfenv", "uninstall", "0.13.0", "--all"]) + result = cli_runner.invoke(cli, ["tfenv", "uninstall", "0.13.0", "--all"]) assert result.exit_code == 0 assert "uninstalling all versions of Terraform..." in caplog.messages assert "all versions of Terraform have been uninstalled" in caplog.messages @@ -68,54 +58,52 @@ def test_tfenv_uninstall_all_takes_precedence( def test_tfenv_uninstall_all_none_installed( - caplog: LogCaptureFixture, cd_tmp_path: Path + caplog: pytest.LogCaptureFixture, cli_runner: CliRunner ) -> None: """Test ``runway tfenv uninstall --all`` none installed.""" caplog.set_level(logging.INFO, logger=LOGGER) - runner = CliRunner() - result = runner.invoke(cli, ["tfenv", "uninstall", "--all"]) + result = cli_runner.invoke(cli, ["tfenv", "uninstall", "--all"]) assert result.exit_code == 0 assert "uninstalling all versions of Terraform..." 
in caplog.messages assert "all versions of Terraform have been uninstalled" in caplog.messages -def test_tfenv_uninstall_arg_takes_precedence(cd_tmp_path: Path) -> None: +def test_tfenv_uninstall_arg_takes_precedence( + cd_tmp_path: Path, cli_runner: CliRunner, versions_dir: Path +) -> None: """Test ``runway tfenv uninstall`` arg takes precedence over file.""" version = "1.0.0" - version_dir = cd_tmp_path / version + version_dir = versions_dir / version version_dir.mkdir() (cd_tmp_path / TF_VERSION_FILENAME).write_text("0.12.0") - runner = CliRunner() - result = runner.invoke(cli, ["tfenv", "uninstall", "1.0.0"]) + result = cli_runner.invoke(cli, ["tfenv", "uninstall", "1.0.0"]) assert result.exit_code == 0 assert not version_dir.exists() def test_tfenv_uninstall_no_version( - caplog: LogCaptureFixture, cd_tmp_path: Path + caplog: pytest.LogCaptureFixture, cli_runner: CliRunner ) -> None: """Test ``runway tfenv uninstall`` no version.""" caplog.set_level(logging.ERROR, logger=LOGGER) - runner = CliRunner() - result = runner.invoke(cli, ["tfenv", "uninstall"]) + result = cli_runner.invoke(cli, ["tfenv", "uninstall"]) assert result.exit_code != 0 assert "version not specified" in caplog.messages -def test_tfenv_uninstall_not_installed(cd_tmp_path: Path) -> None: +def test_tfenv_uninstall_not_installed(cli_runner: CliRunner) -> None: """Test ``runway tfenv uninstall`` not installed.""" - runner = CliRunner() - result = runner.invoke(cli, ["tfenv", "uninstall", "1.0.0"]) - assert result.exit_code != 0 + assert cli_runner.invoke(cli, ["tfenv", "uninstall", "1.0.0"]).exit_code != 0 -def test_tfenv_uninstall_version_file(cd_tmp_path: Path) -> None: +def test_tfenv_uninstall_version_file( + cd_tmp_path: Path, cli_runner: CliRunner, versions_dir: Path +) -> None: """Test ``runway tfenv uninstall`` version file.""" version = "1.0.0" - version_dir = cd_tmp_path / version + version_dir = versions_dir / version version_dir.mkdir() (cd_tmp_path / TF_VERSION_FILENAME).write_text(version) - runner = CliRunner() - result = runner.invoke(cli, ["tfenv", "uninstall"]) + result = cli_runner.invoke(cli, ["tfenv", "uninstall"]) assert result.exit_code == 0 assert not version_dir.exists() diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 9f793eef2..7d642d531 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -1,6 +1,5 @@ """Pytest configuration, fixtures, and plugins.""" -# pylint: disable=redefined-outer-name from __future__ import annotations import shutil @@ -17,8 +16,7 @@ CpConfigTypeDef = Callable[[str, Path], Path] -# pylint: disable=unused-argument -def pytest_ignore_collect(path: Any, config: Config) -> bool: +def pytest_ignore_collect(path: Any, config: Config) -> bool: # noqa: ARG001 """Determine if this directory should have its tests collected.""" if config.option.functional: return True @@ -27,13 +25,13 @@ def pytest_ignore_collect(path: Any, config: Config) -> bool: return not (config.option.integration or config.option.integration_only) -@pytest.fixture +@pytest.fixture() def configs() -> Path: """Path to Runway config fixtures.""" return TEST_ROOT.parent / "fixtures" / "configs" -@pytest.fixture +@pytest.fixture() def cp_config(configs: Path) -> Callable[[str, Path], Path]: """Copy a config file.""" diff --git a/tests/unit/cfngin/actions/conftest.py b/tests/unit/cfngin/actions/conftest.py index ac890d883..2a22d3288 100644 --- a/tests/unit/cfngin/actions/conftest.py +++ b/tests/unit/cfngin/actions/conftest.py @@ -1,22 +1,22 @@ """Pytest 
fixtures and plugins.""" -# pyright: basic from __future__ import annotations from datetime import datetime from typing import TYPE_CHECKING import pytest -from mock import MagicMock from runway.cfngin.providers.aws.default import Provider if TYPE_CHECKING: + from unittest.mock import MagicMock + from mypy_boto3_cloudformation.type_defs import StackTypeDef from pytest_mock import MockerFixture -@pytest.fixture(scope="function") +@pytest.fixture() def provider_get_stack(mocker: MockerFixture) -> MagicMock: """Patches ``runway.cfngin.providers.aws.default.Provider.get_stack``.""" return_value: StackTypeDef = { diff --git a/tests/unit/cfngin/actions/test_base.py b/tests/unit/cfngin/actions/test_base.py index 0c80e09fd..920f826b0 100644 --- a/tests/unit/cfngin/actions/test_base.py +++ b/tests/unit/cfngin/actions/test_base.py @@ -1,11 +1,10 @@ """Tests for runway.cfngin.actions.base.""" -# pylint: disable=protected-access -# pyright: basic import unittest +from unittest.mock import MagicMock, PropertyMock, patch import botocore.exceptions -from mock import MagicMock, PropertyMock, patch +import pytest from runway.cfngin.actions.base import BaseAction from runway.cfngin.blueprints.base import Blueprint @@ -64,9 +63,7 @@ def test_ensure_cfn_bucket_exists(self, mock_ensure_s3_bucket: MagicMock) -> Non """Test ensure cfn bucket exists.""" action = BaseAction( context=mock_context("mynamespace"), - provider_builder=MockProviderBuilder( - provider=Provider(get_session("us-east-1")) - ), + provider_builder=MockProviderBuilder(provider=Provider(get_session("us-east-1"))), ) assert not action.ensure_cfn_bucket() mock_ensure_s3_bucket.assert_called_once_with( @@ -83,22 +80,16 @@ def test_ensure_cfn_bucket_exists_raise_cfngin_bucket_not_found( ) action = BaseAction( context=mock_context("mynamespace"), - provider_builder=MockProviderBuilder( - provider=Provider(get_session("us-east-1")) - ), + provider_builder=MockProviderBuilder(provider=Provider(get_session("us-east-1"))), ) - with self.assertRaises(CfnginBucketNotFound): + with pytest.raises(CfnginBucketNotFound): assert action.ensure_cfn_bucket() mock_ensure_s3_bucket.assert_called_once_with( action.s3_conn, action.bucket_name, None, create=False ) - @patch( - "runway.context.CfnginContext.persistent_graph_tags", new_callable=PropertyMock - ) - @patch( - "runway.cfngin.actions.base.BaseAction._stack_action", new_callable=PropertyMock - ) + @patch("runway.context.CfnginContext.persistent_graph_tags", new_callable=PropertyMock) + @patch("runway.cfngin.actions.base.BaseAction._stack_action", new_callable=PropertyMock) def test_generate_plan_no_persist_exclude( self, mock_stack_action: PropertyMock, mock_tags: PropertyMock ) -> None: @@ -112,29 +103,23 @@ def test_generate_plan_no_persist_exclude( ) action = BaseAction( context=context, - provider_builder=MockProviderBuilder( - provider=self.provider, region=self.region - ), + provider_builder=MockProviderBuilder(provider=self.provider, region=self.region), ) plan = action._generate_plan(include_persistent_graph=False) mock_tags.assert_not_called() - self.assertIsInstance(plan, Plan) + assert isinstance(plan, Plan) # order is different between python2/3 so can't compare dicts result_graph_dict = plan.graph.to_dict() - self.assertEqual(2, len(result_graph_dict)) - self.assertEqual(set(), result_graph_dict["stack1"]) - self.assertEqual({"stack1"}, result_graph_dict["stack2"]) - self.assertEqual(BaseAction.DESCRIPTION, plan.description) - self.assertTrue(plan.require_unlocked) - - @patch( - 
"runway.context.CfnginContext.persistent_graph_tags", new_callable=PropertyMock - ) - @patch( - "runway.cfngin.actions.base.BaseAction._stack_action", new_callable=PropertyMock - ) + assert len(result_graph_dict) == 2 + assert set() == result_graph_dict["stack1"] + assert {"stack1"} == result_graph_dict["stack2"] + assert plan.description == BaseAction.DESCRIPTION + assert plan.require_unlocked + + @patch("runway.context.CfnginContext.persistent_graph_tags", new_callable=PropertyMock) + @patch("runway.cfngin.actions.base.BaseAction._stack_action", new_callable=PropertyMock) def test_generate_plan_no_persist_include( self, mock_stack_action: PropertyMock, mock_tags: PropertyMock ) -> None: @@ -148,29 +133,23 @@ def test_generate_plan_no_persist_include( ) action = BaseAction( context=context, - provider_builder=MockProviderBuilder( - provider=self.provider, region=self.region - ), + provider_builder=MockProviderBuilder(provider=self.provider, region=self.region), ) plan = action._generate_plan(include_persistent_graph=True) mock_tags.assert_not_called() - self.assertIsInstance(plan, Plan) + assert isinstance(plan, Plan) # order is different between python2/3 so can't compare dicts result_graph_dict = plan.graph.to_dict() - self.assertEqual(2, len(result_graph_dict)) - self.assertEqual(set(), result_graph_dict["stack1"]) - self.assertEqual({"stack1"}, result_graph_dict["stack2"]) - self.assertEqual(BaseAction.DESCRIPTION, plan.description) - self.assertTrue(plan.require_unlocked) - - @patch( - "runway.context.CfnginContext.persistent_graph_tags", new_callable=PropertyMock - ) - @patch( - "runway.cfngin.actions.base.BaseAction._stack_action", new_callable=PropertyMock - ) + assert len(result_graph_dict) == 2 + assert set() == result_graph_dict["stack1"] + assert {"stack1"} == result_graph_dict["stack2"] + assert plan.description == BaseAction.DESCRIPTION + assert plan.require_unlocked + + @patch("runway.context.CfnginContext.persistent_graph_tags", new_callable=PropertyMock) + @patch("runway.cfngin.actions.base.BaseAction._stack_action", new_callable=PropertyMock) def test_generate_plan_with_persist_exclude( self, mock_stack_action: PropertyMock, mock_tags: PropertyMock ) -> None: @@ -184,28 +163,22 @@ def test_generate_plan_with_persist_exclude( context._persistent_graph = Graph.from_steps([persist_step]) action = BaseAction( context=context, - provider_builder=MockProviderBuilder( - provider=self.provider, region=self.region - ), + provider_builder=MockProviderBuilder(provider=self.provider, region=self.region), ) plan = action._generate_plan(include_persistent_graph=False) - self.assertIsInstance(plan, Plan) + assert isinstance(plan, Plan) # order is different between python2/3 so can't compare dicts result_graph_dict = plan.graph.to_dict() - self.assertEqual(2, len(result_graph_dict)) - self.assertEqual(set(), result_graph_dict["stack1"]) - self.assertEqual({"stack1"}, result_graph_dict["stack2"]) - self.assertEqual(BaseAction.DESCRIPTION, plan.description) - self.assertTrue(plan.require_unlocked) - - @patch( - "runway.context.CfnginContext.persistent_graph_tags", new_callable=PropertyMock - ) - @patch( - "runway.cfngin.actions.base.BaseAction._stack_action", new_callable=PropertyMock - ) + assert len(result_graph_dict) == 2 + assert set() == result_graph_dict["stack1"] + assert {"stack1"} == result_graph_dict["stack2"] + assert plan.description == BaseAction.DESCRIPTION + assert plan.require_unlocked + + @patch("runway.context.CfnginContext.persistent_graph_tags", 
new_callable=PropertyMock) + @patch("runway.cfngin.actions.base.BaseAction._stack_action", new_callable=PropertyMock) def test_generate_plan_with_persist_include( self, mock_stack_action: PropertyMock, mock_tags: PropertyMock ) -> None: @@ -219,30 +192,24 @@ def test_generate_plan_with_persist_include( context._persistent_graph = Graph.from_steps([persist_step]) action = BaseAction( context=context, - provider_builder=MockProviderBuilder( - provider=self.provider, region=self.region - ), + provider_builder=MockProviderBuilder(provider=self.provider, region=self.region), ) plan = action._generate_plan(include_persistent_graph=True) - self.assertIsInstance(plan, Plan) + assert isinstance(plan, Plan) mock_tags.assert_called_once() # order is different between python2/3 so can't compare dicts result_graph_dict = plan.graph.to_dict() - self.assertEqual(3, len(result_graph_dict)) - self.assertEqual(set(), result_graph_dict["stack1"]) - self.assertEqual({"stack1"}, result_graph_dict["stack2"]) - self.assertEqual(set(), result_graph_dict["removed"]) - self.assertEqual(BaseAction.DESCRIPTION, plan.description) - self.assertTrue(plan.require_unlocked) - - @patch( - "runway.context.CfnginContext.persistent_graph_tags", new_callable=PropertyMock - ) - @patch( - "runway.cfngin.actions.base.BaseAction._stack_action", new_callable=PropertyMock - ) + assert len(result_graph_dict) == 3 + assert set() == result_graph_dict["stack1"] + assert {"stack1"} == result_graph_dict["stack2"] + assert set() == result_graph_dict["removed"] + assert plan.description == BaseAction.DESCRIPTION + assert plan.require_unlocked + + @patch("runway.context.CfnginContext.persistent_graph_tags", new_callable=PropertyMock) + @patch("runway.cfngin.actions.base.BaseAction._stack_action", new_callable=PropertyMock) def test_generate_plan_with_persist_no_lock_req( self, mock_stack_action: PropertyMock, mock_tags: PropertyMock ) -> None: @@ -256,30 +223,26 @@ def test_generate_plan_with_persist_no_lock_req( context._persistent_graph = Graph.from_steps([persist_step]) action = BaseAction( context=context, - provider_builder=MockProviderBuilder( - provider=self.provider, region=self.region - ), + provider_builder=MockProviderBuilder(provider=self.provider, region=self.region), ) - plan = action._generate_plan( - include_persistent_graph=True, require_unlocked=False - ) + plan = action._generate_plan(include_persistent_graph=True, require_unlocked=False) - self.assertIsInstance(plan, Plan) + assert isinstance(plan, Plan) mock_tags.assert_called_once() # order is different between python2/3 so can't compare dicts result_graph_dict = plan.graph.to_dict() - self.assertEqual(3, len(result_graph_dict)) - self.assertEqual(set(), result_graph_dict["stack1"]) - self.assertEqual({"stack1"}, result_graph_dict["stack2"]) - self.assertEqual(set(), result_graph_dict["removed"]) - self.assertEqual(BaseAction.DESCRIPTION, plan.description) - self.assertFalse(plan.require_unlocked) + assert len(result_graph_dict) == 3 + assert set() == result_graph_dict["stack1"] + assert {"stack1"} == result_graph_dict["stack2"] + assert set() == result_graph_dict["removed"] + assert plan.description == BaseAction.DESCRIPTION + assert not plan.require_unlocked def test_stack_template_url(self) -> None: """Test stack template url.""" context = mock_context("mynamespace") - blueprint = MockBlueprint(name="myblueprint", context=context) + blueprint = MockBlueprint(name="test-blueprint", context=context) region = "us-east-1" endpoint = "https://example.com" @@ -295,8 +258,8 
@@ def test_stack_template_url(self) -> None: autospec=True, return_value=endpoint, ): - self.assertEqual( - action.stack_template_url(blueprint), - f"{endpoint}/cfngin-{context.namespace}-{region}/stack_templates/" - f"{context.namespace}-{blueprint.name}/{blueprint.name}-{MOCK_VERSION}.json", + assert ( + action.stack_template_url(blueprint) + == f"{endpoint}/cfngin-{context.namespace}-{region}/stack_templates/" + f"{context.namespace}-{blueprint.name}/{blueprint.name}-{MOCK_VERSION}.json" ) diff --git a/tests/unit/cfngin/actions/test_deploy.py b/tests/unit/cfngin/actions/test_deploy.py index 801d96529..80383a03a 100644 --- a/tests/unit/cfngin/actions/test_deploy.py +++ b/tests/unit/cfngin/actions/test_deploy.py @@ -1,15 +1,14 @@ """Tests for runway.cfngin.actions.deploy.""" -# pylint: disable=unused-argument, protected-access -# pyright: basic from __future__ import annotations import unittest from collections import namedtuple -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast +from datetime import datetime +from typing import TYPE_CHECKING, Any, cast +from unittest.mock import MagicMock, PropertyMock, patch import pytest -from mock import MagicMock, PropertyMock, patch from runway.cfngin import exceptions from runway.cfngin.actions import deploy @@ -50,39 +49,35 @@ from runway.cfngin.status import Status -def mock_stack_parameters(parameters: Dict[str, Any]) -> StackTypeDef: +def mock_stack_parameters(parameters: dict[str, Any]) -> StackTypeDef: """Mock stack parameters.""" return { # type: ignore - "Parameters": [ - {"ParameterKey": k, "ParameterValue": v} for k, v in parameters.items() - ] + "Parameters": [{"ParameterKey": k, "ParameterValue": v} for k, v in parameters.items()] } class MockProvider(BaseProvider): """Mock provider.""" - _outputs: Dict[str, Dict[str, str]] + _outputs: dict[str, dict[str, str]] - def __init__( - self, *, outputs: Optional[Dict[str, Dict[str, str]]] = None, **_: Any - ) -> None: + def __init__(self, *, outputs: dict[str, dict[str, str]] | None = None, **_: Any) -> None: """Instantiate class.""" self._outputs = outputs or {} - def set_outputs(self, outputs: Dict[str, Dict[str, str]]) -> None: + def set_outputs(self, outputs: dict[str, dict[str, str]]) -> None: """Set outputs.""" self._outputs = outputs def get_stack( self, stack_name: str, *_args: Any, **_kwargs: Any - ) -> Dict[str, Union[Dict[str, str], str]]: + ) -> dict[str, dict[str, str] | str]: """Get stack.""" if stack_name not in self._outputs: raise exceptions.StackDoesNotExist(stack_name) return {"name": stack_name, "outputs": self._outputs[stack_name]} - def get_outputs(self, stack_name: str, *args: Any, **kwargs: Any) -> Dict[str, Any]: + def get_outputs(self, stack_name: str, *_args: Any, **_kwargs: Any) -> dict[str, Any]: """Get outputs.""" stack = self.get_stack(stack_name) return stack["outputs"] # type: ignore @@ -94,9 +89,9 @@ class MockStack: def __init__( self, name: str, - in_progress_behavior: Optional[str] = None, - tags: Any = None, - **_: Any, + in_progress_behavior: str | None = None, + *_args: Any, + **_kwargs: Any, ) -> None: """Instantiate class.""" self.name = name @@ -121,7 +116,7 @@ class TestAction: ) def test_upload_disabled( self, - bucket_name: Optional[str], + bucket_name: str | None, cfngin_context: CfnginContext, explicit: bool, expected: bool, @@ -156,16 +151,12 @@ def test_upload_disabled_setter_raise_cfngin_bucket_required( Action(cfngin_context).upload_disabled = False -class TestBuildAction( - unittest.TestCase -): # TODO: refactor tests 
into the TestAction class +class TestBuildAction(unittest.TestCase): # TODO (kyle): refactor tests into the TestAction class """Tests for runway.cfngin.actions.deploy.BuildAction.""" def setUp(self) -> None: """Run before tests.""" - self.context = CfnginContext( - config=CfnginConfig.parse_obj({"namespace": "namespace"}) - ) + self.context = CfnginContext(config=CfnginConfig.parse_obj({"namespace": "namespace"})) self.provider = MockProvider() self.deploy_action = deploy.Action( self.context, @@ -173,10 +164,10 @@ def setUp(self) -> None: ) def _get_context( - self, extra_config_args: Optional[Dict[str, Any]] = None, **kwargs: Any + self, extra_config_args: dict[str, Any] | None = None, **kwargs: Any ) -> CfnginContext: """Get context.""" - config: Dict[str, Any] = { + config: dict[str, Any] = { "namespace": "namespace", "stacks": [ {"name": "vpc", "template_path": "."}, @@ -217,56 +208,40 @@ def test_destroy_stack_delete_failed(self) -> None: status = self.deploy_action._destroy_stack( MockStack("vpc", in_progress_behavior="wait"), status=PENDING # type: ignore ) - provider.is_stack_being_destroyed.assert_called_once_with( - provider.get_stack.return_value - ) - provider.is_stack_destroyed.assert_called_once_with( - provider.get_stack.return_value - ) - provider.is_stack_in_progress.assert_called_once_with( - provider.get_stack.return_value - ) - provider.is_stack_destroy_possible.assert_called_once_with( - provider.get_stack.return_value - ) + provider.is_stack_being_destroyed.assert_called_once_with(provider.get_stack.return_value) + provider.is_stack_destroyed.assert_called_once_with(provider.get_stack.return_value) + provider.is_stack_in_progress.assert_called_once_with(provider.get_stack.return_value) + provider.is_stack_destroy_possible.assert_called_once_with(provider.get_stack.return_value) provider.get_delete_failed_status_reason.assert_called_once_with("vpc") - provider.get_stack_status_reason.assert_called_once_with( - provider.get_stack.return_value - ) + provider.get_stack_status_reason.assert_called_once_with(provider.get_stack.return_value) assert isinstance(status, FailedStatus) assert status.reason == "reason" - @patch( - "runway.context.CfnginContext.persistent_graph_tags", new_callable=PropertyMock - ) + @patch("runway.context.CfnginContext.persistent_graph_tags", new_callable=PropertyMock) def test_generate_plan_persist_destroy(self, mock_graph_tags: PropertyMock) -> None: """Test generate plan persist destroy.""" mock_graph_tags.return_value = {} - context = self._get_context( - extra_config_args={"persistent_graph_key": "test.json"} - ) - context._persistent_graph = Graph.from_steps( - [Step.from_stack_name("removed", context)] - ) + context = self._get_context(extra_config_args={"persistent_graph_key": "test.json"}) + context._persistent_graph = Graph.from_steps([Step.from_stack_name("removed", context)]) deploy_action = deploy.Action(context=context) plan = cast(Plan, deploy_action._Action__generate_plan()) # type: ignore - self.assertIsInstance(plan, Plan) - self.assertEqual(deploy.Action.DESCRIPTION, plan.description) + assert isinstance(plan, Plan) + assert plan.description == deploy.Action.DESCRIPTION mock_graph_tags.assert_called_once() # order is different between python2/3 so can't compare dicts result_graph_dict = plan.graph.to_dict() - self.assertEqual(5, len(result_graph_dict)) - self.assertEqual(set(), result_graph_dict["other"]) - self.assertEqual(set(), result_graph_dict["removed"]) - self.assertEqual(set(), result_graph_dict["vpc"]) - 
self.assertEqual({"vpc"}, result_graph_dict["bastion"]) - self.assertEqual({"bastion", "vpc"}, result_graph_dict["db"]) - self.assertEqual(deploy_action._destroy_stack, plan.graph.steps["removed"].fn) - self.assertEqual(deploy_action._launch_stack, plan.graph.steps["vpc"].fn) - self.assertEqual(deploy_action._launch_stack, plan.graph.steps["bastion"].fn) - self.assertEqual(deploy_action._launch_stack, plan.graph.steps["db"].fn) - self.assertEqual(deploy_action._launch_stack, plan.graph.steps["other"].fn) + assert len(result_graph_dict) == 5 + assert set() == result_graph_dict["other"] + assert set() == result_graph_dict["removed"] + assert set() == result_graph_dict["vpc"] + assert {"vpc"} == result_graph_dict["bastion"] + assert {"bastion", "vpc"} == result_graph_dict["db"] + assert deploy_action._destroy_stack == plan.graph.steps["removed"].fn + assert deploy_action._launch_stack == plan.graph.steps["vpc"].fn + assert deploy_action._launch_stack == plan.graph.steps["bastion"].fn + assert deploy_action._launch_stack == plan.graph.steps["db"].fn + assert deploy_action._launch_stack == plan.graph.steps["other"].fn def test_handle_missing_params(self) -> None: """Test handle missing params.""" @@ -282,17 +257,17 @@ def test_handle_missing_params(self) -> None: result = _handle_missing_parameters( parameter_values, all_params, required, existing_stack_params ) - self.assertEqual(sorted(result), sorted(expected_params.items())) + assert sorted(result) == sorted(expected_params.items()) def test_missing_params_no_existing_stack(self) -> None: """Test missing params no existing stack.""" all_params = ["Address", "StackName"] required = ["Address"] - parameter_values: Dict[str, Any] = {} - with self.assertRaises(exceptions.MissingParameterException) as result: + parameter_values: dict[str, Any] = {} + with pytest.raises(exceptions.MissingParameterException) as result: _handle_missing_parameters(parameter_values, all_params, required) - self.assertEqual(result.exception.parameters, required) + assert result.value.parameters == required def test_existing_stack_params_does_not_override_given_params(self) -> None: """Test existing stack params does not override given params.""" @@ -304,22 +279,19 @@ def test_existing_stack_params_does_not_override_given_params(self) -> None: result = _handle_missing_parameters( parameter_values, all_params, required, existing_stack_params ) - self.assertEqual(sorted(result), sorted(parameter_values.items())) + assert sorted(result) == sorted(parameter_values.items()) def test_generate_plan(self) -> None: """Test generate plan.""" context = self._get_context() deploy_action = deploy.Action(context, cancel=MockThreadingEvent()) # type: ignore plan = cast(Plan, deploy_action._Action__generate_plan()) # type: ignore - self.assertEqual( - { - "db": {"bastion", "vpc"}, - "bastion": {"vpc"}, - "other": set(), - "vpc": set(), - }, - plan.graph.to_dict(), - ) + assert plan.graph.to_dict() == { + "db": {"bastion", "vpc"}, + "bastion": {"vpc"}, + "other": set(), + "vpc": set(), + } def test_does_not_execute_plan_when_outline_specified(self) -> None: """Test does not execute plan when outline specified.""" @@ -327,7 +299,7 @@ def test_does_not_execute_plan_when_outline_specified(self) -> None: deploy_action = deploy.Action(context, cancel=MockThreadingEvent()) # type: ignore with patch.object(deploy_action, "_generate_plan") as mock_generate_plan: deploy_action.run(outline=True) - self.assertEqual(mock_generate_plan().execute.call_count, 0) + assert 
mock_generate_plan().execute.call_count == 0 def test_execute_plan_when_outline_not_specified(self) -> None: """Test execute plan when outline not specified.""" @@ -335,15 +307,11 @@ def test_execute_plan_when_outline_not_specified(self) -> None: deploy_action = deploy.Action(context, cancel=MockThreadingEvent()) # type: ignore with patch.object(deploy_action, "_generate_plan") as mock_generate_plan: deploy_action.run(outline=False) - self.assertEqual(mock_generate_plan().execute.call_count, 1) + assert mock_generate_plan().execute.call_count == 1 - @patch( - "runway.context.CfnginContext.persistent_graph_tags", new_callable=PropertyMock - ) + @patch("runway.context.CfnginContext.persistent_graph_tags", new_callable=PropertyMock) @patch("runway.context.CfnginContext.lock_persistent_graph", new_callable=MagicMock) - @patch( - "runway.context.CfnginContext.unlock_persistent_graph", new_callable=MagicMock - ) + @patch("runway.context.CfnginContext.unlock_persistent_graph", new_callable=MagicMock) @patch("runway.cfngin.plan.Plan.execute", new_callable=MagicMock) def test_run_persist( self, @@ -354,12 +322,8 @@ def test_run_persist( ) -> None: """Test run persist.""" mock_graph_tags.return_value = {} - context = self._get_context( - extra_config_args={"persistent_graph_key": "test.json"} - ) - context._persistent_graph = Graph.from_steps( - [Step.from_stack_name("removed", context)] - ) + context = self._get_context(extra_config_args={"persistent_graph_key": "test.json"}) + context._persistent_graph = Graph.from_steps([Step.from_stack_name("removed", context)]) deploy_action = deploy.Action(context=context) deploy_action.run() @@ -382,7 +346,7 @@ def test_should_update(self) -> None: for test in test_scenarios: mock_stack.locked = test.locked mock_stack.force = test.force - self.assertEqual(deploy.should_update(mock_stack), test.result) # type: ignore + assert deploy.should_update(mock_stack) == test.result # type: ignore def test_should_ensure_cfn_bucket(self) -> None: """Test should ensure cfn bucket.""" @@ -399,9 +363,7 @@ def test_should_ensure_cfn_bucket(self) -> None: dump = scenario["dump"] result = scenario["result"] try: - self.assertEqual( - deploy.should_ensure_cfn_bucket(outline, dump), result # type: ignore - ) + assert deploy.should_ensure_cfn_bucket(outline, dump) == result # type: ignore except AssertionError as err: err.args += ("scenario", str(scenario)) raise @@ -418,10 +380,10 @@ def test_should_submit(self) -> None: mock_stack.name = "test-stack" for test in test_scenarios: mock_stack.enabled = test.enabled - self.assertEqual(deploy.should_submit(mock_stack), test.result) # type: ignore + assert deploy.should_submit(mock_stack) == test.result # type: ignore -class TestLaunchStack(TestBuildAction): # TODO: refactor tests to be pytest tests +class TestLaunchStack(TestBuildAction): # TODO (kyle): refactor tests to be pytest tests """Tests for runway.cfngin.actions.deploy.BuildAction launch stack.""" def setUp(self) -> None: @@ -453,7 +415,7 @@ def patch_object(*args: Any, **kwargs: Any) -> None: self.addCleanup(mock_object.stop) mock_object.start() - def get_stack(name: str, *_args: Any, **_kwargs: Any) -> Dict[str, Any]: + def get_stack(name: str, *_args: Any, **_kwargs: Any) -> dict[str, Any]: if name != self.stack.name or not self.stack_status: raise StackDoesNotExist(name) @@ -464,11 +426,12 @@ def get_stack(name: str, *_args: Any, **_kwargs: Any) -> Dict[str, Any]: "Tags": [], } - def get_events(name: str, *_args: Any, **_kwargs: Any) -> List[Dict[str, str]]: + def 
get_events(*_args: Any, **_kwargs: Any) -> list[dict[str, Any]]: return [ { "ResourceStatus": "ROLLBACK_IN_PROGRESS", "ResourceStatusReason": "CFN fail", + "Timestamp": datetime(2015, 1, 1), } ] @@ -482,19 +445,19 @@ def get_events(name: str, *_args: Any, **_kwargs: Any) -> List[Dict[str, str]]: def _advance( self, - new_provider_status: Optional[str], - expected_status: Optional[Status], + new_provider_status: str | None, + expected_status: Status | None, expected_reason: str, ) -> None: """Advance.""" self.stack_status = new_provider_status status = self.step._run_once() - self.assertEqual(status, expected_status) - self.assertEqual(status.reason, expected_reason) + assert status == expected_status + assert status.reason == expected_reason def test_launch_stack_disabled(self) -> None: """Test launch stack disabled.""" - self.assertEqual(self.step.status, PENDING) + assert self.step.status == PENDING self.stack.enabled = False self._advance(None, NotSubmittedStatus(), "disabled") @@ -502,7 +465,7 @@ def test_launch_stack_disabled(self) -> None: def test_launch_stack_create(self) -> None: """Test launch stack create.""" # initial status should be PENDING - self.assertEqual(self.step.status, PENDING) + assert self.step.status == PENDING # initial run should return SUBMITTED since we've passed off to CF self._advance(None, SUBMITTED, "creating new stack") @@ -516,7 +479,7 @@ def test_launch_stack_create(self) -> None: def test_launch_stack_create_rollback(self) -> None: """Test launch stack create rollback.""" # initial status should be PENDING - self.assertEqual(self.step.status, PENDING) + assert self.step.status == PENDING # initial run should return SUBMITTED since we've passed off to CF self._advance(None, SUBMITTED, "creating new stack") @@ -535,21 +498,16 @@ def test_launch_stack_create_rollback(self) -> None: def test_launch_stack_recreate(self) -> None: """Test launch stack recreate.""" - # pylint: disable=attribute-defined-outside-init self.provider.recreate_failed = True # initial status should be PENDING - self.assertEqual(self.step.status, PENDING) + assert self.step.status == PENDING # first action with an existing failed stack should be deleting it - self._advance( - "ROLLBACK_COMPLETE", SUBMITTED, "destroying stack for re-creation" - ) + self._advance("ROLLBACK_COMPLETE", SUBMITTED, "destroying stack for re-creation") # status should stay as submitted during deletion - self._advance( - "DELETE_IN_PROGRESS", SUBMITTED, "destroying stack for re-creation" - ) + self._advance("DELETE_IN_PROGRESS", SUBMITTED, "destroying stack for re-creation") # deletion being complete must trigger re-creation self._advance("DELETE_COMPLETE", SUBMITTED, "re-creating stack") @@ -563,7 +521,7 @@ def test_launch_stack_recreate(self) -> None: def test_launch_stack_update_skipped(self) -> None: """Test launch stack update skipped.""" # initial status should be PENDING - self.assertEqual(self.step.status, PENDING) + assert self.step.status == PENDING # start the upgrade, that will be skipped self.provider.update_stack.side_effect = StackDidNotChange # type: ignore @@ -572,7 +530,7 @@ def test_launch_stack_update_skipped(self) -> None: def test_launch_stack_update_rollback(self) -> None: """Test launch stack update rollback.""" # initial status should be PENDING - self.assertEqual(self.step.status, PENDING) + assert self.step.status == PENDING # initial run should return SUBMITTED since we've passed off to CF self._advance("CREATE_COMPLETE", SUBMITTED, "updating existing stack") @@ -589,7 +547,7 @@ def 
test_launch_stack_update_rollback(self) -> None: def test_launch_stack_update_success(self) -> None: """Test launch stack update success.""" # initial status should be PENDING - self.assertEqual(self.step.status, PENDING) + assert self.step.status == PENDING # initial run should return SUBMITTED since we've passed off to CF self._advance("CREATE_COMPLETE", SUBMITTED, "updating existing stack") @@ -601,7 +559,7 @@ def test_launch_stack_update_success(self) -> None: self._advance("UPDATE_COMPLETE", COMPLETE, "updating existing stack") -class TestFunctions(unittest.TestCase): # TODO: refactor tests to be pytest tests +class TestFunctions(unittest.TestCase): # TODO (kyle): refactor tests to be pytest tests """Tests for runway.cfngin.actions.deploy module level functions.""" def setUp(self) -> None: @@ -618,8 +576,8 @@ def test_resolve_parameters_unused_parameter(self) -> None: } params = {"a": "Apple", "c": "Carrot"} resolved_params = _resolve_parameters(params, self.blueprint) - self.assertNotIn("c", resolved_params) - self.assertIn("a", resolved_params) + assert "c" not in resolved_params + assert "a" in resolved_params def test_resolve_parameters_none_conversion(self) -> None: """Test resolve parameters none conversion.""" @@ -629,7 +587,7 @@ def test_resolve_parameters_none_conversion(self) -> None: } params = {"a": None, "c": "Carrot"} resolved_params = _resolve_parameters(params, self.blueprint) - self.assertNotIn("a", resolved_params) + assert "a" not in resolved_params def test_resolve_parameters_booleans(self) -> None: """Test resolve parameters booleans.""" @@ -639,5 +597,5 @@ def test_resolve_parameters_booleans(self) -> None: } params = {"a": True, "b": False} resolved_params = _resolve_parameters(params, self.blueprint) - self.assertEqual("true", resolved_params["a"]) - self.assertEqual("false", resolved_params["b"]) + assert resolved_params["a"] == "true" + assert resolved_params["b"] == "false" diff --git a/tests/unit/cfngin/actions/test_destroy.py b/tests/unit/cfngin/actions/test_destroy.py index 9b743b2d1..48e5abeba 100644 --- a/tests/unit/cfngin/actions/test_destroy.py +++ b/tests/unit/cfngin/actions/test_destroy.py @@ -1,13 +1,10 @@ """Tests for runway.cfngin.actions.destroy.""" -# pylint: disable=protected-access,unused-argument -# pyright: basic from __future__ import annotations import unittest -from typing import Any, Dict, Optional - -from mock import MagicMock, PropertyMock, patch +from typing import Any +from unittest.mock import MagicMock, PropertyMock, patch from runway.cfngin.actions import destroy from runway.cfngin.exceptions import StackDoesNotExist @@ -22,7 +19,7 @@ class MockStack: """Mock our local CFNgin stack and an AWS provider stack.""" - def __init__(self, name: str, tags: Any = None, **_: Any) -> None: + def __init__(self, name: str, *_args: Any, **_kwargs: Any) -> None: """Instantiate class.""" self.name = name self.fqn = name @@ -34,13 +31,13 @@ def __init__(self, name: str, tags: Any = None, **_: Any) -> None: class TestDestroyAction(unittest.TestCase): """Tests for runway.cfngin.actions.destroy.DestroyAction.""" - def setUp(self): + def setUp(self) -> None: """Run before tests.""" self.context = self._get_context() self.action = destroy.Action(self.context, cancel=MockThreadingEvent()) # type: ignore def _get_context( - self, extra_config_args: Optional[Dict[str, Any]] = None, **kwargs: Any + self, extra_config_args: dict[str, Any] | None = None, **kwargs: Any ) -> CfnginContext: """Get context.""" config = { @@ -68,28 +65,25 @@ def _get_context( 
def test_generate_plan(self) -> None: """Test generate plan.""" plan = self.action._generate_plan(reverse=True) - self.assertEqual( - { - "vpc": {"db", "instance", "bastion"}, - "other": set(), - "bastion": {"instance", "db"}, - "instance": {"db"}, - "db": {"other"}, - }, - plan.graph.to_dict(), - ) + assert plan.graph.to_dict() == { + "vpc": {"db", "instance", "bastion"}, + "other": set(), + "bastion": {"instance", "db"}, + "instance": {"db"}, + "db": {"other"}, + } def test_only_execute_plan_when_forced(self) -> None: """Test only execute plan when forced.""" with patch.object(self.action, "_generate_plan") as mock_generate_plan: self.action.run(force=False) - self.assertEqual(mock_generate_plan().execute.call_count, 0) + assert mock_generate_plan().execute.call_count == 0 def test_execute_plan_when_forced(self) -> None: """Test execute plan when forced.""" with patch.object(self.action, "_generate_plan") as mock_generate_plan: self.action.run(force=True) - self.assertEqual(mock_generate_plan().execute.call_count, 1) + assert mock_generate_plan().execute.call_count == 1 def test_destroy_stack_complete_if_state_submitted(self) -> None: """Test destroy stack complete if state submitted.""" @@ -101,11 +95,11 @@ def test_destroy_stack_complete_if_state_submitted(self) -> None: status = self.action._destroy_stack(MockStack("vpc"), status=PENDING) # type: ignore # if we haven't processed the step (ie. has never been SUBMITTED, # should be skipped) - self.assertEqual(status, SKIPPED) + assert status == SKIPPED status = self.action._destroy_stack(MockStack("vpc"), status=SUBMITTED) # type: ignore # if we have processed the step and then can't find the stack, it means # we successfully deleted it - self.assertEqual(status, COMPLETE) + assert status == COMPLETE def test_destroy_stack_delete_failed(self) -> None: """Test _destroy_stack DELETE_FAILED.""" @@ -121,19 +115,11 @@ def test_destroy_stack_delete_failed(self) -> None: provider.get_stack_status_reason.return_value = "reason" self.action.provider_builder = MockProviderBuilder(provider=provider) status = self.action._destroy_stack(MockStack("vpc"), status=PENDING) # type: ignore - provider.is_stack_destroyed.assert_called_once_with( - provider.get_stack.return_value - ) - provider.is_stack_in_progress.assert_called_once_with( - provider.get_stack.return_value - ) - provider.is_stack_destroy_possible.assert_called_once_with( - provider.get_stack.return_value - ) + provider.is_stack_destroyed.assert_called_once_with(provider.get_stack.return_value) + provider.is_stack_in_progress.assert_called_once_with(provider.get_stack.return_value) + provider.is_stack_destroy_possible.assert_called_once_with(provider.get_stack.return_value) provider.get_delete_failed_status_reason.assert_called_once_with("vpc") - provider.get_stack_status_reason.assert_called_once_with( - provider.get_stack.return_value - ) + provider.get_stack_status_reason.assert_called_once_with(provider.get_stack.return_value) assert isinstance(status, FailedStatus) assert status.reason == "reason" @@ -156,7 +142,7 @@ def get_stack(stack_name: Any) -> Any: mock_provider.get_stack.side_effect = StackDoesNotExist("mock") step.run() - self.assertEqual(step.status, SKIPPED) + assert step.status == SKIPPED # simulate stack getting successfully deleted mock_provider.get_stack.side_effect = get_stack @@ -164,25 +150,21 @@ def get_stack(stack_name: Any) -> Any: mock_provider.is_stack_in_progress.return_value = False step._run_once() - self.assertEqual(step.status, SUBMITTED) + assert step.status 
== SUBMITTED mock_provider.is_stack_destroyed.return_value = False mock_provider.is_stack_in_progress.return_value = True step._run_once() - self.assertEqual(step.status, SUBMITTED) + assert step.status == SUBMITTED mock_provider.is_stack_destroyed.return_value = True mock_provider.is_stack_in_progress.return_value = False step._run_once() - self.assertEqual(step.status, COMPLETE) + assert step.status == COMPLETE - @patch( - "runway.context.CfnginContext.persistent_graph_tags", new_callable=PropertyMock - ) + @patch("runway.context.CfnginContext.persistent_graph_tags", new_callable=PropertyMock) @patch("runway.context.CfnginContext.lock_persistent_graph", new_callable=MagicMock) - @patch( - "runway.context.CfnginContext.unlock_persistent_graph", new_callable=MagicMock - ) + @patch("runway.context.CfnginContext.unlock_persistent_graph", new_callable=MagicMock) @patch("runway.cfngin.plan.Plan.execute", new_callable=MagicMock) def test_run_persist( self, @@ -193,12 +175,8 @@ def test_run_persist( ) -> None: """Test run persist.""" mock_graph_tags.return_value = {} - context = self._get_context( - extra_config_args={"persistent_graph_key": "test.json"} - ) - context._persistent_graph = Graph.from_steps( - [Step.from_stack_name("removed", context)] - ) + context = self._get_context(extra_config_args={"persistent_graph_key": "test.json"}) + context._persistent_graph = Graph.from_steps([Step.from_stack_name("removed", context)]) destroy_action = destroy.Action(context=context) destroy_action.run(force=True) diff --git a/tests/unit/cfngin/actions/test_diff.py b/tests/unit/cfngin/actions/test_diff.py index 84906f175..5c217471e 100644 --- a/tests/unit/cfngin/actions/test_diff.py +++ b/tests/unit/cfngin/actions/test_diff.py @@ -1,17 +1,15 @@ """Tests for runway.cfngin.actions.diff.""" -# pylint: disable=protected-access -# pyright: basic from __future__ import annotations import logging import unittest from operator import attrgetter -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING +from unittest.mock import MagicMock, Mock, patch import pytest from botocore.exceptions import ClientError -from mock import MagicMock, Mock, patch from runway.cfngin.actions.diff import ( Action, @@ -26,10 +24,9 @@ from ..factories import MockProviderBuilder, MockThreadingEvent if TYPE_CHECKING: - from pytest import LogCaptureFixture from pytest_mock import MockerFixture - from ...factories import MockCFNginContext + from ...factories import MockCfnginContext MODULE = "runway.cfngin.actions.diff" @@ -50,11 +47,11 @@ class TestAction: def test_pre_run( self, mock_bucket_init: MagicMock, - caplog: LogCaptureFixture, - bucket_name: Optional[str], + caplog: pytest.LogCaptureFixture, + bucket_name: str | None, forbidden: bool, not_found: bool, - cfngin_context: MockCFNginContext, + cfngin_context: MockCfnginContext, ) -> None: """Test pre_run.""" caplog.set_level(logging.DEBUG, logger=MODULE) @@ -71,9 +68,7 @@ def test_pre_run( with pytest.raises(SystemExit) as excinfo: action.pre_run() assert excinfo.value.code == 1 - assert ( - f"access denied for CFNgin bucket: {bucket_name}" - ) in caplog.messages + assert (f"access denied for CFNgin bucket: {bucket_name}") in caplog.messages return action.pre_run() @@ -88,8 +83,8 @@ def test_pre_run( @pytest.mark.parametrize("stack_not_exist", [False, True]) def test__diff_stack_validationerror_template_too_large( self, - caplog: LogCaptureFixture, - cfngin_context: MockCFNginContext, + caplog: pytest.LogCaptureFixture, + cfngin_context: MockCfnginContext, 
mocker: MockerFixture, provider_get_stack: MagicMock, stack_not_exist: bool, @@ -100,9 +95,7 @@ def test__diff_stack_validationerror_template_too_large( cfngin_context.add_stubber("cloudformation") cfngin_context.config.cfngin_bucket = "" expected = SkippedStatus("cfngin_bucket: existing bucket required") - mock_build_parameters = mocker.patch.object( - Action, "build_parameters", return_value=[] - ) + mock_build_parameters = mocker.patch.object(Action, "build_parameters", return_value=[]) mock_get_stack_changes = mocker.patch.object( Provider, "get_stack_changes", @@ -147,35 +140,28 @@ class TestDictValueFormat(unittest.TestCase): def test_status(self) -> None: """Test status.""" added = DictValue("k0", None, "value_0") - self.assertEqual(added.status(), DictValue.ADDED) + assert added.status() == DictValue.ADDED removed = DictValue("k1", "value_1", None) - self.assertEqual(removed.status(), DictValue.REMOVED) + assert removed.status() == DictValue.REMOVED modified = DictValue("k2", "value_1", "value_2") - self.assertEqual(modified.status(), DictValue.MODIFIED) + assert modified.status() == DictValue.MODIFIED unmodified = DictValue("k3", "value_1", "value_1") - self.assertEqual(unmodified.status(), DictValue.UNMODIFIED) + assert unmodified.status() == DictValue.UNMODIFIED def test_format(self) -> None: """Test format.""" added = DictValue("k0", None, "value_0") - self.assertEqual(added.changes(), [f"+{added.key} = {added.new_value}"]) + assert added.changes() == [f"+{added.key} = {added.new_value}"] removed = DictValue("k1", "value_1", None) - self.assertEqual(removed.changes(), [f"-{removed.key} = {removed.old_value}"]) + assert removed.changes() == [f"-{removed.key} = {removed.old_value}"] modified = DictValue("k2", "value_1", "value_2") - self.assertEqual( - modified.changes(), - [ - f"-{modified.key} = {modified.old_value}", - f"+{modified.key} = {modified.new_value}", - ], - ) + assert modified.changes() == [ + f"-{modified.key} = {modified.old_value}", + f"+{modified.key} = {modified.new_value}", + ] unmodified = DictValue("k3", "value_1", "value_1") - self.assertEqual( - unmodified.changes(), [f" {unmodified.key} = {unmodified.old_value}"] - ) - self.assertEqual( - unmodified.changes(), [f" {unmodified.key} = {unmodified.new_value}"] - ) + assert unmodified.changes() == [f" {unmodified.key} = {unmodified.old_value}"] + assert unmodified.changes() == [f" {unmodified.key} = {unmodified.new_value}"] class TestDiffDictionary(unittest.TestCase): @@ -195,7 +181,7 @@ def test_diff_dictionaries(self) -> None: } count, changes = diff_dictionaries(old_dict, new_dict) - self.assertEqual(count, 3) + assert count == 3 expected_output = [ DictValue("a", "Apple", "Apple"), DictValue("b", "Banana", "Bob"), @@ -207,10 +193,10 @@ def test_diff_dictionaries(self) -> None: # compare all the outputs to the expected change for expected_change in expected_output: change = changes.pop(0) - self.assertEqual(change, expected_change) + assert change == expected_change # No extra output - self.assertEqual(len(changes), 0) + assert len(changes) == 0 class TestDiffParameters(unittest.TestCase): @@ -222,4 +208,4 @@ def test_diff_parameters_no_changes(self) -> None: new_params = {"a": "Apple"} param_diffs = diff_parameters(old_params, new_params) - self.assertEqual(param_diffs, []) + assert param_diffs == [] diff --git a/tests/unit/cfngin/actions/test_init.py b/tests/unit/cfngin/actions/test_init.py index 51411cec7..117663ddc 100644 --- a/tests/unit/cfngin/actions/test_init.py +++ 
b/tests/unit/cfngin/actions/test_init.py @@ -3,9 +3,9 @@ from __future__ import annotations from typing import TYPE_CHECKING +from unittest.mock import Mock import pytest -from mock import Mock from runway._logging import LogLevels from runway.cfngin.actions.init import Action @@ -14,7 +14,6 @@ from runway.core.providers.aws.s3 import Bucket if TYPE_CHECKING: - from pytest import LogCaptureFixture from pytest_mock import MockerFixture from runway.context import CfnginContext @@ -25,9 +24,7 @@ class TestAction: """Test Action.""" - def test___init__( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test___init__(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> None: """Test __init__.""" copied_context = mocker.patch.object(cfngin_context, "copy") obj = Action(cfngin_context) @@ -36,25 +33,18 @@ def test___init__( def test__stack_action(self, cfngin_context: CfnginContext) -> None: """Test _stack_action.""" - # pylint: disable=protected-access assert Action(cfngin_context)._stack_action is None - def test_cfngin_bucket( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test_cfngin_bucket(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> None: """Test cfngin_bucket.""" mocker.patch.object(cfngin_context, "copy", return_value=cfngin_context) mocker.patch.object(cfngin_context, "s3_client") bucket = mocker.patch(f"{MODULE}.Bucket") bucket_name = mocker.patch.object(cfngin_context, "bucket_name", "bucket_name") - bucket_region = mocker.patch.object( - cfngin_context, "bucket_region", "bucket_region" - ) + bucket_region = mocker.patch.object(cfngin_context, "bucket_region", "bucket_region") obj = Action(cfngin_context) assert obj.cfngin_bucket == bucket.return_value - bucket.assert_called_once_with( - cfngin_context, name=bucket_name, region=bucket_region - ) + bucket.assert_called_once_with(cfngin_context, name=bucket_name, region=bucket_region) def test_cfngin_bucket_handle_no_bucket( self, cfngin_context: CfnginContext, mocker: MockerFixture @@ -74,9 +64,7 @@ def test_default_cfngin_bucket_stack( """Test default_cfngin_bucket_stack.""" mocker.patch.object(cfngin_context, "copy", return_value=cfngin_context) bucket_name = mocker.patch.object(cfngin_context, "bucket_name", "bucket_name") - assert Action( - cfngin_context - ).default_cfngin_bucket_stack == CfnginStackDefinitionModel( + assert Action(cfngin_context).default_cfngin_bucket_stack == CfnginStackDefinitionModel( class_path="runway.cfngin.blueprints.cfngin_bucket.CfnginBucket", in_progress_behavior="wait", name="cfngin-bucket", @@ -86,7 +74,7 @@ def test_default_cfngin_bucket_stack( def test_run( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cfngin_context: CfnginContext, mocker: MockerFixture, ) -> None: @@ -115,7 +103,7 @@ def test_run( def test_run_cfngin_bucket_region( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cfngin_context: CfnginContext, mocker: MockerFixture, ) -> None: @@ -147,7 +135,7 @@ def test_run_cfngin_bucket_region( def test_run_exists( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cfngin_context: CfnginContext, mocker: MockerFixture, ) -> None: @@ -162,9 +150,7 @@ def test_run_exists( assert not Action(cfngin_context).run() assert f"cfngin_bucket {cfngin_bucket.name} already exists" in caplog.messages - def test_run_forbidden( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test_run_forbidden(self, cfngin_context: 
CfnginContext, mocker: MockerFixture) -> None: """Test run.""" cfngin_bucket = mocker.patch.object( Action, @@ -177,7 +163,7 @@ def test_run_forbidden( def test_run_get_stack( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cfngin_context: CfnginContext, mocker: MockerFixture, ) -> None: @@ -193,9 +179,7 @@ def test_run_get_stack( ) assert not Action(cfngin_context, provider_builder, cancel).run() get_stack.assert_called_once_with("cfngin-bucket") - assert ( - "found stack for creating cfngin_bucket: cfngin-bucket" in caplog.messages - ) + assert "found stack for creating cfngin_bucket: cfngin-bucket" in caplog.messages assert cfngin_context.stack_names == ["cfngin-bucket"] mock_deploy.Action.assert_called_once_with( context=cfngin_context, provider_builder=provider_builder, cancel=cancel @@ -208,7 +192,7 @@ def test_run_get_stack( def test_run_no_cfngin_bucket( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cfngin_context: CfnginContext, mocker: MockerFixture, ) -> None: diff --git a/tests/unit/cfngin/blueprints/test_base.py b/tests/unit/cfngin/blueprints/test_base.py index a80d73602..d40545aac 100644 --- a/tests/unit/cfngin/blueprints/test_base.py +++ b/tests/unit/cfngin/blueprints/test_base.py @@ -3,10 +3,10 @@ from __future__ import annotations import json -from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Union +from typing import TYPE_CHECKING, Any, ClassVar +from unittest.mock import Mock import pytest -from mock import Mock from troposphere import Parameter, Ref, s3, sns from runway.cfngin.blueprints.base import ( @@ -46,14 +46,14 @@ class SampleBlueprint(Blueprint): """Sample Blueprint to use for testing.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "Var0": {"type": CFNString, "default": "test"}, "Var1": {"type": str, "default": ""}, } def create_template(self) -> None: """Create template.""" - return None + return def resolve_troposphere_var(tpe: Any, value: Any, **kwargs: Any) -> Any: @@ -83,9 +83,7 @@ def create_template(self) -> None: blueprint = _Blueprint(name="test", context=cfngin_context) blueprint.render_template() - assert ( - blueprint.template.outputs[output_name].properties["Value"] == output_value - ) + assert blueprint.template.outputs[output_name].properties["Value"] == output_value def test_cfn_parameters(self, cfngin_context: CfnginContext) -> None: """Test cfn_parameters.""" @@ -107,20 +105,14 @@ def test_defined_variables(self, cfngin_context: CfnginContext) -> None: def test_description(self, cfngin_context: CfnginContext) -> None: """Test description.""" description = "my blueprint description" - obj = SampleBlueprint( - name="test", context=cfngin_context, description=description - ) + obj = SampleBlueprint(name="test", context=cfngin_context, description=description) assert obj.description == description obj.render_template() assert obj.template.description == description - def test_get_cfn_parameters( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test_get_cfn_parameters(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> None: """Test get_cfn_parameters.""" - mock_cfn_parameters = mocker.patch.object( - Blueprint, "cfn_parameters", "success" - ) + mock_cfn_parameters = mocker.patch.object(Blueprint, "cfn_parameters", "success") assert ( Blueprint(name="test", context=cfngin_context).get_cfn_parameters() == mock_cfn_parameters @@ -130,9 +122,7 @@ def 
test_get_output_definitions( self, cfngin_context: CfnginContext, mocker: MockerFixture ) -> None: """Test get_output_definitions.""" - mock_output_definitions = mocker.patch.object( - Blueprint, "output_definitions", "success" - ) + mock_output_definitions = mocker.patch.object(Blueprint, "output_definitions", "success") assert ( Blueprint(name="test", context=cfngin_context).get_output_definitions() == mock_output_definitions @@ -154,9 +144,7 @@ def test_get_parameter_values( self, cfngin_context: CfnginContext, mocker: MockerFixture ) -> None: """Test get_parameter_values.""" - mock_parameter_values = mocker.patch.object( - Blueprint, "parameter_values", "success" - ) + mock_parameter_values = mocker.patch.object(Blueprint, "parameter_values", "success") assert ( Blueprint(name="test", context=cfngin_context).get_parameter_values() == mock_parameter_values @@ -170,31 +158,24 @@ def test_get_required_parameter_definitions( Blueprint, "required_parameter_definitions", "success" ) assert ( - Blueprint( - name="test", context=cfngin_context - ).get_required_parameter_definitions() + Blueprint(name="test", context=cfngin_context).get_required_parameter_definitions() == mock_required_parameter_definitions ) - def test_get_variables( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test_get_variables(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> None: """Test get_variables.""" mock_variables = mocker.patch.object(Blueprint, "variables", "success") - assert ( - Blueprint(name="test", context=cfngin_context).get_variables() - == mock_variables - ) + assert Blueprint(name="test", context=cfngin_context).get_variables() == mock_variables def test_init_raise_attribute_error(self, cfngin_context: CfnginContext) -> None: """Test __init__.""" class _Blueprint(Blueprint): - PARAMETERS: ClassVar[Dict[str, BlueprintVariableTypeDef]] = {} + PARAMETERS: ClassVar[dict[str, BlueprintVariableTypeDef]] = {} def create_template(self) -> None: """Create template.""" - return None + return with pytest.raises(AttributeError): _Blueprint("test", cfngin_context) @@ -208,9 +189,9 @@ def test_output_definitions(self, cfngin_context: CfnginContext) -> None: def test_parameter_definitions(self, cfngin_context: CfnginContext) -> None: """Test parameter_definitions.""" - assert SampleBlueprint( - name="test", context=cfngin_context - ).parameter_definitions == {"Var0": {"type": "String", "default": "test"}} + assert SampleBlueprint(name="test", context=cfngin_context).parameter_definitions == { + "Var0": {"type": "String", "default": "test"} + } def test_parameter_values(self, cfngin_context: CfnginContext) -> None: """Test parameter_values.""" @@ -218,16 +199,12 @@ def test_parameter_values(self, cfngin_context: CfnginContext) -> None: obj.resolve_variables([]) assert obj.parameter_values == {"Var0": "test"} - def test_read_user_data( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test_read_user_data(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> None: """Test read_user_data.""" mock_read_value_from_path = mocker.patch( f"{MODULE}.read_value_from_path", return_value="something" ) - mock_parse_user_data = mocker.patch( - f"{MODULE}.parse_user_data", return_value="success" - ) + mock_parse_user_data = mocker.patch(f"{MODULE}.parse_user_data", return_value="success") obj = SampleBlueprint(name="test", context=cfngin_context) obj.resolve_variables([]) assert obj.read_user_data("path") == mock_parse_user_data.return_value @@ 
-236,9 +213,7 @@ def test_read_user_data( obj.variables, mock_read_value_from_path.return_value, obj.name ) - def test_rendered( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test_rendered(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> None: """Test rendered.""" mock_render_template = mocker.patch.object( SampleBlueprint, "render_template", return_value=("version", "render") @@ -247,32 +222,25 @@ def test_rendered( assert obj.rendered == "render" mock_render_template.assert_called_once_with() - def test_required_parameter_definitions( - self, cfngin_context: CfnginContext - ) -> None: + def test_required_parameter_definitions(self, cfngin_context: CfnginContext) -> None: """Test required_parameter_definitions.""" class _Blueprint(SampleBlueprint): - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "Var0": {"type": CFNString}, "Var1": {"type": str, "default": ""}, } - assert _Blueprint( - name="test", context=cfngin_context - ).required_parameter_definitions == {"Var0": {"type": "String"}} + assert _Blueprint(name="test", context=cfngin_context).required_parameter_definitions == { + "Var0": {"type": "String"} + } - def test_required_parameter_definitions_none( - self, cfngin_context: CfnginContext - ) -> None: + def test_required_parameter_definitions_none(self, cfngin_context: CfnginContext) -> None: """Test required_parameter_definitions.""" - assert SampleBlueprint( - name="test", context=cfngin_context - ).required_parameter_definitions + assert SampleBlueprint(name="test", context=cfngin_context).required_parameter_definitions def test_reset_template(self, cfngin_context: CfnginContext) -> None: """Test reset_template.""" - # pylint: disable=protected-access obj = SampleBlueprint(name="test", context=cfngin_context) obj._rendered = "true" obj._version = "test" @@ -289,9 +257,7 @@ def test_requires_change_set(self, cfngin_context: CfnginContext) -> None: obj.template.transform = "something" # type: ignore assert obj.requires_change_set - def test_setup_parameters( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test_setup_parameters(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> None: """Test setup_parameters.""" template = Mock() mocker.patch(f"{MODULE}.build_parameter", return_value="params") @@ -303,7 +269,7 @@ def test_to_json(self, cfngin_context: CfnginContext) -> None: """Test to_json.""" class _Blueprint(Blueprint): - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "Param1": {"default": "default", "type": CFNString}, "Param2": {"type": CFNNumber}, "Param3": {"type": CFNCommaDelimitedList}, @@ -316,9 +282,7 @@ def create_template(self) -> None: self.template.set_version("2010-09-09") self.template.set_description("TestBlueprint") - result = _Blueprint("test", context=cfngin_context).to_json( - {"Param3": "something"} - ) + result = _Blueprint("test", context=cfngin_context).to_json({"Param3": "something"}) assert isinstance(result, str) assert json.loads(result) == { "AWSTemplateFormatVersion": "2010-09-09", @@ -343,9 +307,7 @@ def test_variables(self, cfngin_context: CfnginContext) -> None: """Test variables.""" class _Blueprint(Blueprint): - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { - "Var0": {"type": str} - } + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = {"Var0": {"type": str}} def 
create_template(self) -> None: """Create template.""" @@ -358,9 +320,7 @@ def create_template(self) -> None: obj.variables = {"key": "val"} assert obj.variables == {"key": "val"} - def test_version( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test_version(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> None: """Test version.""" mock_render_template = mocker.patch.object( SampleBlueprint, "render_template", return_value=("version", "render") @@ -399,7 +359,7 @@ def test_to_parameter_value(self) -> None: (1, "1"), ], ) - def test_value(self, expected: Union[List[str], str], provided: Any) -> None: + def test_value(self, expected: list[str] | str, provided: Any) -> None: """Test value.""" assert CFNParameter("myParameter", provided).value == expected @@ -447,14 +407,10 @@ def test_resolve_variable_allowed_values() -> None: """Test resolve_variable.""" var_name = "testVar" var_def: BlueprintVariableTypeDef = {"type": str, "allowed_values": ["allowed"]} - with pytest.raises(ValueError): - resolve_variable( - var_name, var_def, Variable(var_name, "not_allowed", "cfngin"), "test" - ) + with pytest.raises(ValueError): # noqa: PT011 + resolve_variable(var_name, var_def, Variable(var_name, "not_allowed", "cfngin"), "test") assert ( - resolve_variable( - var_name, var_def, Variable(var_name, "allowed", "cfngin"), "test" - ) + resolve_variable(var_name, var_def, Variable(var_name, "allowed", "cfngin"), "test") == "allowed" ) @@ -484,9 +440,7 @@ def test_resolve_variable_provided_not_resolved(mocker: MockerFixture) -> None: """Test resolve_variable.""" mocker.patch("runway.variables.CFNGIN_LOOKUP_HANDLERS", {"mock": Mock()}) with pytest.raises(UnresolvedBlueprintVariable): - resolve_variable( - "name", {"type": str}, Variable("name", "${mock abc}", "cfngin"), "test" - ) + resolve_variable("name", {"type": str}, Variable("name", "${mock abc}", "cfngin"), "test") def test_resolve_variable_troposphere_fail() -> None: @@ -654,14 +608,9 @@ def test_validate_variable_type_python_raise_type_error() -> None: def test_validate_variable_type_troposphere(mocker: MockerFixture) -> None: """Test validate_variable_type.""" - mock_create = mocker.patch.object( - TroposphereType, "create", side_effect=["success", Exception] - ) + mock_create = mocker.patch.object(TroposphereType, "create", side_effect=["success", Exception]) value = {"Endpoint": "test", "Protocol": "test"} - assert ( - validate_variable_type("test", TroposphereType(sns.Subscription), value) - == "success" - ) + assert validate_variable_type("test", TroposphereType(sns.Subscription), value) == "success" mock_create.assert_called_once_with(value) with pytest.raises(ValidatorError): validate_variable_type("test", TroposphereType(sns.Subscription), value) diff --git a/tests/unit/cfngin/blueprints/test_cfngin_bucket.py b/tests/unit/cfngin/blueprints/test_cfngin_bucket.py index eeeb41f70..da2e2abb3 100644 --- a/tests/unit/cfngin/blueprints/test_cfngin_bucket.py +++ b/tests/unit/cfngin/blueprints/test_cfngin_bucket.py @@ -3,8 +3,8 @@ from __future__ import annotations from typing import TYPE_CHECKING +from unittest.mock import ANY, Mock -from mock import ANY, Mock from troposphere import s3 from runway import __version__ @@ -32,9 +32,7 @@ def test_bucket(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> N "VersioningStatus": Mock(ref="Ref(VersioningStatus)"), }, ) - mock_bucket = Mock( - get_att=Mock(return_value="get_att"), ref=Mock(return_value="ref") - ) + mock_bucket = 
Mock(get_att=Mock(return_value="get_att"), ref=Mock(return_value="ref")) mock_bucket.return_value = mock_bucket mocker.patch(f"{MODULE}.s3", Bucket=mock_bucket) bucket_encryption = mocker.patch.object( @@ -80,9 +78,7 @@ def test_bucket_encryption(self, cfngin_context: CfnginContext) -> None: == "AES256" ) - def test_bucket_name( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test_bucket_name(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> None: """Test bucket_name.""" mocker.patch.object( CfnginBucket, @@ -110,9 +106,7 @@ def test_bucket_tags(self, cfngin_context: CfnginContext) -> None: obj = CfnginBucket("test", cfngin_context) assert obj.bucket_tags.to_dict() == [{"Key": "version", "Value": __version__}] - def test_create_template( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test_create_template(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> None: """Test create_template.""" bucket = mocker.patch.object(CfnginBucket, "bucket", "bucket") obj = CfnginBucket("test", cfngin_context) diff --git a/tests/unit/cfngin/blueprints/test_raw.py b/tests/unit/cfngin/blueprints/test_raw.py index 312188204..4fb37da1f 100644 --- a/tests/unit/cfngin/blueprints/test_raw.py +++ b/tests/unit/cfngin/blueprints/test_raw.py @@ -1,15 +1,13 @@ """Tests for runway.cfngin.blueprints.raw.""" -# pylint: disable=unused-argument -# pyright: basic from __future__ import annotations import json from pathlib import Path from typing import TYPE_CHECKING, cast +from unittest.mock import MagicMock, Mock import pytest -from mock import MagicMock, Mock from runway.cfngin.blueprints.raw import ( RawTemplateBlueprint, @@ -26,7 +24,6 @@ from ..factories import mock_context if TYPE_CHECKING: - from pytest import MonkeyPatch from pytest_mock import MockerFixture from runway.context import CfnginContext @@ -137,13 +134,12 @@ def test_parameter_definitions_yaml(self) -> None: "Param2": {"Default": "default", "Type": "CommaDelimitedList"}, } - def test_parameter_values( - self, cfngin_context: CfnginContext, tmp_path: Path - ) -> None: + def test_parameter_values(self, cfngin_context: CfnginContext, tmp_path: Path) -> None: """Test parameter_values.""" obj = RawTemplateBlueprint("test", cfngin_context, raw_template_path=tmp_path) - assert not obj.parameter_values and isinstance(obj.parameter_values, dict) - obj._resolved_variables = {"var": "val"} # pylint: disable=protected-access + assert not obj.parameter_values + assert isinstance(obj.parameter_values, dict) + obj._resolved_variables = {"var": "val"} del obj.parameter_values assert obj.parameter_values == {"var": "val"} @@ -152,18 +148,14 @@ def test_required_parameter_definitions_json(self) -> None: blueprint = RawTemplateBlueprint( name="test", context=MagicMock(), raw_template_path=RAW_JSON_TEMPLATE_PATH ) - assert blueprint.required_parameter_definitions == { - "Param1": {"Type": "String"} - } + assert blueprint.required_parameter_definitions == {"Param1": {"Type": "String"}} def test_required_parameter_definitions_yaml(self) -> None: """Verify required_parameter_definitions.""" blueprint = RawTemplateBlueprint( name="test", context=MagicMock(), raw_template_path=RAW_YAML_TEMPLATE_PATH ) - assert blueprint.required_parameter_definitions == { - "Param1": {"Type": "String"} - } + assert blueprint.required_parameter_definitions == {"Param1": {"Type": "String"}} def test_requires_change_set( self, cfngin_context: CfnginContext, mocker: MockerFixture, tmp_path: Path @@ -189,13 +181,9 
@@ def test_to_dict( mock_parse_cloudformation_template = mocker.patch( f"{MODULE}.parse_cloudformation_template", return_value="success" ) - mock_rendered = mocker.patch.object( - RawTemplateBlueprint, "rendered", "rendered template" - ) + mock_rendered = mocker.patch.object(RawTemplateBlueprint, "rendered", "rendered template") assert ( - RawTemplateBlueprint( - "test", cfngin_context, raw_template_path=tmp_path - ).to_dict() + RawTemplateBlueprint("test", cfngin_context, raw_template_path=tmp_path).to_dict() == mock_parse_cloudformation_template.return_value ) mock_parse_cloudformation_template.assert_called_once_with(mock_rendered) @@ -204,21 +192,15 @@ def test_to_json( self, cfngin_context: CfnginContext, mocker: MockerFixture, tmp_path: Path ) -> None: """Test to_json.""" - mock_to_dict = mocker.patch.object( - RawTemplateBlueprint, "to_dict", return_value="dict" - ) + mock_to_dict = mocker.patch.object(RawTemplateBlueprint, "to_dict", return_value="dict") mock_dumps = Mock(return_value="success") mocker.patch(f"{MODULE}.json", dumps=mock_dumps) assert ( - RawTemplateBlueprint( - "test", cfngin_context, raw_template_path=tmp_path - ).to_json() + RawTemplateBlueprint("test", cfngin_context, raw_template_path=tmp_path).to_json() == mock_dumps.return_value ) mock_to_dict.assert_called_once_with() - mock_dumps.assert_called_once_with( - mock_to_dict.return_value, sort_keys=True, indent=4 - ) + mock_dumps.assert_called_once_with(mock_to_dict.return_value, sort_keys=True, indent=4) def test_to_json_cfn_template(self, cfngin_context: CfnginContext) -> None: """Test to_json.""" @@ -230,9 +212,7 @@ def test_to_json_cfn_template(self, cfngin_context: CfnginContext) -> None: "Param1": {"Type": "String"}, "Param2": {"Default": "default", "Type": "CommaDelimitedList"}, }, - "Resources": { - "Dummy": {"Type": "AWS::CloudFormation::WaitConditionHandle"} - }, + "Resources": {"Dummy": {"Type": "AWS::CloudFormation::WaitConditionHandle"}}, "Outputs": {"DummyId": {"Value": "dummy-1234"}}, }, sort_keys=True, @@ -257,9 +237,7 @@ def test_to_json_j2(self) -> None: "Param1": {"Type": "String"}, "Param2": {"Default": "default", "Type": "CommaDelimitedList"}, }, - "Resources": { - "Dummy": {"Type": "AWS::CloudFormation::WaitConditionHandle"} - }, + "Resources": {"Dummy": {"Type": "AWS::CloudFormation::WaitConditionHandle"}}, "Outputs": {"DummyId": {"Value": "dummy-bar-param1val-foo-1234"}}, }, sort_keys=True, @@ -293,9 +271,7 @@ def test_render_template( self, cfngin_context: CfnginContext, mocker: MockerFixture, tmp_path: Path ) -> None: """Test render_template.""" - mock_rendered = mocker.patch.object( - RawTemplateBlueprint, "rendered", "rendered" - ) + mock_rendered = mocker.patch.object(RawTemplateBlueprint, "rendered", "rendered") mock_version = mocker.patch.object(RawTemplateBlueprint, "version", "version") assert RawTemplateBlueprint( "test", cfngin_context, raw_template_path=tmp_path @@ -307,7 +283,7 @@ def test_variables(self, cfngin_context: CfnginContext, tmp_path: Path) -> None: with pytest.raises(UnresolvedBlueprintVariables): _ = obj.variables # obj.resolve_variables([Variable("Var0", "test")]) - obj._resolved_variables = {"var": "val"} # pylint: disable=protected-access + obj._resolved_variables = {"var": "val"} assert obj.variables == {"var": "val"} obj.variables = {"key": "val"} assert obj.variables == {"key": "val"} @@ -318,9 +294,7 @@ def test_version( """Test version.""" mocker.patch.object(RawTemplateBlueprint, "rendered", "success") assert ( - RawTemplateBlueprint( - "test", 
cfngin_context, raw_template_path=tmp_path - ).version + RawTemplateBlueprint("test", cfngin_context, raw_template_path=tmp_path).version == "260ca9dd" ) @@ -335,14 +309,12 @@ def test_get_template_path_local_file(tmp_path: Path) -> None: assert template_path.samefile(cast(Path, result)) -def test_get_template_path_invalid_file(cd_tmp_path: Path) -> None: +def test_get_template_path_invalid_file(cd_tmp_path: Path) -> None: # noqa: ARG001 """Verify get_template_path with an invalid filename.""" assert get_template_path(Path("cfn_template.json")) is None -def test_get_template_path_file_in_syspath( - tmp_path: Path, monkeypatch: MonkeyPatch -) -> None: +def test_get_template_path_file_in_syspath(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None: """Verify get_template_path with a file in sys.path. This ensures templates are able to be retrieved from remote packages. @@ -358,10 +330,7 @@ def test_get_template_path_file_in_syspath( def test_resolve_variable() -> None: """Test resolve_variable.""" - assert ( - resolve_variable(Variable("var", "val", variable_type="cfngin"), "test") - == "val" - ) + assert resolve_variable(Variable("var", "val", variable_type="cfngin"), "test") == "val" def test_resolve_variable_raise_unresolved() -> None: diff --git a/tests/unit/cfngin/blueprints/test_testutil.py b/tests/unit/cfngin/blueprints/test_testutil.py index 453bb051a..f94781ad1 100644 --- a/tests/unit/cfngin/blueprints/test_testutil.py +++ b/tests/unit/cfngin/blueprints/test_testutil.py @@ -1,8 +1,8 @@ """Tests for runway.cfngin.blueprints.testutil.""" -# pyright: basic import unittest +import pytest from troposphere import ecr from runway.cfngin.blueprints.base import Blueprint @@ -24,9 +24,7 @@ class Repositories(Blueprint): def create_template(self) -> None: """Create template.""" for repo in self.variables["Repositories"]: - self.template.add_resource( - ecr.Repository(f"{repo}Repository", RepositoryName=repo) - ) + self.template.add_resource(ecr.Repository(f"{repo}Repository", RepositoryName=repo)) class TestRepositories(BlueprintTestCase): @@ -38,9 +36,7 @@ def test_create_template_passes(self) -> None: """Test create template passes.""" ctx = CfnginContext() blueprint = Repositories("test_repo", ctx) - blueprint.resolve_variables( - [Variable("Repositories", ["repo1", "repo2"], "cfngin")] - ) + blueprint.resolve_variables([Variable("Repositories", ["repo1", "repo2"], "cfngin")]) blueprint.create_template() self.assertRenderedBlueprint(blueprint) @@ -52,7 +48,7 @@ def test_create_template_fails(self) -> None: [Variable("Repositories", ["repo1", "repo2", "repo3"], "cfngin")] ) blueprint.create_template() - with self.assertRaises(AssertionError): + with pytest.raises(AssertionError): self.assertRenderedBlueprint(blueprint) diff --git a/tests/unit/cfngin/blueprints/variables/test_types.py b/tests/unit/cfngin/blueprints/variables/test_types.py index ecd81c085..09e592493 100644 --- a/tests/unit/cfngin/blueprints/variables/test_types.py +++ b/tests/unit/cfngin/blueprints/variables/test_types.py @@ -3,7 +3,6 @@ from __future__ import annotations import re -from typing import Type import pytest @@ -12,12 +11,8 @@ PATTERN_LIST = r"(AWS|CFN)?(?P.*)List?" 
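# A minimal sketch of the typing migration these test_types.py hunks apply:
# on Python >= 3.9 the builtin generics (`type[...]`, `dict[...]`, `list[...]`)
# subscript directly, replacing `typing.Type`/`Dict`/`List`; `CFNType` here is
# a bare stand-in for illustration, not runway's real class.
from __future__ import annotations


class CFNType:
    """Minimal stand-in for runway's CFNType."""


def assert_is_cfn_subclass(kls: type[CFNType]) -> None:
    """Builtin ``type`` replaces ``typing.Type``, so no typing import is needed."""
    assert issubclass(kls, CFNType)


assert_is_cfn_subclass(CFNType)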
PATTERN_SUB_AWS_PARAMETER_TYPE = r"(AWS|::)" -AWS_CLASSES = [ - kls for kls in CFNType.__subclasses__() if not kls.__name__.startswith("CFN") -] -CFN_CLASSES = [ - kls for kls in CFNType.__subclasses__() if kls.__name__.startswith("CFN") -] +AWS_CLASSES = [kls for kls in CFNType.__subclasses__() if not kls.__name__.startswith("CFN")] +CFN_CLASSES = [kls for kls in CFNType.__subclasses__() if kls.__name__.startswith("CFN")] def handle_ssm_parameter_value(value: str) -> str: @@ -28,7 +23,7 @@ def handle_ssm_parameter_value(value: str) -> str: @pytest.mark.parametrize("kls", AWS_CLASSES) -def test_aws_types(kls: Type[CFNType]) -> None: +def test_aws_types(kls: type[CFNType]) -> None: """Test variable types for parameter types beginning with ``AWS::``. This does not test the formatting of the value. @@ -47,7 +42,7 @@ def test_aws_types(kls: Type[CFNType]) -> None: @pytest.mark.parametrize("kls", CFN_CLASSES) -def test_cfn_types(kls: Type[CFNType]) -> None: +def test_cfn_types(kls: type[CFNType]) -> None: """Test variable types beginning with CFN.""" if kls.__name__.endswith("List") and "CommaDelimited" not in kls.__name__: match = re.search(PATTERN_LIST, kls.__name__) diff --git a/tests/unit/cfngin/conftest.py b/tests/unit/cfngin/conftest.py index fd4ccc771..48b5ce9a2 100644 --- a/tests/unit/cfngin/conftest.py +++ b/tests/unit/cfngin/conftest.py @@ -1,7 +1,5 @@ """Pytest fixtures and plugins.""" -# pyright: basic -import os from pathlib import Path import pytest @@ -12,17 +10,16 @@ @pytest.fixture(scope="package") def cfngin_fixtures() -> Path: """CFNgin fixture directory Path object.""" - path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures") - return Path(path) + return Path(__file__).parent / "fixtures" -@pytest.fixture +@pytest.fixture() def empty_dag() -> DAG: """Create an empty DAG.""" return DAG() -@pytest.fixture +@pytest.fixture() def basic_dag() -> DAG: """Create a basic DAG.""" dag = DAG() diff --git a/tests/unit/cfngin/factories.py b/tests/unit/cfngin/factories.py index a29ecb981..9b60fd666 100644 --- a/tests/unit/cfngin/factories.py +++ b/tests/unit/cfngin/factories.py @@ -1,12 +1,9 @@ """Factories for tests.""" -# pylint: disable=unused-argument -# pyright: basic from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, NamedTuple, Optional - -from mock import MagicMock +from typing import TYPE_CHECKING, Any, NamedTuple +from unittest.mock import MagicMock from runway.cfngin.providers.aws.default import ProviderBuilder from runway.config import CfnginConfig, CfnginStackDefinitionModel @@ -27,7 +24,7 @@ class Lookup(NamedTuple): class MockThreadingEvent: """Mock thread events.""" - def wait(self, timeout: Optional[int] = None) -> bool: + def wait(self, timeout: int | None = None) -> bool: # noqa: ARG002 """Mock wait method.""" return False @@ -35,15 +32,13 @@ def wait(self, timeout: Optional[int] = None) -> bool: class MockProviderBuilder(ProviderBuilder): """Mock provider builder.""" - def __init__( # pylint: disable=super-init-not-called - self, *, provider: Provider, region: Optional[str] = None, **_: Any - ) -> None: + def __init__(self, *, provider: Provider, region: str | None = None, **_: Any) -> None: """Instantiate class.""" self.provider = provider self.region = region def build( - self, *, profile: Optional[str] = None, region: Optional[str] = None + self, *, profile: str | None = None, region: str | None = None # noqa: ARG002 ) -> Provider: """Mock build method.""" return self.provider @@ -56,7 +51,7 @@ def 
mock_provider(**kwargs: Any) -> MagicMock: def mock_context( namespace: str = "default", - extra_config_args: Optional[Dict[str, Any]] = None, + extra_config_args: dict[str, Any] | None = None, **kwargs: Any, ) -> CfnginContext: """Mock context.""" @@ -73,7 +68,7 @@ def generate_definition( base_name: str, stack_id: Any = None, **overrides: Any ) -> CfnginStackDefinitionModel: """Generate definitions.""" - definition: Dict[str, Any] = { + definition: dict[str, Any] = { "name": f"{base_name}-{stack_id}" if stack_id else base_name, "class_path": f"tests.unit.cfngin.fixtures.mock_blueprints.{base_name.upper()}", "requires": [], @@ -82,9 +77,7 @@ def generate_definition( return CfnginStackDefinitionModel(**definition) -def mock_lookup( - lookup_input: Any, lookup_type: str, raw: Optional[str] = None -) -> Lookup: +def mock_lookup(lookup_input: Any, lookup_type: str, raw: str | None = None) -> Lookup: """Mock lookup.""" if raw is None: raw = f"{lookup_type} {lookup_input}" @@ -109,11 +102,11 @@ def myfile_test(self, client_stub): """ - def __init__(self, client_stub: Any): + def __init__(self, client_stub: Any) -> None: """Instantiate class.""" self.client_stub = client_stub - def client(self, region: str) -> Any: + def client(self, region: str) -> Any: # noqa: ARG002 """Return the stubbed client object. Args: diff --git a/tests/unit/cfngin/fixtures/cfn_template.json b/tests/unit/cfngin/fixtures/cfn_template.json index 623cbd662..cadc99a90 100644 --- a/tests/unit/cfngin/fixtures/cfn_template.json +++ b/tests/unit/cfngin/fixtures/cfn_template.json @@ -1,6 +1,11 @@ { "AWSTemplateFormatVersion": "2010-09-09", "Description": "TestTemplate", + "Outputs": { + "DummyId": { + "Value": "dummy-1234" + } + }, "Parameters": { "Param1": { "Type": "String" @@ -11,13 +16,8 @@ } }, "Resources": { - "Dummy": { - "Type": "AWS::CloudFormation::WaitConditionHandle" - } - }, - "Outputs": { - "DummyId": { - "Value": "dummy-1234" - } + "Dummy": { + "Type": "AWS::CloudFormation::WaitConditionHandle" + } } } diff --git a/tests/unit/cfngin/fixtures/cfn_template.yaml b/tests/unit/cfngin/fixtures/cfn_template.yaml index 41826a17f..fbe3b38c5 100644 --- a/tests/unit/cfngin/fixtures/cfn_template.yaml +++ b/tests/unit/cfngin/fixtures/cfn_template.yaml @@ -10,10 +10,9 @@ Resources: Bucket: Type: AWS::S3::Bucket Properties: - BucketName: - !Join - - "-" - - - !Ref "AWS::StackName" + BucketName: !Join + - "-" + - - !Ref "AWS::StackName" - !Ref "AWS::Region" Dummy: Type: AWS::CloudFormation::WaitConditionHandle diff --git a/tests/unit/cfngin/fixtures/mock_blueprints.py b/tests/unit/cfngin/fixtures/mock_blueprints.py index 5c8312d32..8daf38b10 100644 --- a/tests/unit/cfngin/fixtures/mock_blueprints.py +++ b/tests/unit/cfngin/fixtures/mock_blueprints.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, ClassVar, Dict +from typing import TYPE_CHECKING, ClassVar import awacs import awacs.cloudformation @@ -31,7 +31,7 @@ class FunctionalTests(Blueprint): """Creates a stack with an IAM user and access key for functional tests.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "Namespace": { "type": CFNString, "description": "The namespace that the tests will use. 
" @@ -166,9 +166,7 @@ def create_template(self) -> None: template.add_output(Output("User", Value=Ref(user))) template.add_output(Output("AccessKeyId", Value=Ref(key))) template.add_output( - Output( - "SecretAccessKey", Value=GetAtt("FunctionalTestKey", "SecretAccessKey") - ) + Output("SecretAccessKey", Value=GetAtt("FunctionalTestKey", "SecretAccessKey")) ) template.add_output(Output("FunctionalTestRole", Value=GetAtt(role, "Arn"))) @@ -176,7 +174,7 @@ def create_template(self) -> None: class Dummy(Blueprint): """Dummy blueprint.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "StringVariable": {"type": str, "default": ""} } @@ -194,7 +192,7 @@ class Dummy2(Blueprint): """ - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "StringVariable": {"type": str, "default": ""} } @@ -214,7 +212,7 @@ class LongRunningDummy(Blueprint): """ - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "Count": { "type": int, "description": "The # of WaitConditionHandles to create.", @@ -228,7 +226,7 @@ class LongRunningDummy(Blueprint): }, "OutputValue": { "type": str, - "description": "The value to put in an output to allow for " "updates.", + "description": "The value to put in an output to allow for updates.", "default": "DefaultOutput", }, } @@ -271,7 +269,7 @@ class Broken(Blueprint): """ - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "StringVariable": {"type": str, "default": ""} } @@ -294,7 +292,7 @@ def create_template(self) -> None: class VPC(Blueprint): """VPC blueprint.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "AZCount": {"type": int, "default": 2}, "PrivateSubnets": { "type": CFNCommaDelimitedList, @@ -351,7 +349,7 @@ def create_template(self) -> None: class DiffTester(Blueprint): """Diff test blueprint.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "InstanceType": { "type": CFNString, "description": "NAT EC2 instance type.", @@ -359,12 +357,11 @@ class DiffTester(Blueprint): }, "WaitConditionCount": { "type": int, - "description": "Number of WaitConditionHandle resources " - "to add to the template", + "description": "Number of WaitConditionHandle resources to add to the template", }, } - def create_template(self): + def create_template(self) -> None: """Create template.""" for i in range(self.variables["WaitConditionCount"]): self.template.add_resource(WaitConditionHandle(f"VPC{i}")) @@ -373,7 +370,7 @@ def create_template(self): class Bastion(Blueprint): """Bastion blueprint.""" - VARIABLES: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + VARIABLES: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "VpcId": {"type": EC2VPCId, "description": "Vpc Id"}, "DefaultSG": { "type": EC2SecurityGroupId, @@ -381,15 +378,15 @@ class Bastion(Blueprint): }, "PublicSubnets": { "type": EC2SubnetIdList, - "description": "Subnets to deploy public " "instances in.", + "description": "Subnets to deploy public instances in.", }, "PrivateSubnets": { "type": EC2SubnetIdList, - "description": "Subnets to deploy private " "instances in.", + "description": "Subnets to deploy private instances in.", }, "AvailabilityZones": { "type": 
CFNCommaDelimitedList, - "description": "Availability Zones to deploy " "instances in.", + "description": "Availability Zones to deploy instances in.", }, "InstanceType": { "type": CFNString, @@ -419,7 +416,7 @@ class Bastion(Blueprint): }, } - def create_template(self): + def create_template(self) -> None: """Create template.""" return @@ -427,7 +424,7 @@ def create_template(self): class PreOneOhBastion(Blueprint): """Used to ensure old blueprints won't be usable in 1.0.""" - PARAMETERS: ClassVar[Dict[str, BlueprintVariableTypeDef]] = { + PARAMETERS: ClassVar[dict[str, BlueprintVariableTypeDef]] = { "VpcId": {"type": "AWS::EC2::VPC::Id", "description": "Vpc Id"}, "DefaultSG": { "type": "AWS::EC2::SecurityGroup::Id", @@ -435,15 +432,15 @@ class PreOneOhBastion(Blueprint): }, "PublicSubnets": { "type": "List", - "description": "Subnets to deploy public " "instances in.", + "description": "Subnets to deploy public instances in.", }, "PrivateSubnets": { "type": "List", - "description": "Subnets to deploy private " "instances in.", + "description": "Subnets to deploy private instances in.", }, "AvailabilityZones": { "type": "CommaDelimitedList", - "description": "Availability Zones to deploy " "instances in.", + "description": "Availability Zones to deploy instances in.", }, "InstanceType": { "type": "String", diff --git a/tests/unit/cfngin/fixtures/mock_hooks.py b/tests/unit/cfngin/fixtures/mock_hooks.py index 665d88009..5ee91a062 100644 --- a/tests/unit/cfngin/fixtures/mock_hooks.py +++ b/tests/unit/cfngin/fixtures/mock_hooks.py @@ -1,9 +1,9 @@ """Mock hook.""" -from typing import Any, Dict +from typing import Any -def mock_hook(*, value: Any, **_: Any) -> Dict[str, Any]: +def mock_hook(*, value: Any, **_: Any) -> dict[str, Any]: """Mock hook. Returns: diff --git a/tests/unit/cfngin/fixtures/vpc-bastion-db-web-pre-1.0.yaml b/tests/unit/cfngin/fixtures/vpc-bastion-db-web-pre-1.0.yaml index 33b866d25..290d07dc8 100644 --- a/tests/unit/cfngin/fixtures/vpc-bastion-db-web-pre-1.0.yaml +++ b/tests/unit/cfngin/fixtures/vpc-bastion-db-web-pre-1.0.yaml @@ -10,7 +10,8 @@ mappings: us-east-1: NAT: ami-ad227cc4 ubuntu1404: &ubuntu1404 ami-74e27e1c # Setting an anchor - bastion: *ubuntu1404 # Using the anchor above + bastion: *ubuntu1404 + # Using the anchor above us-west-2: NAT: ami-290f4119 ubuntu1404west2: &ubuntu1404west2 ami-5189a661 @@ -52,7 +53,7 @@ stacks: # parameters the stack actually needs and only submits those to each # stack. For example, most stacks are in the PrivateSubnets, but not # the PublicSubnets, but cfngin deals with it for you. - << : *vpc_parameters + <<: *vpc_parameters InstanceType: m3.medium OfficeNetwork: 203.0.113.0/24 MinSize: 2 diff --git a/tests/unit/cfngin/fixtures/vpc-bastion-db-web.yaml b/tests/unit/cfngin/fixtures/vpc-bastion-db-web.yaml index e73e79224..34417576c 100644 --- a/tests/unit/cfngin/fixtures/vpc-bastion-db-web.yaml +++ b/tests/unit/cfngin/fixtures/vpc-bastion-db-web.yaml @@ -10,7 +10,8 @@ mappings: us-east-1: NAT: ami-ad227cc4 ubuntu1404: &ubuntu1404 ami-74e27e1c # Setting an anchor - bastion: *ubuntu1404 # Using the anchor above + bastion: *ubuntu1404 + # Using the anchor above us-west-2: NAT: ami-290f4119 ubuntu1404west2: &ubuntu1404west2 ami-5189a661 @@ -51,7 +52,7 @@ stacks: # parameters the stack actually needs and only submits those to each # stack. For example, most stacks are in the PrivateSubnets, but not # the PublicSubnets, but cfngin deals with it for you. 
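# A minimal sketch (assuming PyYAML, with placeholder values) of what the
# `<<: *vpc_parameters` merge key in the fixture below resolves to: the
# anchored mapping is merged into the stack's variables, then extended or
# overridden by the keys that follow it.
import yaml

SNIPPET = """
vpc_parameters: &vpc_parameters
  VpcId: vpc-0123
  DefaultSG: sg-0123
web:
  <<: *vpc_parameters
  InstanceType: m3.medium
"""

assert yaml.safe_load(SNIPPET)["web"] == {
    "VpcId": "vpc-0123",
    "DefaultSG": "sg-0123",
    "InstanceType": "m3.medium",
}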
- << : *vpc_parameters + <<: *vpc_parameters InstanceType: m3.medium OfficeNetwork: 203.0.113.0/24 MinSize: 2 diff --git a/tests/unit/cfngin/hooks/awslambda/factories.py b/tests/unit/cfngin/hooks/awslambda/factories.py index 6dd6a38e0..b6a62510b 100644 --- a/tests/unit/cfngin/hooks/awslambda/factories.py +++ b/tests/unit/cfngin/hooks/awslambda/factories.py @@ -3,8 +3,7 @@ from __future__ import annotations from typing import TYPE_CHECKING - -from mock import Mock +from unittest.mock import Mock from runway.cfngin.hooks.awslambda.base_classes import Project from runway.cfngin.hooks.awslambda.models.args import AwsLambdaHookArgs @@ -21,8 +20,7 @@ class MockProject(Project[AwsLambdaHookArgs]): def build_directory(self) -> Path: """Directory being used to build deployment package.""" result = ( - self.source_code - / f"{self.source_code.root_directory.name}.{self.source_code.md5_hash}" + self.source_code / f"{self.source_code.root_directory.name}.{self.source_code.md5_hash}" ) result.mkdir(exist_ok=True, parents=True) return result diff --git a/tests/unit/cfngin/hooks/awslambda/models/test_args.py b/tests/unit/cfngin/hooks/awslambda/models/test_args.py index 5e11cb21f..9b6021a69 100644 --- a/tests/unit/cfngin/hooks/awslambda/models/test_args.py +++ b/tests/unit/cfngin/hooks/awslambda/models/test_args.py @@ -3,7 +3,7 @@ from __future__ import annotations from pathlib import Path -from typing import Any, Dict +from typing import Any import pytest from pydantic import ValidationError @@ -43,7 +43,7 @@ def test__validate_runtime_or_docker(self, tmp_path: Path) -> None: "kwargs", [{"image": "test"}, {"file": ""}, {"file": "", "image": "test"}] ) def test__validate_runtime_or_docker_docker_no_runtime( - self, kwargs: Dict[str, Any], tmp_path: Path + self, kwargs: dict[str, Any], tmp_path: Path ) -> None: """Test _validate_runtime_or_docker no runtime if Docker.""" if "file" in kwargs: @@ -52,7 +52,7 @@ def test__validate_runtime_or_docker_docker_no_runtime( kwargs["file"] = dockerfile obj = AwsLambdaHookArgs( bucket_name="test-bucket", - docker=DockerOptions.parse_obj(kwargs), + docker=DockerOptions.model_validate(kwargs), source_code=tmp_path, ) assert not obj.runtime @@ -63,30 +63,26 @@ def test__validate_runtime_or_docker_docker_disabled(self, tmp_path: Path) -> No With ``runtime=None`` and ``docker.disabled=True``. 
""" - with pytest.raises(ValidationError) as excinfo: + with pytest.raises( + ValidationError, + match="runtime\n Value error, runtime must be provided if docker.disabled is True", + ): AwsLambdaHookArgs( bucket_name="test-bucket", docker=DockerOptions(disabled=True), source_code=tmp_path, ) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("runtime",) - assert errors[0]["msg"] == "runtime must be provided if docker.disabled is True" - def test__validate_runtime_or_docker_no_runtime_or_docker( - self, tmp_path: Path - ) -> None: + def test__validate_runtime_or_docker_no_runtime_or_docker(self, tmp_path: Path) -> None: """Test _validate_runtime_or_docker no runtime or docker.""" - with pytest.raises(ValidationError) as excinfo: + with pytest.raises( + ValidationError, + match="runtime\n Value error, docker.file, docker.image, or runtime is required", + ): AwsLambdaHookArgs( bucket_name="test-bucket", source_code=tmp_path, ) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("runtime",) - assert errors[0]["msg"] == "docker.file, docker.image, or runtime is required" def test_field_defaults(self, tmp_path: Path) -> None: """Test field defaults.""" @@ -95,40 +91,36 @@ def test_field_defaults(self, tmp_path: Path) -> None: runtime="test", source_code=tmp_path, ) - assert not obj.extend_gitignore and isinstance(obj.extend_gitignore, list) + assert not obj.extend_gitignore + assert isinstance(obj.extend_gitignore, list) assert not obj.object_prefix def test_source_code_is_file(self, tmp_path: Path) -> None: """Test source_code is file.""" source_path = tmp_path / "foo" source_path.write_text("bar") - with pytest.raises(ValidationError) as excinfo: + with pytest.raises( + ValidationError, + match="source_code\n Path does not point to a directory", + ): AwsLambdaHookArgs( # these are all required fields bucket_name="test-bucket", runtime="test", source_code=source_path, ) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("source_code",) - assert errors[0]["msg"] == f'path "{source_path}" does not point to a directory' def test_source_code_not_exist(self, tmp_path: Path) -> None: """Test source_code directory does not exist.""" source_path = tmp_path / "foo" - with pytest.raises(ValidationError) as excinfo: + with pytest.raises( + ValidationError, + match="source_code\n Path does not point to a directory", + ): AwsLambdaHookArgs( # these are all required fields bucket_name="test-bucket", runtime="test", source_code=source_path, ) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("source_code",) - assert ( - errors[0]["msg"] - == f'file or directory at path "{source_path}" does not exist' - ) class TestPythonHookArgs: diff --git a/tests/unit/cfngin/hooks/awslambda/models/test_responses.py b/tests/unit/cfngin/hooks/awslambda/models/test_responses.py index 0484b49cd..7ac956fbf 100644 --- a/tests/unit/cfngin/hooks/awslambda/models/test_responses.py +++ b/tests/unit/cfngin/hooks/awslambda/models/test_responses.py @@ -13,7 +13,7 @@ class TestAwsLambdaHookDeployResponse: def test_extra(self) -> None: """Test extra fields.""" - with pytest.raises(ValidationError) as excinfo: + with pytest.raises(ValidationError, match="invalid\n Extra inputs are not permitted"): AwsLambdaHookDeployResponse( bucket_name="test-bucket", code_sha256="sha256", @@ -21,7 +21,3 @@ def test_extra(self) -> None: object_key="key", runtime="test", ) - errors = 
excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("invalid",) - assert errors[0]["msg"] == "extra fields not permitted" diff --git a/tests/unit/cfngin/hooks/awslambda/python_requirements/test__deployment_package.py b/tests/unit/cfngin/hooks/awslambda/python_requirements/test__deployment_package.py index 388243414..eda784429 100644 --- a/tests/unit/cfngin/hooks/awslambda/python_requirements/test__deployment_package.py +++ b/tests/unit/cfngin/hooks/awslambda/python_requirements/test__deployment_package.py @@ -3,9 +3,9 @@ from __future__ import annotations from typing import TYPE_CHECKING +from unittest.mock import Mock, call import pytest -from mock import Mock, call from runway.cfngin.hooks.awslambda.python_requirements import PythonDeploymentPackage @@ -23,9 +23,7 @@ class TestPythonDeploymentPackage: @pytest.mark.parametrize( "slim, strip", [(False, False), (False, True), (True, False), (True, True)] ) - def test_gitignore_filter( - self, mocker: MockerFixture, slim: bool, strip: bool - ) -> None: + def test_gitignore_filter(self, mocker: MockerFixture, slim: bool, strip: bool) -> None: """Test gitignore_filter.""" mock_ignore_parser = Mock() mock_ignore_parser_class = mocker.patch( @@ -35,9 +33,7 @@ def test_gitignore_filter( project.args.slim = slim project.args.strip = strip if slim: - assert ( - PythonDeploymentPackage(project).gitignore_filter == mock_ignore_parser - ) + assert PythonDeploymentPackage(project).gitignore_filter == mock_ignore_parser mock_ignore_parser_class.assert_called_once_with() calls = [ call("**/*.dist-info*", project.dependency_directory), @@ -57,8 +53,6 @@ def test_insert_layer_dir(self, tmp_path: Path) -> None: == tmp_path / "python" / "foo.txt" ) assert ( - PythonDeploymentPackage.insert_layer_dir( - tmp_path / "bar" / "foo.txt", tmp_path - ) + PythonDeploymentPackage.insert_layer_dir(tmp_path / "bar" / "foo.txt", tmp_path) == tmp_path / "python" / "bar" / "foo.txt" ) diff --git a/tests/unit/cfngin/hooks/awslambda/python_requirements/test__docker.py b/tests/unit/cfngin/hooks/awslambda/python_requirements/test__docker.py index 5f3a1c9de..71090c079 100644 --- a/tests/unit/cfngin/hooks/awslambda/python_requirements/test__docker.py +++ b/tests/unit/cfngin/hooks/awslambda/python_requirements/test__docker.py @@ -3,11 +3,11 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING +from unittest.mock import Mock import pytest from docker.types.services import Mount -from mock import Mock from runway.cfngin.hooks.awslambda.python_requirements import ( PythonDockerDependencyInstaller, @@ -36,9 +36,7 @@ def test_bind_mounts(self, tmp_path: Path) -> None: ) obj = PythonDockerDependencyInstaller(project, client=Mock()) assert obj.bind_mounts == [ - Mount( - target="/var/task/lambda", source="dependency_directory", type="bind" - ), + Mount(target="/var/task/lambda", source="dependency_directory", type="bind"), Mount(target="/var/task/project", source="project_root", type="bind"), Mount( target=f"/var/task/{requirements_txt.name}", @@ -92,7 +90,8 @@ def test_install_commands_no_requirements(self) -> None: result = PythonDockerDependencyInstaller( Mock(requirements_txt=None), client=Mock() ).install_commands - assert not result and isinstance(result, list) + assert not result + assert isinstance(result, list) def test_python_version(self, mocker: MockerFixture) -> None: """Test python_version.""" @@ -105,9 +104,7 @@ def test_python_version(self, mocker: MockerFixture) -> 
None: mock_version_cls = mocker.patch(f"{MODULE}.Version", return_value="success") obj = PythonDockerDependencyInstaller(Mock(), client=Mock()) assert obj.python_version == mock_version_cls.return_value - mock_run_command.assert_called_once_with( - "python --version", level=logging.DEBUG - ) + mock_run_command.assert_called_once_with("python --version", level=logging.DEBUG) mock_version_cls.assert_called_once_with(version) def test_python_version_not_found(self, mocker: MockerFixture) -> None: @@ -120,9 +117,7 @@ def test_python_version_not_found(self, mocker: MockerFixture) -> None: mock_version_cls = mocker.patch(f"{MODULE}.Version") obj = PythonDockerDependencyInstaller(Mock(), client=Mock()) assert not obj.python_version - mock_run_command.assert_called_once_with( - "python --version", level=logging.DEBUG - ) + mock_run_command.assert_called_once_with("python --version", level=logging.DEBUG) mock_version_cls.assert_not_called() @pytest.mark.parametrize( @@ -136,10 +131,8 @@ def test_python_version_not_found(self, mocker: MockerFixture) -> None: ], ) def test_runtime( - self, expected: Optional[str], mocker: MockerFixture, version: Optional[Version] + self, expected: str | None, mocker: MockerFixture, version: Version | None ) -> None: """Test runtime.""" mocker.patch.object(PythonDockerDependencyInstaller, "python_version", version) - assert ( - PythonDockerDependencyInstaller(Mock(), client=Mock()).runtime == expected - ) + assert PythonDockerDependencyInstaller(Mock(), client=Mock()).runtime == expected diff --git a/tests/unit/cfngin/hooks/awslambda/python_requirements/test__project.py b/tests/unit/cfngin/hooks/awslambda/python_requirements/test__project.py index 3112ac815..cdfc56fac 100644 --- a/tests/unit/cfngin/hooks/awslambda/python_requirements/test__project.py +++ b/tests/unit/cfngin/hooks/awslambda/python_requirements/test__project.py @@ -3,10 +3,10 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, List, Sequence +from typing import TYPE_CHECKING +from unittest.mock import Mock, call import pytest -from mock import Mock, call from runway.cfngin.hooks.awslambda.exceptions import RuntimeMismatchError from runway.cfngin.hooks.awslambda.python_requirements import PythonProject @@ -20,9 +20,9 @@ ) if TYPE_CHECKING: + from collections.abc import Sequence from pathlib import Path - from pytest import LogCaptureFixture from pytest_mock import MockerFixture @@ -74,10 +74,7 @@ def test_cleanup( tmp_requirements_txt.exists.assert_called_once_with() else: tmp_requirements_txt.exists.assert_not_called() - if ( - max([sum([file_exists, pipenv_value]), sum([file_exists, poetry_value])]) - == 2 - ): + if max([sum([file_exists, pipenv_value]), sum([file_exists, poetry_value])]) == 2: tmp_requirements_txt.unlink.assert_called_once_with() else: tmp_requirements_txt.unlink.assert_not_called() @@ -125,9 +122,7 @@ def test_docker(self, mocker: MockerFixture) -> None: @pytest.mark.parametrize( "pipenv, poetry", [(False, False), (False, True), (True, False), (True, True)] ) - def test_install_dependencies( - self, mocker: MockerFixture, pipenv: bool, poetry: bool - ) -> None: + def test_install_dependencies(self, mocker: MockerFixture, pipenv: bool, poetry: bool) -> None: """Test install_dependencies.""" args = Mock(cache_dir="foo", extend_pip_args=["--foo", "bar"], use_cache=True) mocker.patch.object(PythonProject, "pipenv", pipenv) @@ -153,17 +148,13 @@ def test_install_dependencies_docker(self, mocker: MockerFixture) -> None: """Test install_dependencies using 
Docker.""" mock_docker = mocker.patch.object(PythonProject, "docker") mock_pip = mocker.patch.object(PythonProject, "pip") - mocker.patch.object( - PythonProject, "dependency_directory", "dependency_directory" - ) + mocker.patch.object(PythonProject, "dependency_directory", "dependency_directory") mocker.patch.object(PythonProject, "requirements_txt", "requirements.txt") assert not PythonProject(Mock(), Mock()).install_dependencies() mock_docker.install.assert_called_once_with() mock_pip.install.assert_not_called() - def test_install_dependencies_does_not_catch_errors( - self, mocker: MockerFixture - ) -> None: + def test_install_dependencies_does_not_catch_errors(self, mocker: MockerFixture) -> None: """Test install_dependencies does not catch errors.""" mocker.patch.object(PythonProject, "pipenv", False) mocker.patch.object(PythonProject, "poetry", False) @@ -190,15 +181,13 @@ def test_install_dependencies_does_not_catch_errors( ) def test_install_dependencies_skip( - self, caplog: LogCaptureFixture, mocker: MockerFixture + self, caplog: pytest.LogCaptureFixture, mocker: MockerFixture ) -> None: """Test install_dependencies skip because no dependencies.""" caplog.set_level(logging.INFO, logger=MODULE.replace("._", ".")) mock_docker = mocker.patch.object(PythonProject, "docker") mock_pip = mocker.patch.object(PythonProject, "pip") - mocker.patch.object( - PythonProject, "dependency_directory", "dependency_directory" - ) + mocker.patch.object(PythonProject, "dependency_directory", "dependency_directory") mocker.patch.object(PythonProject, "requirements_txt", None) assert not PythonProject(Mock(), Mock()).install_dependencies() mock_docker.install.assert_not_called() @@ -252,10 +241,7 @@ def test_pipenv(self, mocker: MockerFixture) -> None: ) mocker.patch.object(PythonProject, "project_type", "pipenv") project_root = mocker.patch.object(PythonProject, "project_root") - assert ( - PythonProject(Mock(use_poetry=True), ctx).pipenv - == pipenv_class.return_value - ) + assert PythonProject(Mock(use_poetry=True), ctx).pipenv == pipenv_class.return_value pipenv_class.found_in_path.assert_called_once_with() pipenv_class.assert_called_once_with(ctx, project_root) @@ -286,10 +272,7 @@ def test_poetry(self, mocker: MockerFixture) -> None: ) mocker.patch.object(PythonProject, "project_type", "poetry") project_root = mocker.patch.object(PythonProject, "project_root") - assert ( - PythonProject(Mock(use_poetry=True), ctx).poetry - == poetry_class.return_value - ) + assert PythonProject(Mock(use_poetry=True), ctx).poetry == poetry_class.return_value poetry_class.found_in_path.assert_called_once_with() poetry_class.assert_called_once_with(ctx, project_root) @@ -332,7 +315,7 @@ def test_poetry_not_poetry_project(self, mocker: MockerFixture) -> None: ) def test_project_type( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, expected: str, mocker: MockerFixture, pipenv_project: bool, @@ -368,9 +351,7 @@ def test_project_type( ) else: mock_pipenv_dir_is_project.assert_called_once_with(tmp_path) - if (pipenv_project and not use_pipenv) and sum( - [poetry_project, use_poetry] - ) != 2: + if (pipenv_project and not use_pipenv) and sum([poetry_project, use_poetry]) != 2: assert ( "pipenv project detected but use of pipenv is explicitly disabled" in caplog.messages @@ -380,9 +361,7 @@ def test_requirements_txt(self, mocker: MockerFixture, tmp_path: Path) -> None: """Test requirements_txt.""" expected = tmp_path / "requirements.txt" expected.touch() - mock_dir_is_project = mocker.patch( - 
f"{MODULE}.Pip.dir_is_project", return_value=True - ) + mock_dir_is_project = mocker.patch(f"{MODULE}.Pip.dir_is_project", return_value=True) mocker.patch.object(PythonProject, "pipenv", None) mocker.patch.object(PythonProject, "poetry", None) mocker.patch.object(PythonProject, "project_root", tmp_path) @@ -391,16 +370,12 @@ def test_requirements_txt(self, mocker: MockerFixture, tmp_path: Path) -> None: def test_requirements_txt_none(self, mocker: MockerFixture, tmp_path: Path) -> None: """Test requirements_txt is None.""" - mock_dir_is_project = mocker.patch( - f"{MODULE}.Pip.dir_is_project", return_value=False - ) + mock_dir_is_project = mocker.patch(f"{MODULE}.Pip.dir_is_project", return_value=False) mocker.patch.object(PythonProject, "pipenv", None) mocker.patch.object(PythonProject, "poetry", None) mocker.patch.object(PythonProject, "project_root", tmp_path) assert not PythonProject(Mock(), Mock()).requirements_txt - mock_dir_is_project.assert_called_once_with( - tmp_path, file_name="requirements.txt" - ) + mock_dir_is_project.assert_called_once_with(tmp_path, file_name="requirements.txt") def test_requirements_txt_pipenv(self, mocker: MockerFixture) -> None: """Test requirements_txt.""" @@ -438,14 +413,10 @@ def test_runtime(self, mocker: MockerFixture) -> None: def test_runtime_pip(self, mocker: MockerFixture) -> None: """Test runtime from pip.""" mocker.patch.object(PythonProject, "docker", None) - mocker.patch.object( - PythonProject, "pip", Mock(python_version=Mock(major="3", minor="8")) - ) + mocker.patch.object(PythonProject, "pip", Mock(python_version=Mock(major="3", minor="8"))) assert PythonProject(Mock(runtime=None), Mock()).runtime == "python3.8" - def test_runtime_raise_runtime_mismatch_error_docker( - self, mocker: MockerFixture - ) -> None: + def test_runtime_raise_runtime_mismatch_error_docker(self, mocker: MockerFixture) -> None: """Test runtime raise RuntimeMismatchError.""" args = Mock(runtime="bar") docker = mocker.patch.object(PythonProject, "docker", Mock(runtime="foo")) @@ -454,15 +425,11 @@ def test_runtime_raise_runtime_mismatch_error_docker( assert excinfo.value.detected_runtime == docker.runtime assert excinfo.value.expected_runtime == args.runtime - def test_runtime_raise_runtime_mismatch_error_pip( - self, mocker: MockerFixture - ) -> None: + def test_runtime_raise_runtime_mismatch_error_pip(self, mocker: MockerFixture) -> None: """Test runtime raise RuntimeMismatchError.""" args = Mock(runtime="bar") mocker.patch.object(PythonProject, "docker", None) - mocker.patch.object( - PythonProject, "pip", Mock(python_version=Mock(major="3", minor="8")) - ) + mocker.patch.object(PythonProject, "pip", Mock(python_version=Mock(major="3", minor="8"))) with pytest.raises(RuntimeMismatchError) as excinfo: assert not PythonProject(args, Mock()).runtime assert excinfo.value.detected_runtime == "python3.8" @@ -478,7 +445,7 @@ def test_runtime_raise_runtime_mismatch_error_pip( ], ) def test_supported_metadata_files( - self, update_expected: List[str], use_pipenv: bool, use_poetry: bool + self, update_expected: list[str], use_pipenv: bool, use_poetry: bool ) -> None: """Test supported_metadata_files.""" expected = {*Pip.CONFIG_FILES} @@ -493,9 +460,7 @@ def test_supported_metadata_files( def test_tmp_requirements_txt(self, mocker: MockerFixture, tmp_path: Path) -> None: """Test tmp_requirements_txt.""" - source_code = mocker.patch.object( - PythonProject, "source_code", Mock(md5_hash="hash") - ) + source_code = mocker.patch.object(PythonProject, "source_code", 
Mock(md5_hash="hash")) assert ( PythonProject(Mock(), Mock(work_dir=tmp_path)).tmp_requirements_txt == tmp_path / f"{source_code.md5_hash}.requirements.txt" diff --git a/tests/unit/cfngin/hooks/awslambda/test__python_hooks.py b/tests/unit/cfngin/hooks/awslambda/test__python_hooks.py index e3a9dd47e..7cd7a0060 100644 --- a/tests/unit/cfngin/hooks/awslambda/test__python_hooks.py +++ b/tests/unit/cfngin/hooks/awslambda/test__python_hooks.py @@ -1,12 +1,11 @@ """Test runway.cfngin.hooks.awslambda._python_hooks.""" -# pylint: disable=redefined-outer-name from __future__ import annotations from typing import TYPE_CHECKING +from unittest.mock import Mock import pytest -from mock import Mock from pydantic import ValidationError from runway.cfngin.hooks.awslambda import PythonFunction, PythonLayer @@ -20,7 +19,7 @@ MODULE = "runway.cfngin.hooks.awslambda._python_hooks" -@pytest.fixture(scope="function") +@pytest.fixture() def args(tmp_path: Path) -> PythonHookArgs: """Fixture for creating default function args.""" return PythonHookArgs( @@ -36,7 +35,7 @@ class TestPythonFunction: def test___init__(self, args: PythonHookArgs) -> None: """Test __init__.""" ctx = Mock() - obj = PythonFunction(ctx, **args.dict()) + obj = PythonFunction(ctx, **args.model_dump()) # only two attributes are being set currently assert obj.args == args assert obj.ctx == ctx @@ -49,53 +48,45 @@ def test___init___raise_validation_error(self) -> None: def test_cleanup(self, args: PythonHookArgs, mocker: MockerFixture) -> None: """Test cleanup.""" project = mocker.patch.object(PythonFunction, "project") - assert not PythonFunction(Mock(), **args.dict()).cleanup() + assert not PythonFunction(Mock(), **args.model_dump()).cleanup() project.cleanup.assert_called_once_with() - def test_cleanup_on_error( - self, args: PythonHookArgs, mocker: MockerFixture - ) -> None: + def test_cleanup_on_error(self, args: PythonHookArgs, mocker: MockerFixture) -> None: """Test cleanup_on_error.""" deployment_package = mocker.patch.object(PythonFunction, "deployment_package") project = mocker.patch.object(PythonFunction, "project") - assert not PythonFunction(Mock(), **args.dict()).cleanup_on_error() + assert not PythonFunction(Mock(), **args.model_dump()).cleanup_on_error() deployment_package.delete.assert_called_once_with() project.cleanup_on_error.assert_called_once_with() - def test_deployment_package( - self, args: PythonHookArgs, mocker: MockerFixture - ) -> None: + def test_deployment_package(self, args: PythonHookArgs, mocker: MockerFixture) -> None: """Test deployment_package.""" deployment_package_class = mocker.patch(f"{MODULE}.PythonDeploymentPackage") project = mocker.patch.object(PythonFunction, "project", "project") assert ( - PythonFunction(Mock(), **args.dict()).deployment_package + PythonFunction(Mock(), **args.model_dump()).deployment_package == deployment_package_class.init.return_value ) deployment_package_class.init.assert_called_once_with(project, "function") def test_pre_deploy(self, args: PythonHookArgs, mocker: MockerFixture) -> None: """Test pre_deploy.""" - model = Mock(dict=Mock(return_value="success")) - build_response = mocker.patch.object( - PythonFunction, "build_response", return_value=(model) - ) + model = Mock(model_dump=Mock(return_value="success")) + build_response = mocker.patch.object(PythonFunction, "build_response", return_value=(model)) cleanup = mocker.patch.object(PythonFunction, "cleanup") cleanup_on_error = mocker.patch.object(PythonFunction, "cleanup_on_error") deployment_package = 
mocker.patch.object(PythonFunction, "deployment_package") assert ( - PythonFunction(Mock(), **args.dict()).pre_deploy() - == model.dict.return_value + PythonFunction(Mock(), **args.model_dump()).pre_deploy() + == model.model_dump.return_value ) deployment_package.upload.assert_called_once_with() build_response.assert_called_once_with("deploy") - model.dict.assert_called_once_with(by_alias=True) + model.model_dump.assert_called_once_with(by_alias=True) cleanup_on_error.assert_not_called() cleanup.assert_called_once_with() - def test_pre_deploy_always_cleanup( - self, args: PythonHookArgs, mocker: MockerFixture - ) -> None: + def test_pre_deploy_always_cleanup(self, args: PythonHookArgs, mocker: MockerFixture) -> None: """Test pre_deploy always cleanup.""" build_response = mocker.patch.object( PythonFunction, "build_response", return_value="success" @@ -107,8 +98,8 @@ def test_pre_deploy_always_cleanup( "deployment_package", Mock(upload=Mock(side_effect=Exception)), ) - with pytest.raises(Exception, match=""): - assert PythonFunction(Mock(), **args.dict()).pre_deploy() + with pytest.raises(Exception): # noqa: B017, PT011 + assert PythonFunction(Mock(), **args.model_dump()).pre_deploy() deployment_package.upload.assert_called_once_with() build_response.assert_not_called() cleanup_on_error.assert_called_once_with() @@ -118,21 +109,19 @@ def test_project(self, args: PythonHookArgs, mocker: MockerFixture) -> None: """Test project.""" ctx = Mock() project_class = mocker.patch(f"{MODULE}.PythonProject") - assert PythonFunction(ctx, **args.dict()).project == project_class.return_value + assert PythonFunction(ctx, **args.model_dump()).project == project_class.return_value project_class.assert_called_once_with(args, ctx) class TestPythonLayer: """Test PythonLayer.""" - def test_deployment_package( - self, args: PythonHookArgs, mocker: MockerFixture - ) -> None: + def test_deployment_package(self, args: PythonHookArgs, mocker: MockerFixture) -> None: """Test deployment_package.""" deployment_package_class = mocker.patch(f"{MODULE}.PythonDeploymentPackage") project = mocker.patch.object(PythonLayer, "project", "project") assert ( - PythonLayer(Mock(), **args.dict()).deployment_package + PythonLayer(Mock(), **args.model_dump()).deployment_package == deployment_package_class.init.return_value ) deployment_package_class.init.assert_called_once_with(project, "layer") diff --git a/tests/unit/cfngin/hooks/awslambda/test_base_classes.py b/tests/unit/cfngin/hooks/awslambda/test_base_classes.py index 922d03a37..a07d67b73 100644 --- a/tests/unit/cfngin/hooks/awslambda/test_base_classes.py +++ b/tests/unit/cfngin/hooks/awslambda/test_base_classes.py @@ -1,14 +1,12 @@ """Test runway.cfngin.hooks.awslambda.base_classes.""" -# pylint: disable=unused-argument from __future__ import annotations import logging -from pathlib import Path from typing import TYPE_CHECKING, Any, cast +from unittest.mock import Mock import pytest -from mock import Mock from runway.cfngin.hooks.awslambda.base_classes import AwsLambdaHook, Project from runway.cfngin.hooks.awslambda.deployment_package import DeploymentPackage @@ -17,7 +15,8 @@ from runway.cfngin.hooks.awslambda.models.responses import AwsLambdaHookDeployResponse if TYPE_CHECKING: - from pytest import LogCaptureFixture + from pathlib import Path + from pytest_mock import MockerFixture from runway.context import CfnginContext @@ -33,9 +32,7 @@ def test___init__(self, cfngin_context: CfnginContext) -> None: obj: AwsLambdaHook[Any] = AwsLambdaHook(cfngin_context) assert not 
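The import hunks throughout these test modules also drop the third-party mock backport in favor of the standard library, which has shipped the same API as unittest.mock since Python 3.3. A minimal sketch of the changed import and typical usage:

from unittest.mock import Mock, call  # previously: from mock import Mock, call

m = Mock(return_value="success")
assert m("a") == "success"
m.assert_has_calls([call("a")])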
obj.BUILD_LAYER # class var assert obj.ctx # only one attribute is currently set by this base class - assert not hasattr( - obj, "attrs" - ), "should be set by subclasses not by the parent" + assert not hasattr(obj, "attrs"), "should be set by subclasses not by the parent" def test_build_response_deploy(self, mocker: MockerFixture) -> None: """Test build_response.""" @@ -58,9 +55,7 @@ def test_build_response_deploy(self, mocker: MockerFixture) -> None: ), ) deployment_package.bucket.name = "test-bucket" - assert AwsLambdaHook(Mock()).build_response( - "deploy" - ) == AwsLambdaHookDeployResponse( + assert AwsLambdaHook(Mock()).build_response("deploy") == AwsLambdaHookDeployResponse( bucket_name=deployment_package.bucket.name, code_sha256=deployment_package.code_sha256, license="license", @@ -94,9 +89,7 @@ def test_build_response_plan(self, mocker: MockerFixture) -> None: ), ) deployment_package.bucket.name = "test-bucket" - assert AwsLambdaHook(Mock()).build_response( - "plan" - ) == AwsLambdaHookDeployResponse( + assert AwsLambdaHook(Mock()).build_response("plan") == AwsLambdaHookDeployResponse( bucket_name=deployment_package.bucket.name, code_sha256=deployment_package.code_sha256, object_key=deployment_package.object_key, @@ -104,9 +97,7 @@ def test_build_response_plan(self, mocker: MockerFixture) -> None: runtime=deployment_package.runtime, ) - def test_build_response_plan_handle_file_not_found_error( - self, mocker: MockerFixture - ) -> None: + def test_build_response_plan_handle_file_not_found_error(self, mocker: MockerFixture) -> None: """Test build_response.""" mocker.patch.object( AwsLambdaHook, @@ -133,49 +124,37 @@ def test_deployment_package(self) -> None: def test_plan(self, mocker: MockerFixture) -> None: """Test plan.""" - response_obj = Mock(dict=Mock(return_value="success")) + response_obj = Mock(model_dump=Mock(return_value="success")) build_response = mocker.patch.object( AwsLambdaHook, "build_response", return_value=response_obj ) - assert AwsLambdaHook(Mock()).plan() == response_obj.dict.return_value + assert AwsLambdaHook(Mock()).plan() == response_obj.model_dump.return_value build_response.assert_called_once_with("plan") - response_obj.dict.assert_called_once_with(by_alias=True) + response_obj.model_dump.assert_called_once_with(by_alias=True) - def test_post_deploy(self, caplog: LogCaptureFixture) -> None: + def test_post_deploy(self, caplog: pytest.LogCaptureFixture) -> None: """Test post_deploy.""" caplog.set_level(logging.WARNING, logger=MODULE) assert AwsLambdaHook(Mock()).post_deploy() - assert ( - f"post_deploy not implimented for {AwsLambdaHook.__name__}" - in caplog.messages - ) + assert f"post_deploy not implimented for {AwsLambdaHook.__name__}" in caplog.messages - def test_post_destroy(self, caplog: LogCaptureFixture) -> None: + def test_post_destroy(self, caplog: pytest.LogCaptureFixture) -> None: """Test post_destroy.""" caplog.set_level(logging.WARNING, logger=MODULE) assert AwsLambdaHook(Mock()).post_destroy() - assert ( - f"post_destroy not implimented for {AwsLambdaHook.__name__}" - in caplog.messages - ) + assert f"post_destroy not implimented for {AwsLambdaHook.__name__}" in caplog.messages - def test_pre_deploy(self, caplog: LogCaptureFixture) -> None: + def test_pre_deploy(self, caplog: pytest.LogCaptureFixture) -> None: """Test pre_deploy.""" caplog.set_level(logging.WARNING, logger=MODULE) assert AwsLambdaHook(Mock()).pre_deploy() - assert ( - f"pre_deploy not implimented for {AwsLambdaHook.__name__}" - in caplog.messages - ) + assert 
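The caplog parameters below are re-annotated as pytest.LogCaptureFixture, the public name pytest has exported since 6.2, so the TYPE_CHECKING-only import of LogCaptureFixture can go away. A minimal sketch of the pattern these tests follow:

import logging

import pytest

def test_logs_warning(caplog: pytest.LogCaptureFixture) -> None:
    caplog.set_level(logging.WARNING)
    logging.getLogger("demo").warning("something happened")
    assert "something happened" in caplog.messages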
f"pre_deploy not implimented for {AwsLambdaHook.__name__}" in caplog.messages - def test_pre_destroy(self, caplog: LogCaptureFixture) -> None: + def test_pre_destroy(self, caplog: pytest.LogCaptureFixture) -> None: """Test pre_destroy.""" caplog.set_level(logging.WARNING, logger=MODULE) assert AwsLambdaHook(Mock()).pre_destroy() - assert ( - f"pre_destroy not implimented for {AwsLambdaHook.__name__}" - in caplog.messages - ) + assert f"pre_destroy not implimented for {AwsLambdaHook.__name__}" in caplog.messages def test_project(self) -> None: """Test project.""" @@ -196,9 +175,7 @@ def test___init__(self, cfngin_context: CfnginContext) -> None: def test_build_directory(self, mocker: MockerFixture, tmp_path: Path) -> None: """Test build_directory.""" - mocker.patch.object( - Project, "source_code", Mock(md5_hash="hash", root_directory=tmp_path) - ) + mocker.patch.object(Project, "source_code", Mock(md5_hash="hash", root_directory=tmp_path)) expected = tmp_path / f"{tmp_path.name}.hash" obj = Project(Mock(), Mock(work_dir=tmp_path)) @@ -218,7 +195,7 @@ def test_cache_dir(self, tmp_path: Path) -> None: ) assert Project(args, Mock()).cache_dir == cache_dir - def test_cache_dir_default(self, mocker: MockerFixture, tmp_path: Path) -> None: + def test_cache_dir_default(self, tmp_path: Path) -> None: """Test cache_dir default.""" cache_dir = tmp_path / Project.DEFAULT_CACHE_DIR_NAME cache_dir.mkdir() @@ -261,23 +238,18 @@ def test_compatible_runtimes(self, mocker: MockerFixture, tmp_path: Path) -> Non AwsLambdaHookArgs(bucket_name="", runtime="test", source_code=tmp_path), Mock(), ).compatible_runtimes - assert Project( - Mock(compatible_runtimes=["foobar"]), Mock() - ).compatible_runtimes == ["foobar"] + assert Project(Mock(compatible_runtimes=["foobar"]), Mock()).compatible_runtimes == [ + "foobar" + ] - def test_compatible_runtimes_raise_value_error( - self, mocker: MockerFixture, tmp_path: Path - ) -> None: + def test_compatible_runtimes_raise_value_error(self, mocker: MockerFixture) -> None: """Test compatible_runtimes raise ValueError.""" mocker.patch.object(Project, "runtime", "foobar") - with pytest.raises(ValueError) as excinfo: - assert Project( - Mock(compatible_runtimes=["foo", "bar"]), Mock() - ).compatible_runtimes - assert ( - str(excinfo.value) - == "runtime (foobar) not in compatible runtimes (foo, bar)" - ) + with pytest.raises( + ValueError, + match=r"runtime \(foobar\) not in compatible runtimes \(foo, bar\)", + ): + assert Project(Mock(compatible_runtimes=["foo", "bar"]), Mock()).compatible_runtimes def test_dependency_directory(self, mocker: MockerFixture, tmp_path: Path) -> None: """Test dependency_directory.""" @@ -304,28 +276,26 @@ def test_license(self, tmp_path: Path) -> None: def test_metadata_files(self) -> None: """Test metadata_files.""" result = Project(Mock(), Mock()).metadata_files - assert not result and isinstance(result, tuple) + assert not result + assert isinstance(result, tuple) def test_project_root(self, tmp_path: Path) -> None: """Test project_root.""" config_path = tmp_path / "config.yml" config_path.touch() assert ( - Project( - Mock(source_code=tmp_path), Mock(config_path=config_path) - ).project_root + Project(Mock(source_code=tmp_path), Mock(config_path=config_path)).project_root == tmp_path ) def test_project_root_config_path_is_dir(self, tmp_path: Path) -> None: """Test project_root ctx.config_path is a directory.""" assert ( - Project(Mock(source_code=tmp_path), Mock(config_path=tmp_path)).project_root - == tmp_path + 
Project(Mock(source_code=tmp_path), Mock(config_path=tmp_path)).project_root == tmp_path ) def test_project_root_config_path_not_parent_of_source_code( - self, caplog: LogCaptureFixture, tmp_path: Path + self, caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: """Test project_root ctx.config_path is not a parent of args.source_code.""" caplog.set_level(logging.INFO) @@ -335,14 +305,11 @@ def test_project_root_config_path_not_parent_of_source_code( config_path.touch() src_path = tmp_path / "src" / "lambda_function" assert ( - Project( - Mock(source_code=src_path), Mock(config_path=config_path) - ).project_root + Project(Mock(source_code=src_path), Mock(config_path=config_path)).project_root == src_path ) assert ( - "ignoring project directory; " - "source code located outside of project directory" + "ignoring project directory; source code located outside of project directory" ) in caplog.messages @pytest.mark.parametrize("create_metadata_file", [False, True]) @@ -360,9 +327,9 @@ def test_project_root_config_path_parent_of_source_code( src_path.mkdir(parents=True) if create_metadata_file: (src_path / "test.txt").touch() - assert Project( - Mock(source_code=src_path), Mock(config_path=config_path) - ).project_root == (src_path if create_metadata_file else tmp_path) + assert Project(Mock(source_code=src_path), Mock(config_path=config_path)).project_root == ( + src_path if create_metadata_file else tmp_path + ) def test_project_type(self) -> None: """Test project_type.""" @@ -371,17 +338,13 @@ def test_project_type(self) -> None: def test_runtime(self, mocker: MockerFixture) -> None: """Test runtime.""" - docker = mocker.patch.object( - Project, "docker", Mock(runtime="foo"), create=True - ) + docker = mocker.patch.object(Project, "docker", Mock(runtime="foo"), create=True) assert Project(Mock(runtime=None), Mock()).runtime == docker.runtime def test_runtime_raise_runtime_mismatch_error(self, mocker: MockerFixture) -> None: """Test runtime raise RuntimeMismatchError.""" args = Mock(runtime="bar") - docker = mocker.patch.object( - Project, "docker", Mock(runtime="foo"), create=True - ) + docker = mocker.patch.object(Project, "docker", Mock(runtime="foo"), create=True) with pytest.raises(RuntimeMismatchError) as excinfo: assert not Project(args, Mock()).runtime assert excinfo.value.detected_runtime == docker.runtime @@ -390,12 +353,10 @@ def test_runtime_raise_runtime_mismatch_error(self, mocker: MockerFixture) -> No def test_runtime_raise_value_error(self, mocker: MockerFixture) -> None: """Test runtime raise ValueError.""" mocker.patch.object(Project, "docker", None, create=True) - with pytest.raises(ValueError) as excinfo: + with pytest.raises( + ValueError, match="runtime could not be determined from the build system" + ): assert not Project(Mock(runtime=None), Mock()).runtime - assert ( - str(excinfo.value) - == "runtime could not be determined from the build system" - ) def test_source_code(self, mocker: MockerFixture) -> None: """Test source_code.""" diff --git a/tests/unit/cfngin/hooks/awslambda/test_deployment_package.py b/tests/unit/cfngin/hooks/awslambda/test_deployment_package.py index 8c2c12725..c8dc45784 100644 --- a/tests/unit/cfngin/hooks/awslambda/test_deployment_package.py +++ b/tests/unit/cfngin/hooks/awslambda/test_deployment_package.py @@ -1,19 +1,15 @@ """Test runway.cfngin.hooks.awslambda.deployment_package.""" -# pylint: disable=protected-access,redefined-outer-name,unused-argument -# pylint: disable=too-many-lines from __future__ import annotations import 
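Several hunks in this area replace manual str(excinfo.value) comparisons with pytest.raises(..., match=...). Since match is applied with re.search, literal parentheses in the expected message must be escaped. A minimal sketch:

import pytest

def _fail() -> None:  # stand-in for the code under test
    raise ValueError("runtime (foobar) not in compatible runtimes (foo, bar)")

with pytest.raises(ValueError, match=r"runtime \(foobar\) not in compatible runtimes \(foo, bar\)"):
    _fail()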
zipfile -from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, cast +from typing import TYPE_CHECKING, Any, cast +from unittest.mock import MagicMock, Mock, PropertyMock, call from urllib.parse import urlencode import igittigitt import pytest from botocore.exceptions import ClientError -from mock import MagicMock, Mock, PropertyMock, call -from typing_extensions import Literal from runway._logging import LogLevels from runway.cfngin.hooks.awslambda.base_classes import Project @@ -37,10 +33,12 @@ from .factories import MockProject if TYPE_CHECKING: + from pathlib import Path + from botocore.stub import Stubber from mypy_boto3_s3.type_defs import PutObjectOutputTypeDef - from pytest import LogCaptureFixture from pytest_mock import MockerFixture + from typing_extensions import Literal from runway.context import CfnginContext @@ -49,7 +47,7 @@ ProjectTypeAlias = Project[AwsLambdaHookArgs] -@pytest.fixture(scope="function") +@pytest.fixture() def project(cfngin_context: CfnginContext, tmp_path: Path) -> ProjectTypeAlias: """Mock project object.""" args = AwsLambdaHookArgs( @@ -77,12 +75,8 @@ def test__build_fix_file_permissions(self, project: ProjectTypeAlias) -> None: obj = DeploymentPackage(project) obj._build_fix_file_permissions(archive_file) - assert ( - file0.external_attr & DeploymentPackage.ZIPFILE_PERMISSION_MASK - ) >> 16 == 0o755 - assert ( - file0.external_attr & DeploymentPackage.ZIPFILE_PERMISSION_MASK - ) >> 16 == 0o755 + assert (file0.external_attr & DeploymentPackage.ZIPFILE_PERMISSION_MASK) >> 16 == 0o755 + assert (file0.external_attr & DeploymentPackage.ZIPFILE_PERMISSION_MASK) >> 16 == 0o755 @pytest.mark.parametrize("usage_type", ["function", "layer"]) def test__build_zip_dependencies( @@ -176,8 +170,7 @@ def test__build_zip_source_code( if usage_type == "layer": mock_insert_layer_dir.assert_has_calls( [ # type: ignore - call(src_file, project.source_code.root_directory) - for src_file in files + call(src_file, project.source_code.root_directory) for src_file in files ] ) archive_file.write.assert_has_calls( @@ -228,19 +221,15 @@ def test_bucket(self, mocker: MockerFixture, project: ProjectTypeAlias) -> None: assert obj.bucket == bucket_class.return_value bucket_class.assert_any_call(project.ctx, project.args.bucket_name) - def test_bucket_forbidden( - self, mocker: MockerFixture, project: ProjectTypeAlias - ) -> None: + def test_bucket_forbidden(self, mocker: MockerFixture, project: ProjectTypeAlias) -> None: """Test bucket.""" - mocker.patch( - f"{MODULE}.Bucket", return_value=Mock(forbidden=True, not_found=False) - ) + mocker.patch(f"{MODULE}.Bucket", return_value=Mock(forbidden=True, not_found=False)) with pytest.raises(BucketAccessDeniedError): assert DeploymentPackage(project).bucket def test_build( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, project: ProjectTypeAlias, ) -> None: @@ -264,21 +253,15 @@ def _write_zip(package: DeploymentPackage[Any], archive_file: Mock) -> None: mock_build_fix_file_permissions = mocker.patch.object( DeploymentPackage, "_build_fix_file_permissions" ) - mock_del_cached_property = mocker.patch.object( - DeploymentPackage, "_del_cached_property" - ) + mock_del_cached_property = mocker.patch.object(DeploymentPackage, "_del_cached_property") obj = DeploymentPackage(project) assert obj.build() == obj.archive_file - mock_zipfile_class.assert_called_once_with( - obj.archive_file, "w", zipfile.ZIP_DEFLATED - ) + 
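The permission assertions in test__build_fix_file_permissions depend on zipfile storing the POSIX file mode in the upper 16 bits of ZipInfo.external_attr. A self-contained sketch of writing and reading those bits (the archive and file names are illustrative only):

import zipfile

with zipfile.ZipFile("demo.zip", "w") as zf:
    info = zipfile.ZipInfo("script.sh")
    info.external_attr = 0o755 << 16  # POSIX mode lives in the high word
    zf.writestr(info, "#!/bin/sh\n")

with zipfile.ZipFile("demo.zip") as zf:
    assert (zf.getinfo("script.sh").external_attr >> 16) & 0o777 == 0o755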
mock_zipfile_class.assert_called_once_with(obj.archive_file, "w", zipfile.ZIP_DEFLATED) mock_zipfile.__enter__.assert_called_once_with() mock_build_zip_dependencies.assert_called_once_with(mock_zipfile) mock_build_fix_file_permissions.assert_called_once_with(mock_zipfile) - mock_del_cached_property.assert_called_once_with( - "code_sha256", "exists", "md5_checksum" - ) + mock_del_cached_property.assert_called_once_with("code_sha256", "exists", "md5_checksum") assert f"building {obj.archive_file.name} ({obj.runtime})..." in caplog.messages def test_build_file_empty_after_build( @@ -288,7 +271,7 @@ def test_build_file_empty_after_build( archive_file = project.build_directory / "foobar.zip" mocker.patch.object(DeploymentPackage, "archive_file", archive_file) - def _write_zip(package: DeploymentPackage[Any], archive_file: Mock) -> None: + def _write_zip(package: DeploymentPackage[Any], archive_file: Mock) -> None: # noqa: ARG001 package.archive_file.touch() mock_build_zip_dependencies = mocker.patch.object( @@ -306,7 +289,7 @@ def _write_zip(package: DeploymentPackage[Any], archive_file: Mock) -> None: def test_build_file_exists( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, project: ProjectTypeAlias, ) -> None: @@ -320,9 +303,7 @@ def test_build_file_exists( obj.archive_file.write_text("test" * 8) assert obj.build() == obj.archive_file mock_zipfile_class.assert_not_called() - assert ( - f"build skipped; {obj.archive_file.name} already exists" in caplog.messages - ) + assert f"build skipped; {obj.archive_file.name} already exists" in caplog.messages def test_build_raise_runtime_mismatch_error( self, mocker: MockerFixture, project: ProjectTypeAlias @@ -348,7 +329,7 @@ def test_build_raise_runtime_mismatch_error( mock_build_zip_source_code.assert_not_called() mock_build_fix_file_permissions.assert_not_called() - @pytest.mark.parametrize("url_encoded", [False, True, False, True]) + @pytest.mark.parametrize("url_encoded", [False, True]) def test_build_tag_set( self, mocker: MockerFixture, @@ -356,16 +337,10 @@ def test_build_tag_set( url_encoded: bool, ) -> None: """Test build_tag_set.""" - code_sha256 = mocker.patch.object( - DeploymentPackage, "code_sha256", "code_sha256" - ) + code_sha256 = mocker.patch.object(DeploymentPackage, "code_sha256", "code_sha256") mocker.patch.object(project, "compatible_runtimes", ["compatible_runtimes"]) - md5_checksum = mocker.patch.object( - DeploymentPackage, "md5_checksum", "md5_checksum" - ) - source_md5_hash = mocker.patch.object( - project.source_code, "md5_hash", "source_code.hash" - ) + md5_checksum = mocker.patch.object(DeploymentPackage, "md5_checksum", "md5_checksum") + source_md5_hash = mocker.patch.object(project.source_code, "md5_hash", "source_code.hash") expected = { **project.ctx.tags, DeploymentPackage.META_TAGS["code_sha256"]: code_sha256, @@ -380,33 +355,20 @@ def test_build_tag_set( urlencode(expected) if url_encoded else expected ) - def test_bucket_not_found( - self, mocker: MockerFixture, project: ProjectTypeAlias - ) -> None: + def test_bucket_not_found(self, mocker: MockerFixture, project: ProjectTypeAlias) -> None: """Test bucket.""" - mocker.patch( - f"{MODULE}.Bucket", return_value=Mock(forbidden=False, not_found=True) - ) + mocker.patch(f"{MODULE}.Bucket", return_value=Mock(forbidden=False, not_found=True)) with pytest.raises(BucketNotFoundError): assert DeploymentPackage(project).bucket - def test_code_sha256( - self, mocker: MockerFixture, project: ProjectTypeAlias - ) -> None: + def 
test_code_sha256(self, mocker: MockerFixture, project: ProjectTypeAlias) -> None: """Test code_sha256.""" - archive_file = mocker.patch.object( - DeploymentPackage, "archive_file", "archive_file" - ) + archive_file = mocker.patch.object(DeploymentPackage, "archive_file", "archive_file") file_hash = Mock(digest="digest") mock_b64encode = mocker.patch("base64.b64encode", return_value=b"success") - mock_file_hash_class = mocker.patch( - f"{MODULE}.FileHash", return_value=file_hash - ) + mock_file_hash_class = mocker.patch(f"{MODULE}.FileHash", return_value=file_hash) mock_sha256 = mocker.patch("hashlib.sha256") - assert ( - DeploymentPackage(project).code_sha256 - == mock_b64encode.return_value.decode() - ) + assert DeploymentPackage(project).code_sha256 == mock_b64encode.return_value.decode() mock_file_hash_class.assert_called_once_with(mock_sha256.return_value) file_hash.add_file.assert_called_once_with(archive_file) mock_b64encode.assert_called_once_with(file_hash.digest) @@ -418,9 +380,7 @@ def test_compatible_architectures( mocker.patch.object(project, "compatible_architectures", ["foobar"]) assert DeploymentPackage(project).compatible_architectures == ["foobar"] - def test_compatible_runtimes( - self, mocker: MockerFixture, project: ProjectTypeAlias - ) -> None: + def test_compatible_runtimes(self, mocker: MockerFixture, project: ProjectTypeAlias) -> None: """Test compatible_runtimes.""" mocker.patch.object(project, "compatible_runtimes", ["foobar"]) assert DeploymentPackage(project).compatible_runtimes == ["foobar"] @@ -430,9 +390,7 @@ def test_delete( self, mocker: MockerFixture, project: ProjectTypeAlias, should_exist: bool ) -> None: """Test delete.""" - mock_del_cached_property = mocker.patch.object( - DeploymentPackage, "_del_cached_property" - ) + mock_del_cached_property = mocker.patch.object(DeploymentPackage, "_del_cached_property") obj = DeploymentPackage(project) if should_exist: obj.archive_file.touch() @@ -454,9 +412,7 @@ def test_gitignore_filter(self, project: ProjectTypeAlias) -> None: """Test gitignore_filter.""" assert not DeploymentPackage(project).gitignore_filter - @pytest.mark.parametrize( - "exists_in_s3, usage_type", [(False, "function"), (True, "layer")] - ) + @pytest.mark.parametrize("exists_in_s3, usage_type", [(False, "function"), (True, "layer")]) def test_init( self, exists_in_s3: bool, @@ -466,30 +422,24 @@ def test_init( ) -> None: """Test init where runtime always matches.""" s3_obj = Mock(exists=exists_in_s3, runtime=project.runtime) - s3_obj_class = mocker.patch( - f"{MODULE}.DeploymentPackageS3Object", return_value=s3_obj - ) + s3_obj_class = mocker.patch(f"{MODULE}.DeploymentPackageS3Object", return_value=s3_obj) if exists_in_s3: assert DeploymentPackage.init(project, usage_type) == s3_obj else: - assert isinstance( - DeploymentPackage.init(project, usage_type), DeploymentPackage - ) + assert isinstance(DeploymentPackage.init(project, usage_type), DeploymentPackage) s3_obj_class.assert_called_once_with(project, usage_type) def test_init_runtime_change( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, project: ProjectTypeAlias, ) -> None: """Test init where runtime has changed and object exists in S3.""" caplog.set_level(LogLevels.WARNING, logger=MODULE) s3_obj = Mock(exists=True, runtime="change") - s3_obj_class = mocker.patch( - f"{MODULE}.DeploymentPackageS3Object", return_value=s3_obj - ) + s3_obj_class = mocker.patch(f"{MODULE}.DeploymentPackageS3Object", return_value=s3_obj) assert 
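test_code_sha256 mocks hashlib, base64, and FileHash, but the flow it pins down is simply a base64-encoded SHA-256 digest of the archive, the same format Lambda reports as CodeSha256. An unmocked sketch of that computation:

import base64
import hashlib
from pathlib import Path

def code_sha256(path: Path) -> str:
    digest = hashlib.sha256(path.read_bytes()).digest()  # raw 32-byte digest
    return base64.b64encode(digest).decode()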
isinstance(DeploymentPackage.init(project), DeploymentPackage) s3_obj_class.assert_called_once_with(project, "function") s3_obj.delete.assert_called_once_with() @@ -527,23 +477,14 @@ def test_license(self, mocker: MockerFixture, project: ProjectTypeAlias) -> None mocker.patch.object(project, "license", "foobar") assert DeploymentPackage(project).license == "foobar" - def test_md5_checksum( - self, mocker: MockerFixture, project: ProjectTypeAlias - ) -> None: + def test_md5_checksum(self, mocker: MockerFixture, project: ProjectTypeAlias) -> None: """Test md5_checksum.""" - archive_file = mocker.patch.object( - DeploymentPackage, "archive_file", "archive_file" - ) + archive_file = mocker.patch.object(DeploymentPackage, "archive_file", "archive_file") file_hash = Mock(digest="digest") mock_b64encode = mocker.patch("base64.b64encode", return_value=b"success") - mock_file_hash_class = mocker.patch( - f"{MODULE}.FileHash", return_value=file_hash - ) + mock_file_hash_class = mocker.patch(f"{MODULE}.FileHash", return_value=file_hash) mock_md5 = mocker.patch("hashlib.md5") - assert ( - DeploymentPackage(project).md5_checksum - == mock_b64encode.return_value.decode() - ) + assert DeploymentPackage(project).md5_checksum == mock_b64encode.return_value.decode() mock_file_hash_class.assert_called_once_with(mock_md5.return_value) file_hash.add_file.assert_called_once_with(archive_file) mock_b64encode.assert_called_once_with(file_hash.digest) @@ -560,16 +501,14 @@ def test_md5_checksum( def test_object_key( self, project: ProjectTypeAlias, - object_prefix: Optional[str], + object_prefix: str | None, usage_type: Literal["function", "layer"], ) -> None: """Test object_key.""" project.args.object_prefix = object_prefix obj = DeploymentPackage(project, usage_type) if object_prefix: - expected_prefix = ( - f"awslambda/{usage_type}s/{object_prefix.lstrip('/').rstrip('/')}" - ) + expected_prefix = f"awslambda/{usage_type}s/{object_prefix.lstrip('/').rstrip('/')}" else: expected_prefix = f"awslambda/{usage_type}s" assert obj.object_key == ( @@ -577,15 +516,13 @@ def test_object_key( f"{project.source_code.md5_hash}.zip" ) - @pytest.mark.parametrize( - "response, expected", [({}, None), ({"VersionId": "foo"}, "foo")] - ) + @pytest.mark.parametrize("response, expected", [({}, None), ({"VersionId": "foo"}, "foo")]) def test_object_version_id( self, - expected: Optional[str], + expected: str | None, mocker: MockerFixture, project: ProjectTypeAlias, - response: Dict[str, Any], + response: dict[str, Any], ) -> None: """Test object_version_id.""" mocker.patch.object(DeploymentPackage, "_put_object_response", response) @@ -597,9 +534,7 @@ def test_runtime(self, project: ProjectTypeAlias) -> None: assert DeploymentPackage(project).runtime == project.runtime @pytest.mark.parametrize("build", [False, True]) - def test_upload( - self, build: bool, mocker: MockerFixture, project: ProjectTypeAlias - ) -> None: + def test_upload(self, build: bool, mocker: MockerFixture, project: ProjectTypeAlias) -> None: """Test upload.""" mocker.patch.object( DeploymentPackage, @@ -613,15 +548,11 @@ def test_upload( "build_tag_set", return_value="foo=bar", ) - mock_del_cached_property = mocker.patch.object( - DeploymentPackage, "_del_cached_property" - ) + mock_del_cached_property = mocker.patch.object(DeploymentPackage, "_del_cached_property") mock_guess_type = mocker.patch( "mimetypes.guess_type", return_value=("application/zip", None) ) - md5_checksum = mocker.patch.object( - DeploymentPackage, "md5_checksum", "checksum" - ) + md5_checksum 
= mocker.patch.object(DeploymentPackage, "md5_checksum", "checksum") obj = DeploymentPackage(project) obj.archive_file.write_text("foobar") @@ -675,7 +606,7 @@ class TestDeploymentPackageS3Object: def test_build_exists( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, project: ProjectTypeAlias, ) -> None: @@ -689,13 +620,9 @@ def test_build_exists( mocker.patch.object(DeploymentPackageS3Object, "exists", True) obj = DeploymentPackageS3Object(project) assert obj.build() == obj.archive_file - assert ( - f"build skipped; {obj.archive_file.name} already exists" in caplog.messages - ) + assert f"build skipped; {obj.archive_file.name} already exists" in caplog.messages - def test_build_not_exists( - self, mocker: MockerFixture, project: ProjectTypeAlias - ) -> None: + def test_build_not_exists(self, mocker: MockerFixture, project: ProjectTypeAlias) -> None: """Test build object doesn't exist raises S3ObjectDoesNotExistError.""" mocker.patch.object(DeploymentPackageS3Object, "exists", False) bucket = Bucket(project.ctx, project.args.bucket_name) @@ -706,9 +633,7 @@ def test_build_not_exists( assert excinfo.value.bucket == bucket.name assert excinfo.value.key == obj.object_key - def test_code_sha256( - self, mocker: MockerFixture, project: ProjectTypeAlias - ) -> None: + def test_code_sha256(self, mocker: MockerFixture, project: ProjectTypeAlias) -> None: """Test code_sha256.""" expected = "foobar" mocker.patch.object( @@ -734,13 +659,11 @@ def test_code_sha256_raise_required_tag_not_found( assert DeploymentPackageS3Object(project).code_sha256 bucket.format_bucket_path_uri.assert_called_once_with(key=object_key) assert excinfo.value.resource == bucket.format_bucket_path_uri.return_value - assert ( - excinfo.value.tag_key == DeploymentPackageS3Object.META_TAGS["code_sha256"] - ) + assert excinfo.value.tag_key == DeploymentPackageS3Object.META_TAGS["code_sha256"] @pytest.mark.parametrize("value", ["foobar", None, "foo,bar"]) def test_compatible_architectures( - self, mocker: MockerFixture, project: ProjectTypeAlias, value: Optional[str] + self, mocker: MockerFixture, project: ProjectTypeAlias, value: str | None ) -> None: """Test compatible_architectures.""" mocker.patch.object( @@ -758,17 +681,13 @@ def test_compatible_architectures( @pytest.mark.parametrize("value", ["foobar", None, "foo,bar"]) def test_compatible_runtimes( - self, mocker: MockerFixture, project: ProjectTypeAlias, value: Optional[str] + self, mocker: MockerFixture, project: ProjectTypeAlias, value: str | None ) -> None: """Test compatible_runtimes.""" mocker.patch.object( DeploymentPackageS3Object, "object_tags", - ( - {DeploymentPackageS3Object.META_TAGS["compatible_runtimes"]: value} - if value - else {} - ), + ({DeploymentPackageS3Object.META_TAGS["compatible_runtimes"]: value} if value else {}), ) assert DeploymentPackageS3Object(project).compatible_runtimes == ( value.split(", ") if value else None @@ -824,7 +743,7 @@ def test_delete( def test_exists( self, expected: bool, - head: Dict[str, Any], + head: dict[str, Any], project: ProjectTypeAlias, mocker: MockerFixture, ) -> None: @@ -854,7 +773,7 @@ def test_head(self, mocker: MockerFixture, project: ProjectTypeAlias) -> None: def test_head_403( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, project: ProjectTypeAlias, ) -> None: @@ -869,9 +788,7 @@ def test_head_403( object_key = mocker.patch.object(DeploymentPackageS3Object, "object_key", "key") stubber = cast("Stubber", 
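The head_object tests below drive error paths through botocore's Stubber, whose add_client_error queues an error response for the next matching client call. A minimal sketch against a plain boto3 client:

import boto3
import pytest
from botocore.exceptions import ClientError
from botocore.stub import Stubber

client = boto3.client("s3", region_name="us-east-1")
stubber = Stubber(client)
stubber.add_client_error("head_object", http_status_code=403, service_message="Forbidden")
with stubber, pytest.raises(ClientError):
    client.head_object(Bucket="demo-bucket", Key="demo-key")
stubber.assert_no_pending_responses()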
project.ctx.add_stubber("s3")) # type: ignore - stubber.add_client_error( - "head_object", http_status_code=403, service_message="Forbidden" - ) + stubber.add_client_error("head_object", http_status_code=403, service_message="Forbidden") with stubber, pytest.raises(ClientError): assert DeploymentPackageS3Object(project).head stubber.assert_no_pending_responses() @@ -882,7 +799,7 @@ def test_head_403( def test_head_404( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, project: ProjectTypeAlias, ) -> None: @@ -897,20 +814,15 @@ def test_head_404( object_key = mocker.patch.object(DeploymentPackageS3Object, "object_key", "key") stubber = cast("Stubber", project.ctx.add_stubber("s3")) # type: ignore - stubber.add_client_error( - "head_object", http_status_code=404, service_message="Not Found" - ) + stubber.add_client_error("head_object", http_status_code=404, service_message="Not Found") with stubber: assert not DeploymentPackageS3Object(project).head stubber.assert_no_pending_responses() - assert ( - f"{bucket.format_bucket_path_uri(key=object_key)} not found" - in caplog.messages - ) + assert f"{bucket.format_bucket_path_uri(key=object_key)} not found" in caplog.messages @pytest.mark.parametrize("value", ["foobar", None]) def test_license( - self, mocker: MockerFixture, project: ProjectTypeAlias, value: Optional[str] + self, mocker: MockerFixture, project: ProjectTypeAlias, value: str | None ) -> None: """Test license.""" mocker.patch.object( @@ -920,9 +832,7 @@ def test_license( ) assert DeploymentPackageS3Object(project).license == (value) - def test_md5_checksum( - self, project: ProjectTypeAlias, mocker: MockerFixture - ) -> None: + def test_md5_checksum(self, project: ProjectTypeAlias, mocker: MockerFixture) -> None: """Test md5_checksum.""" expected = "foobar" mocker.patch.object( @@ -950,9 +860,7 @@ def test_md5_checksum_raise_required_tag_not_found( assert DeploymentPackageS3Object(project).md5_checksum bucket.format_bucket_path_uri.assert_called_once_with(key=object_key) assert excinfo.value.resource == bucket.format_bucket_path_uri.return_value - assert ( - excinfo.value.tag_key == DeploymentPackageS3Object.META_TAGS["md5_checksum"] - ) + assert excinfo.value.tag_key == DeploymentPackageS3Object.META_TAGS["md5_checksum"] @pytest.mark.parametrize( "response, expected", @@ -963,10 +871,10 @@ def test_md5_checksum_raise_required_tag_not_found( ) def test_object_tags( self, - expected: Dict[str, str], + expected: dict[str, str], mocker: MockerFixture, project: ProjectTypeAlias, - response: Dict[str, List[Dict[str, str]]], + response: dict[str, list[dict[str, str]]], ) -> None: """Test object_tags.""" mocker.patch.object( @@ -992,8 +900,8 @@ def test_object_tags( ) def test_object_version_id( self, - expected: Optional[str], - head: Dict[str, str], + expected: str | None, + head: dict[str, str], mocker: MockerFixture, project: ProjectTypeAlias, ) -> None: @@ -1031,9 +939,7 @@ def test_runtime_raise_required_tag_not_found( assert excinfo.value.resource == bucket.format_bucket_path_uri.return_value assert excinfo.value.tag_key == DeploymentPackageS3Object.META_TAGS["runtime"] - def test_update_tags( - self, mocker: MockerFixture, project: ProjectTypeAlias - ) -> None: + def test_update_tags(self, mocker: MockerFixture, project: ProjectTypeAlias) -> None: """Test mock_update_tags.""" bucket = Bucket(project.ctx, project.args.bucket_name) mocker.patch.object(DeploymentPackageS3Object, "bucket", bucket) @@ -1060,7 +966,7 @@ def test_update_tags( 
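S3 returns object tags as a TagSet of Key/Value pairs, while put_object accepts them as a URL-encoded string, which is why build_tag_set runs its dict through urllib.parse.urlencode. A small sketch of the two shapes (tag names are illustrative):

from urllib.parse import urlencode

tags = {"code_sha256": "abc123", "md5_checksum": "xyz=="}
assert urlencode(tags) == "code_sha256=abc123&md5_checksum=xyz%3D%3D"
tag_set = [{"Key": k, "Value": v} for k, v in tags.items()]  # get_object_tagging shape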
def test_update_tags_no_change( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, project: ProjectTypeAlias, ) -> None: @@ -1087,7 +993,7 @@ def test_update_tags_no_change( def test_upload_exists( self, build: bool, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, project: ProjectTypeAlias, ) -> None: @@ -1105,9 +1011,7 @@ def test_upload_exists( ) mock_update_tags.assert_called_once_with() - def test_upload_not_exists( - self, mocker: MockerFixture, project: ProjectTypeAlias - ) -> None: + def test_upload_not_exists(self, mocker: MockerFixture, project: ProjectTypeAlias) -> None: """Test upload object doesn't exist raises S3ObjectDoesNotExistError.""" mocker.patch.object(DeploymentPackageS3Object, "exists", False) bucket = Bucket(project.ctx, project.args.bucket_name) diff --git a/tests/unit/cfngin/hooks/awslambda/test_docker.py b/tests/unit/cfngin/hooks/awslambda/test_docker.py index e3b777839..bed13ad8e 100644 --- a/tests/unit/cfngin/hooks/awslambda/test_docker.py +++ b/tests/unit/cfngin/hooks/awslambda/test_docker.py @@ -3,13 +3,13 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING +from unittest.mock import Mock, call import pytest from docker.errors import DockerException, ImageNotFound from docker.models.images import Image from docker.types.services import Mount -from mock import Mock, call from runway.cfngin.hooks.awslambda.constants import ( AWS_SAM_BUILD_IMAGE_PREFIX, @@ -25,7 +25,6 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture from pytest_mock import MockerFixture from runway.context import CfnginContext @@ -37,13 +36,9 @@ class TestDockerDependencyInstaller: """Test DockerDependencyInstaller.""" - def test___init__( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test___init__(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> None: """Test __init__.""" - from_env = mocker.patch( - f"{MODULE}.DockerClient.from_env", return_value="success" - ) + from_env = mocker.patch(f"{MODULE}.DockerClient.from_env", return_value="success") options = Mock() project = Mock(args=Mock(docker=options)) obj = DockerDependencyInstaller(project, context=cfngin_context) @@ -56,9 +51,7 @@ def test___init__( def test___init___client(self, mocker: MockerFixture) -> None: """Test __init__ passing client.""" client = Mock() - from_env = mocker.patch( - f"{MODULE}.DockerClient.from_env", return_value="success" - ) + from_env = mocker.patch(f"{MODULE}.DockerClient.from_env", return_value="success") obj = DockerDependencyInstaller(Mock(), client=client) from_env.assert_not_called() assert obj.client == client @@ -72,9 +65,7 @@ def test_bind_mounts(self) -> None: ) obj = DockerDependencyInstaller(project, client=Mock()) assert obj.bind_mounts == [ - Mount( - target="/var/task/lambda", source="dependency_directory", type="bind" - ), + Mount(target="/var/task/lambda", source="dependency_directory", type="bind"), Mount(target="/var/task/project", source="project_root", type="bind"), ] @@ -87,22 +78,18 @@ def test_bind_mounts_cache_dir(self) -> None: ) obj = DockerDependencyInstaller(project, client=Mock()) assert obj.bind_mounts == [ - Mount( - target="/var/task/lambda", source="dependency_directory", type="bind" - ), + Mount(target="/var/task/lambda", source="dependency_directory", type="bind"), Mount(target="/var/task/project", source="project_root", type="bind"), 
Mount(target="/var/task/cache_dir", source="cache_dir", type="bind"), ] - @pytest.mark.parametrize( - "name, pull, tag", [("foo", False, "bar"), (None, True, None)] - ) + @pytest.mark.parametrize("name, pull, tag", [("foo", False, "bar"), (None, True, None)]) def test_build_image( self, mocker: MockerFixture, - name: Optional[str], + name: str | None, pull: bool, - tag: Optional[str], + tag: str | None, tmp_path: Path, ) -> None: """Test build_image.""" @@ -134,9 +121,7 @@ def test_build_image( pull=pull, ) mock_log_docker_msg_dict.assert_called_once_with(logs) - image.tag.assert_called_once_with( - name or DEFAULT_IMAGE_NAME, tag=tag or DEFAULT_IMAGE_TAG - ) + image.tag.assert_called_once_with(name or DEFAULT_IMAGE_NAME, tag=tag or DEFAULT_IMAGE_TAG) image.reload.assert_called_once_with() def test_build_image_raise_docker_exception(self, tmp_path: Path) -> None: @@ -234,10 +219,10 @@ def test_from_project_raise_docker_error(self, mocker: MockerFixture) -> None: ) def test_image_build_image( self, - image: Optional[str], + image: str | None, mocker: MockerFixture, - name: Optional[str], - runtime: Optional[str], + name: str | None, + runtime: str | None, ) -> None: """Test image build image.""" project = Mock(args=Mock(docker=Mock(file="foo", image=image), runtime=runtime)) @@ -255,15 +240,13 @@ def test_image_build_image( ) def test_image_pull_image( self, - image: Optional[str], + image: str | None, mocker: MockerFixture, pull: bool, - runtime: Optional[str], + runtime: str | None, ) -> None: """Test image pull image.""" - project = Mock( - args=Mock(docker=Mock(file=None, image=image, pull=pull), runtime=runtime) - ) + project = Mock(args=Mock(docker=Mock(file=None, image=image, pull=pull), runtime=runtime)) pull_image = mocker.patch.object( DockerDependencyInstaller, "pull_image", return_value="success" ) @@ -281,17 +264,14 @@ def test_image_pull_image( def test_image_raise_value_error(self, mocker: MockerFixture) -> None: """Test image raise ValueError.""" - project = Mock( - args=Mock(docker=Mock(file=None, image=None, pull=True), runtime=None) - ) + project = Mock(args=Mock(docker=Mock(file=None, image=None, pull=True), runtime=None)) build_image = mocker.patch.object(DockerDependencyInstaller, "build_image") pull_image = mocker.patch.object(DockerDependencyInstaller, "pull_image") obj = DockerDependencyInstaller(project, client=Mock()) - with pytest.raises(ValueError) as excinfo: + with pytest.raises(ValueError, match="docker.file, docker.image, or runtime is required"): assert not obj.image build_image.assert_not_called() pull_image.assert_not_called() - assert str(excinfo.value) == "docker.file, docker.image, or runtime is required" def test_install(self, mocker: MockerFixture) -> None: """Test install.""" @@ -310,7 +290,7 @@ def test_install(self, mocker: MockerFixture) -> None: obj = DockerDependencyInstaller(Mock(), client=Mock()) assert not obj.install() run_command.assert_has_calls( - [ # type: ignore + [ call(pre_install_commands[0]), call(install_commands[0]), call(post_install_commands[0]), @@ -320,7 +300,8 @@ def test_install(self, mocker: MockerFixture) -> None: def test_install_commands(self) -> None: """Test install_commands.""" obj = DockerDependencyInstaller(Mock(), client=Mock()) - assert not obj.install_commands and isinstance(obj.install_commands, list) + assert not obj.install_commands + assert isinstance(obj.install_commands, list) @pytest.mark.parametrize("level", [logging.INFO, logging.DEBUG]) def test_log_docker_msg_bytes(self, level: int, mocker: 
MockerFixture) -> None: @@ -328,9 +309,7 @@ def test_log_docker_msg_bytes(self, level: int, mocker: MockerFixture) -> None: msg = "foobar" obj = DockerDependencyInstaller(Mock(), client=Mock()) docker_logger = mocker.patch.object(obj, "_docker_logger") - assert obj.log_docker_msg_bytes(iter([f"{msg}\n".encode()]), level=level) == [ - msg - ] + assert obj.log_docker_msg_bytes(iter([f"{msg}\n".encode()]), level=level) == [msg] docker_logger.log.assert_called_once_with(level, msg) @pytest.mark.parametrize("level", [logging.INFO, logging.DEBUG]) @@ -352,12 +331,12 @@ def test_log_docker_msg_dict(self, level: int, mocker: MockerFixture) -> None: ) == msgs[:-1] ) - docker_logger.log.assert_has_calls([call(level, m) for m in msgs[:-1]]) # type: ignore + docker_logger.log.assert_has_calls([call(level, m) for m in msgs[:-1]]) def test_post_install_commands( self, mocker: MockerFixture, - platform_linux: None, # pylint: disable=unused-argument + platform_linux: None, # noqa: ARG002 ) -> None: """Test post_install_commands.""" # these methods don't exist on windows so they need to be mocked @@ -373,15 +352,13 @@ def test_post_install_commands( def test_post_install_commands_cache_dir( self, mocker: MockerFixture, - platform_linux: None, # pylint: disable=unused-argument + platform_linux: None, # noqa: ARG002 ) -> None: """Test post_install_commands with cache_dir.""" # these methods don't exist on windows so they need to be mocked getgid = mocker.patch(f"{MODULE}.os.getgid", create=True, return_value=3) getuid = mocker.patch(f"{MODULE}.os.getuid", create=True, return_value=4) - obj = DockerDependencyInstaller( - Mock(args=Mock(docker=Mock(extra_files=[]))), client=Mock() - ) + obj = DockerDependencyInstaller(Mock(args=Mock(docker=Mock(extra_files=[]))), client=Mock()) assert obj.post_install_commands == [ f"chown -R {getuid.return_value}:{getgid.return_value} /var/task/lambda", f"chown -R {getuid.return_value}:{getgid.return_value} /var/task/cache_dir", @@ -390,7 +367,7 @@ def test_post_install_commands_cache_dir( def test_post_install_commands_extra_files( self, mocker: MockerFixture, - platform_linux: None, # pylint: disable=unused-argument + platform_linux: None, # noqa: ARG002 ) -> None: """Test post_install_commands with extra_files.""" # these methods don't exist on windows so they need to be mocked @@ -406,9 +383,7 @@ def test_post_install_commands_extra_files( f"chown -R {getuid.return_value}:{getgid.return_value} /var/task/lambda", ] - def test_post_install_commands_windows( - self, platform_windows: None # pylint: disable=unused-argument - ) -> None: + def test_post_install_commands_windows(self, platform_windows: None) -> None: # noqa: ARG002 """Test post_install_commands Windows.""" obj = DockerDependencyInstaller( Mock(args=Mock(docker=Mock(extra_files=[])), cache_dir=False), client=Mock() @@ -433,16 +408,15 @@ def test_pre_install_commands_cache_dir(self) -> None: [(False, False), (False, True), (True, True), (True, False)], ) def test_pull_image( - self, caplog: LogCaptureFixture, exists_locally: bool, force: bool + self, caplog: pytest.LogCaptureFixture, exists_locally: bool, force: bool ) -> None: """Test pull_image.""" caplog.set_level(logging.INFO, logger=MODULE) name = "foo:latest" image = Mock(spec=Image, id=FAKE_IMAGE_ID) - if exists_locally: - mock_get = Mock(return_value=image) - else: - mock_get = Mock(side_effect=ImageNotFound("test")) + mock_get = ( + Mock(return_value=image) if exists_locally else Mock(side_effect=ImageNotFound("test")) + ) mock_pull = 
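test_pull_image exercises a get-then-pull pattern: reuse the local image when it exists, otherwise pull from the registry. A sketch of that logic using only documented docker SDK calls (the function name is hypothetical):

from docker import DockerClient
from docker.errors import ImageNotFound
from docker.models.images import Image

def get_or_pull(client: DockerClient, name: str, *, force: bool = False) -> Image:
    if not force:
        try:
            return client.images.get(name)  # use the local copy when present
        except ImageNotFound:
            pass  # fall through to a registry pull
    return client.images.pull(repository=name)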
Mock(return_value=image) assert ( @@ -461,14 +435,10 @@ def test_pull_image( mock_pull.assert_not_called() else: mock_pull.assert_called_once_with(repository=name) - assert caplog.messages == [ - f"image not found; pulling docker image {name}..." - ] + assert caplog.messages == [f"image not found; pulling docker image {name}..."] @pytest.mark.parametrize("command, level", [("foo", logging.DEBUG), ("bar", None)]) - def test_run_command( - self, command: str, level: Optional[int], mocker: MockerFixture - ) -> None: + def test_run_command(self, command: str, level: int | None, mocker: MockerFixture) -> None: """Test run_command.""" container = Mock( logs=Mock(return_value="log-stream"), @@ -478,9 +448,7 @@ def test_run_command( mock_log_docker_msg_bytes = mocker.patch.object( DockerDependencyInstaller, "log_docker_msg_bytes", return_value=["logs"] ) - bind_mounts = mocker.patch.object( - DockerDependencyInstaller, "bind_mounts", ["mount"] - ) + bind_mounts = mocker.patch.object(DockerDependencyInstaller, "bind_mounts", ["mount"]) environment_variables = mocker.patch.object( DockerDependencyInstaller, "environment_variables", {"foo": "bar"} ) @@ -510,9 +478,7 @@ def test_run_command( container.wait.assert_called_once_with() container.remove.assert_called_once_with(force=True) - def test_run_command_container_nonzero_exit_code( - self, mocker: MockerFixture - ) -> None: + def test_run_command_container_nonzero_exit_code(self, mocker: MockerFixture) -> None: """Test run_command container non-zero exit code.""" error_msg = "error msg" container = Mock( @@ -524,9 +490,7 @@ def test_run_command_container_nonzero_exit_code( DockerDependencyInstaller, "log_docker_msg_bytes", return_value=["logs"] ) mocker.patch.object(DockerDependencyInstaller, "bind_mounts", ["mount"]) - mocker.patch.object( - DockerDependencyInstaller, "environment_variables", {"foo": "bar"} - ) + mocker.patch.object(DockerDependencyInstaller, "environment_variables", {"foo": "bar"}) mocker.patch.object(DockerDependencyInstaller, "image", "image") with pytest.raises(DockerExecFailedError) as excinfo: DockerDependencyInstaller( @@ -552,9 +516,7 @@ def test_run_command_container_start_error(self, mocker: MockerFixture) -> None: DockerDependencyInstaller, "log_docker_msg_bytes", return_value=["logs"] ) mocker.patch.object(DockerDependencyInstaller, "bind_mounts", ["mount"]) - mocker.patch.object( - DockerDependencyInstaller, "environment_variables", {"foo": "bar"} - ) + mocker.patch.object(DockerDependencyInstaller, "environment_variables", {"foo": "bar"}) mocker.patch.object(DockerDependencyInstaller, "image", "image") with pytest.raises(DockerException): diff --git a/tests/unit/cfngin/hooks/awslambda/test_source_code.py b/tests/unit/cfngin/hooks/awslambda/test_source_code.py index fe3825061..289cdb2bb 100644 --- a/tests/unit/cfngin/hooks/awslambda/test_source_code.py +++ b/tests/unit/cfngin/hooks/awslambda/test_source_code.py @@ -1,13 +1,12 @@ """Test runway.cfngin.hooks.awslambda.source_code.""" -# pylint: disable=protected-access, unnecessary-dunder-call from __future__ import annotations from pathlib import Path from typing import TYPE_CHECKING +from unittest.mock import Mock, call import pytest -from mock import Mock, call from runway.cfngin.hooks.awslambda.source_code import SourceCode @@ -100,9 +99,7 @@ def test___iter__(self, tmp_path: Path) -> None: ) == 1 ) - gitignore_filter.match.assert_has_calls( - [call(file0), call(file1)], any_order=True - ) + gitignore_filter.match.assert_has_calls([call(file0), call(file1)], 
any_order=True) def test___str__(self, tmp_path: Path) -> None: """Test __str__.""" @@ -117,20 +114,14 @@ def test_add_filter_rule(self, tmp_path: Path) -> None: gitignore_filter = Mock() pattern = "foobar/" src_path = tmp_path / "src" - obj = SourceCode( - src_path, gitignore_filter=gitignore_filter, project_root=tmp_path - ) + obj = SourceCode(src_path, gitignore_filter=gitignore_filter, project_root=tmp_path) assert not obj.add_filter_rule(pattern) - gitignore_filter.add_rule.assert_called_once_with( - pattern=pattern, base_path=src_path - ) + gitignore_filter.add_rule.assert_called_once_with(pattern=pattern, base_path=src_path) def test_md5_hash(self, mocker: MockerFixture, tmp_path: Path) -> None: """Test md5_hash.""" file_hash = Mock(hexdigest="success") - mock_file_hash_class = mocker.patch( - f"{MODULE}.FileHash", return_value=file_hash - ) + mock_file_hash_class = mocker.patch(f"{MODULE}.FileHash", return_value=file_hash) mock_md5 = mocker.patch("hashlib.md5") src_path = tmp_path / "src" src_path.mkdir() diff --git a/tests/unit/cfngin/hooks/docker/image/test_build.py b/tests/unit/cfngin/hooks/docker/image/test__build.py similarity index 75% rename from tests/unit/cfngin/hooks/docker/image/test_build.py rename to tests/unit/cfngin/hooks/docker/image/test__build.py index 95242a65b..1b04cde42 100644 --- a/tests/unit/cfngin/hooks/docker/image/test_build.py +++ b/tests/unit/cfngin/hooks/docker/image/test__build.py @@ -1,15 +1,12 @@ """Test runway.cfngin.hooks.docker.image._build.""" -# pylint: disable=protected-access -# pyright: basic from __future__ import annotations -from pathlib import Path -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING +from unittest.mock import MagicMock import pytest from docker.models.images import Image -from mock import MagicMock from pydantic import ValidationError from runway.cfngin.hooks.docker.data_models import ( @@ -27,15 +24,17 @@ from .....mock_docker.fake_api import FAKE_IMAGE_ID if TYPE_CHECKING: + from pathlib import Path + from pytest_mock import MockerFixture - from .....factories import MockCFNginContext + from .....factories import MockCfnginContext MODULE = "runway.cfngin.hooks.docker.image._build" -@pytest.fixture(scope="function") +@pytest.fixture() def tmp_dockerfile(cd_tmp_path: Path) -> Path: """Create temporary Dockerfile.""" dockerfile = cd_tmp_path / "Dockerfile" @@ -43,20 +42,14 @@ def tmp_dockerfile(cd_tmp_path: Path) -> Path: return dockerfile -def test_build( - cfngin_context: MockCFNginContext, mocker: MockerFixture, tmp_path: Path -) -> None: +def test_build(cfngin_context: MockCfnginContext, mocker: MockerFixture, tmp_path: Path) -> None: """Test build.""" (tmp_path / "Dockerfile").touch() - mock_image = MagicMock( - spec=Image, id=FAKE_IMAGE_ID, tags=MagicMock(return_value=["latest"]) - ) + mock_image = MagicMock(spec=Image, id=FAKE_IMAGE_ID, tags=MagicMock(return_value=["latest"])) mock_logs = [{"stream": "log message\n"}, {"not-stream": "no log"}] - mock_client = MagicMock( - images=MagicMock(build=MagicMock(return_value=(mock_image, mock_logs))) - ) + mock_client = MagicMock(images=MagicMock(build=MagicMock(return_value=(mock_image, mock_logs)))) args = ImageBuildArgs(path=tmp_path) - mocker.patch.object(ImageBuildArgs, "parse_obj", return_value=args) + mocker.patch.object(ImageBuildArgs, "model_validate", return_value=args) mocker.patch.object(DockerHookData, "client", mock_client) docker_hook_data = DockerHookData() mock_from_cfngin_context = mocker.patch.object( @@ -66,10 +59,10 @@ def 
test_build( DockerHookData, "update_context", return_value=docker_hook_data ) cfngin_context.hook_data["docker"] = docker_hook_data - assert build(context=cfngin_context, **args.dict()) == docker_hook_data + assert build(context=cfngin_context, **args.model_dump()) == docker_hook_data mock_from_cfngin_context.assert_called_once_with(cfngin_context) mock_client.images.build.assert_called_once_with( - path=str(args.path), **args.docker.dict() + path=str(args.path), **args.docker.model_dump() ) mock_image.tag.assert_called_once_with(None, tag="latest") mock_image.reload.assert_called_once() @@ -84,7 +77,8 @@ class TestDockerImageBuildApiOptions: def test_field_defaults(self) -> None: """Test field defaults.""" obj = DockerImageBuildApiOptions() - assert not obj.buildargs and isinstance(obj.buildargs, dict) + assert not obj.buildargs + assert isinstance(obj.buildargs, dict) assert obj.custom_context is False assert not obj.extra_hosts assert obj.forcerm is False @@ -111,14 +105,14 @@ class TestImageBuildArgs: ) def test__set_docker_dict( self, - expected: Optional[str], - repo: Optional[str], - tag: Optional[str], + expected: str | None, + repo: str | None, + tag: str | None, tmp_path: Path, ) -> None: """Test _set_docker.""" assert ( - ImageBuildArgs.parse_obj( + ImageBuildArgs.model_validate( { "docker": {"tag": tag} if tag else {}, "path": tmp_path, @@ -134,9 +128,9 @@ def test__set_docker_dict( ) def test__set_docker_model( self, - expected: Optional[str], - repo: Optional[str], - tag: Optional[str], + expected: str | None, + repo: str | None, + tag: str | None, tmp_path: Path, ) -> None: """Test _set_docker.""" @@ -157,7 +151,7 @@ def test__set_ecr_repo_from_dict(self, tmp_path: Path) -> None: "registry_alias": "bar", "aws_region": "us-west-2", } - obj = ImageBuildArgs.parse_obj({"path": tmp_path, "ecr_repo": args}) + obj = ImageBuildArgs.model_validate({"path": tmp_path, "ecr_repo": args}) assert obj.ecr_repo assert obj.ecr_repo.name == args["repo_name"] assert obj.ecr_repo.registry.account_id == args["account_id"] @@ -171,20 +165,15 @@ def test__set_repo(self, tmp_path: Path) -> None: def test__set_repo_ecr(self, tmp_path: Path) -> None: """Test _set_repo ECR.""" repo = ElasticContainerRegistryRepository( - repo_name="test", - registry=ElasticContainerRegistry( - account_id="123456789012", aws_region="us-east-1" - ), + name="test", + registry=ElasticContainerRegistry(account_id="123456789012", aws_region="us-east-1"), ) assert ImageBuildArgs(path=tmp_path, ecr_repo=repo).repo == repo.fqn def test__validate_dockerfile_raise_value_error(self, tmp_path: Path) -> None: """Test _validate_dockerfile raise ValueError.""" - with pytest.raises(ValidationError) as excinfo: + with pytest.raises( + ValidationError, + match="dockerfile\n Value error, Dockerfile does not exist at path provided", + ): assert ImageBuildArgs(dockerfile="invalid", path=tmp_path, repo="something") - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("dockerfile",) - assert errors[0]["msg"].startswith( - "Dockerfile does not exist at path provided: " - ) diff --git a/tests/unit/cfngin/hooks/docker/image/test_push.py b/tests/unit/cfngin/hooks/docker/image/test__push.py similarity index 85% rename from tests/unit/cfngin/hooks/docker/image/test_push.py rename to tests/unit/cfngin/hooks/docker/image/test__push.py index 00bd26ede..bca932b0f 100644 --- a/tests/unit/cfngin/hooks/docker/image/test_push.py +++ b/tests/unit/cfngin/hooks/docker/image/test__push.py @@ -1,13 +1,12 @@ """Test 
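The rewritten ValidationError assertion leans on pydantic v2 rendering a ValueError raised inside a validator as "Value error, <message>" in the exception's string form. A sketch with a hypothetical model standing in for the real ImageBuildArgs:

from pydantic import BaseModel, ValidationError, field_validator

class FakeArgs(BaseModel):  # hypothetical stand-in, for illustration only
    dockerfile: str

    @field_validator("dockerfile")
    @classmethod
    def _validate_dockerfile(cls, v: str) -> str:
        raise ValueError(f"Dockerfile does not exist at path provided: {v}")

try:
    FakeArgs(dockerfile="invalid")
except ValidationError as exc:
    assert "Value error, Dockerfile does not exist" in str(exc)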
runway.cfngin.hooks.docker.image._push.""" -# pylint: disable=no-member -# pyright: basic, reportFunctionMemberAccess=none +# pyright: reportFunctionMemberAccess=none from __future__ import annotations from typing import TYPE_CHECKING +from unittest.mock import call from docker.models.images import Image -from mock import call from runway.cfngin.hooks.docker.data_models import ( DockerImage, @@ -22,19 +21,19 @@ from docker import DockerClient from pytest_mock import MockerFixture - from .....factories import MockCFNginContext + from .....factories import MockCfnginContext MODULE = "runway.cfngin.hooks.docker.image._push" def test_push( - cfngin_context: MockCFNginContext, + cfngin_context: MockCfnginContext, mock_docker_client: DockerClient, mocker: MockerFixture, ) -> None: """Test push.""" args = ImagePushArgs(repo="dkr.test.com/image", tags=["latest", "oldest"]) - mocker.patch.object(ImagePushArgs, "parse_obj", return_value=args) + mocker.patch.object(ImagePushArgs, "model_validate", return_value=args) mocker.patch.object(DockerHookData, "client", mock_docker_client) docker_hook_data = DockerHookData() mock_from_cfngin_context = mocker.patch.object( @@ -44,7 +43,7 @@ def test_push( DockerHookData, "update_context", return_value=docker_hook_data ) cfngin_context.hook_data["docker"] = docker_hook_data - assert push(context=cfngin_context, **args.dict()) == docker_hook_data + assert push(context=cfngin_context, **args.model_dump()) == docker_hook_data mock_from_cfngin_context.assert_called_once_with(cfngin_context) docker_hook_data.client.api.push.assert_has_calls( [call(args.repo, tag=args.tags[0]), call(args.repo, tag=args.tags[1])] @@ -63,7 +62,7 @@ def test__set_ecr_repo_from_dict(self) -> None: "registry_alias": "bar", "aws_region": "us-west-2", } - obj = ImagePushArgs.parse_obj({"ecr_repo": args}) + obj = ImagePushArgs.model_validate({"ecr_repo": args}) assert obj.ecr_repo assert obj.ecr_repo.name == args["repo_name"] assert obj.ecr_repo.registry.account_id == args["account_id"] @@ -78,10 +77,8 @@ def test__set_repo(self) -> None: def test__set_repo_ecr(self) -> None: """Test _set_repo ECR.""" repo = ElasticContainerRegistryRepository( - repo_name="test", - registry=ElasticContainerRegistry( - account_id="123456789012", aws_region="us-east-1" - ), + name="test", + registry=ElasticContainerRegistry(account_id="123456789012", aws_region="us-east-1"), ) assert ImagePushArgs(ecr_repo=repo).repo == repo.fqn diff --git a/tests/unit/cfngin/hooks/docker/image/test_remove.py b/tests/unit/cfngin/hooks/docker/image/test__remove.py similarity index 81% rename from tests/unit/cfngin/hooks/docker/image/test_remove.py rename to tests/unit/cfngin/hooks/docker/image/test__remove.py index fc4419d33..74428ccbb 100644 --- a/tests/unit/cfngin/hooks/docker/image/test_remove.py +++ b/tests/unit/cfngin/hooks/docker/image/test__remove.py @@ -1,13 +1,12 @@ """Test runway.cfngin.hooks.docker.image._remove.""" -# pyright: basic, reportFunctionMemberAccess=none from __future__ import annotations from typing import TYPE_CHECKING +from unittest.mock import call from docker.errors import ImageNotFound from docker.models.images import Image -from mock import call from runway.cfngin.hooks.docker.data_models import ( DockerImage, @@ -22,13 +21,13 @@ from docker import DockerClient from pytest_mock import MockerFixture - from .....factories import MockCFNginContext + from .....factories import MockCfnginContext MODULE = "runway.cfngin.hooks.docker.image._remove" def test_remove( - cfngin_context: MockCFNginContext, + 
cfngin_context: MockCfnginContext, mock_docker_client: DockerClient, mocker: MockerFixture, ) -> None: @@ -37,7 +36,7 @@ def test_remove( tags = ["latest", "oldest"] image = DockerImage(image=Image({"RepoTags": [f"{repo}:{tag}" for tag in tags]})) args = ImageRemoveArgs(force=True, image=image, tags=["latest", "oldest"]) - mocker.patch.object(ImageRemoveArgs, "parse_obj", return_value=args) + mocker.patch.object(ImageRemoveArgs, "model_validate", return_value=args) mocker.patch.object(DockerHookData, "client", mock_docker_client) docker_hook_data = DockerHookData() docker_hook_data.image = image @@ -54,28 +53,23 @@ def test_remove( ) mock_from_cfngin_context.assert_called_once_with(cfngin_context) docker_hook_data.client.api.remove_image.assert_has_calls( # type: ignore - [ - call(force=True, image=f"{args.repo}:{tag}", noprune=False) - for tag in args.tags - ] + [call(force=True, image=f"{args.repo}:{tag}", noprune=False) for tag in args.tags] ) assert docker_hook_data.image is None mock_update_context.assert_called_once_with(cfngin_context) def test_remove_image_not_found( - cfngin_context: MockCFNginContext, + cfngin_context: MockCfnginContext, mock_docker_client: DockerClient, mocker: MockerFixture, ) -> None: """Test remove ImageNotFound.""" args = ImageRemoveArgs(repo="dkr.test.com/image", tags=["latest"]) - mocker.patch.object(ImageRemoveArgs, "parse_obj", return_value=args) + mocker.patch.object(ImageRemoveArgs, "model_validate", return_value=args) mocker.patch.object(DockerHookData, "client", mock_docker_client) docker_hook_data = DockerHookData() - mocker.patch.object( - DockerHookData, "from_cfngin_context", return_value=docker_hook_data - ) + mocker.patch.object(DockerHookData, "from_cfngin_context", return_value=docker_hook_data) mock_update_context = mocker.patch.object( DockerHookData, "update_context", return_value=docker_hook_data ) @@ -83,13 +77,9 @@ def test_remove_image_not_found( docker_hook_data.client.api.remove_image.side_effect = ImageNotFound( # type: ignore f"{args.repo}:latest" ) - assert remove(context=cfngin_context, **args.dict()) == docker_hook_data - # pylint: disable=no-member + assert remove(context=cfngin_context, **args.model_dump()) == docker_hook_data docker_hook_data.client.api.remove_image.assert_has_calls( # type: ignore - [ - call(force=False, image=f"{args.repo}:{tag}", noprune=False) - for tag in args.tags - ] + [call(force=False, image=f"{args.repo}:{tag}", noprune=False) for tag in args.tags] ) mock_update_context.assert_called_once_with(cfngin_context) @@ -105,7 +95,7 @@ def test__set_ecr_repo_from_dict(self) -> None: "registry_alias": "bar", "aws_region": "us-west-2", } - obj = ImageRemoveArgs.parse_obj({"ecr_repo": args}) + obj = ImageRemoveArgs.model_validate({"ecr_repo": args}) assert obj.ecr_repo assert obj.ecr_repo.name == args["repo_name"] assert obj.ecr_repo.registry.account_id == args["account_id"] @@ -120,10 +110,8 @@ def test__set_repo(self) -> None: def test__set_repo_ecr(self) -> None: """Test _set_repo ECR.""" repo = ElasticContainerRegistryRepository( - repo_name="test", - registry=ElasticContainerRegistry( - account_id="123456789012", aws_region="us-east-1" - ), + name="test", + registry=ElasticContainerRegistry(account_id="123456789012", aws_region="us-east-1"), ) assert ImageRemoveArgs(ecr_repo=repo).repo == repo.fqn diff --git a/tests/unit/cfngin/hooks/docker/test_login.py b/tests/unit/cfngin/hooks/docker/test__login.py similarity index 71% rename from tests/unit/cfngin/hooks/docker/test_login.py rename to 
tests/unit/cfngin/hooks/docker/test__login.py index debd92446..23ba5e2df 100644 --- a/tests/unit/cfngin/hooks/docker/test_login.py +++ b/tests/unit/cfngin/hooks/docker/test__login.py @@ -1,10 +1,9 @@ """Test runway.cfngin.hooks.docker._login.""" -# pyright: basic from __future__ import annotations from copy import deepcopy -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING import pytest @@ -17,19 +16,19 @@ from docker import DockerClient from pytest_mock import MockerFixture - from ....factories import MockCFNginContext + from ....factories import MockCfnginContext MODULE = "runway.cfngin.hooks.docker._login" def test_login( - cfngin_context: MockCFNginContext, + cfngin_context: MockCfnginContext, mock_docker_client: DockerClient, mocker: MockerFixture, ) -> None: """Test login.""" args = LoginArgs(password="p@ssword", registry="dkr.test.com", username="test-user") - mocker.patch.object(LoginArgs, "parse_obj", return_value=args) + mocker.patch.object(LoginArgs, "model_validate", return_value=args) mock_login = mocker.patch.object(mock_docker_client, "login") mocker.patch.object(DockerHookData, "client", mock_docker_client) docker_hook_data = DockerHookData() @@ -40,9 +39,9 @@ def test_login( DockerHookData, "update_context", return_value=docker_hook_data ) cfngin_context.hook_data["docker"] = docker_hook_data - assert login(context=cfngin_context, **args.dict()) == docker_hook_data + assert login(context=cfngin_context, **args.model_dump()) == docker_hook_data mock_from_cfngin_context.assert_called_once_with(cfngin_context) - mock_login.assert_called_once_with(**args.dict()) + mock_login.assert_called_once_with(**args.model_dump()) mock_update_context.assert_called_once_with(cfngin_context) @@ -52,16 +51,16 @@ class TestLoginArgs: def test__set_ecr(self, mocker: MockerFixture) -> None: """Test _set_ecr.""" expected = ElasticContainerRegistry(alias="foobar") - mock_parse_obj = mocker.patch.object( - ElasticContainerRegistry, "parse_obj", return_value=expected + mock_model_validate = mocker.patch.object( + ElasticContainerRegistry, "model_validate", return_value=expected ) assert ( - LoginArgs.parse_obj( - {"ecr": expected.dict(), "password": "", "username": ""} + LoginArgs.model_validate( + {"ecr": expected.model_dump(), "password": "", "username": ""} ).ecr == expected ) - mock_parse_obj.assert_called_once_with({"context": None, **expected.dict()}) + mock_model_validate.assert_called_once_with({"context": None, **expected.model_dump()}) @pytest.mark.parametrize( "ecr, registry, expected", @@ -72,28 +71,23 @@ def test__set_ecr(self, mocker: MockerFixture) -> None: ( ElasticContainerRegistry(alias="foobar"), None, - ElasticContainerRegistry.PUBLIC_URI_TEMPLATE.format( - registry_alias="foobar" - ), + ElasticContainerRegistry.PUBLIC_URI_TEMPLATE.format(registry_alias="foobar"), ), ], ) def test__set_registry( self, - ecr: Optional[ElasticContainerRegistry], - expected: Optional[str], - registry: Optional[str], + ecr: ElasticContainerRegistry | None, + expected: str | None, + registry: str | None, ) -> None: """Test _set_registry.""" - assert ( - LoginArgs(ecr=ecr, password="", registry=registry, username="").registry - == expected - ) + assert LoginArgs(ecr=ecr, password="", registry=registry, username="").registry == expected def test_field_defaults(self) -> None: """Test field defaults.""" args = {"password": "p@ssword", "username": "test-user"} - obj = LoginArgs.parse_obj(deepcopy(args)) + obj = LoginArgs.model_validate(deepcopy(args)) assert not 
obj.dockercfg_path assert not obj.ecr assert not obj.email diff --git a/tests/unit/cfngin/hooks/docker/test_data_models.py b/tests/unit/cfngin/hooks/docker/test_data_models.py index 5a2d44c33..1ed2fc731 100644 --- a/tests/unit/cfngin/hooks/docker/test_data_models.py +++ b/tests/unit/cfngin/hooks/docker/test_data_models.py @@ -1,14 +1,12 @@ """Test runway.cfngin.hooks.docker.data_models.""" -# pylint: disable=protected-access,redefined-outer-name -# pyright: basic from __future__ import annotations from typing import TYPE_CHECKING +from unittest.mock import MagicMock import pytest from docker.models.images import Image -from mock import MagicMock from pydantic import ValidationError from runway.cfngin.hooks.docker.data_models import ( @@ -19,7 +17,7 @@ from runway.utils import MutableMap if TYPE_CHECKING: - from ....factories import MockCFNginContext + from ....factories import MockCfnginContext MODULE = "runway.cfngin.hooks.docker.data_models" MOCK_IMAGE_REPO = "dkr.test.com/image" @@ -31,7 +29,7 @@ } -@pytest.fixture(scope="function") +@pytest.fixture() def mock_image() -> MagicMock: """Return a mock docker.models.images.Image.""" return MagicMock(spec=Image, **MOCK_IMAGE_PROPS) @@ -71,9 +69,7 @@ class TestElasticContainerRegistry: def test_fqn_private(self) -> None: """Test fqn private.""" - obj = ElasticContainerRegistry( - account_id="123456789012", aws_region="us-east-1" - ) + obj = ElasticContainerRegistry(account_id="123456789012", aws_region="us-east-1") assert obj.fqn == "123456789012.dkr.ecr.us-east-1.amazonaws.com/" def test_fqn_public(self) -> None: @@ -81,7 +77,7 @@ def test_fqn_public(self) -> None: obj = ElasticContainerRegistry(alias="test") assert obj.fqn == "public.ecr.aws/test/" - def test_init_default(self, cfngin_context: MockCFNginContext) -> None: + def test_init_default(self, cfngin_context: MockCfnginContext) -> None: """Test init default values.""" account_id = "123456789012" sts_stubber = cfngin_context.add_stubber("sts") @@ -95,7 +91,7 @@ def test_init_default(self, cfngin_context: MockCFNginContext) -> None: ) with sts_stubber: - obj = ElasticContainerRegistry(context=cfngin_context) + obj = ElasticContainerRegistry.model_validate({"context": cfngin_context}) sts_stubber.assert_no_pending_responses() assert obj.account_id == account_id assert obj.alias is None @@ -104,12 +100,8 @@ def test_init_default(self, cfngin_context: MockCFNginContext) -> None: def test_init_no_context(self) -> None: """Test init with no context.""" - with pytest.raises(ValidationError) as excinfo: + with pytest.raises(ValidationError, match="context is required to resolve values"): ElasticContainerRegistry() - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("__root__",) - assert errors[0]["msg"] == "context is required to resolve values" def test_init_private(self) -> None: """Test init private.""" @@ -133,15 +125,15 @@ def test_init_public(self) -> None: class TestElasticContainerRegistryRepository: """Test runway.cfngin.hooks.docker.data_models.ElasticContainerRegistryRepository.""" - def test_fqn(self, cfngin_context: MockCFNginContext) -> None: + def test_fqn(self, cfngin_context: MockCfnginContext) -> None: """Test fqn.""" account_id = "123456789012" region = "us-east-1" obj = ElasticContainerRegistryRepository( - repo_name="something", - registry=ElasticContainerRegistry( - account_id=account_id, aws_region=region, context=cfngin_context + name="something", + registry=ElasticContainerRegistry.model_validate( + {"account_id": 
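# ---
# @pytest.fixture(scope="function") is rewritten as @pytest.fixture() in this
# patch because "function" is already the default scope, so behavior is
# unchanged. Sketch (the fixture name is illustrative):
import pytest


@pytest.fixture()  # equivalent to @pytest.fixture(scope="function")
def example_fixture() -> str:
    """Recreated for every test that requests it."""
    return "value"
# ---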
account_id, "aws_region": region, "context": cfngin_context} ), ) assert obj.fqn == f"{obj.registry.fqn}{obj.name}" diff --git a/tests/unit/cfngin/hooks/docker/test_hook_data.py b/tests/unit/cfngin/hooks/docker/test_hook_data.py index 6ce162b4c..49e3613c3 100644 --- a/tests/unit/cfngin/hooks/docker/test_hook_data.py +++ b/tests/unit/cfngin/hooks/docker/test_hook_data.py @@ -1,6 +1,5 @@ """Test runway.cfngin.hooks.docker.hook_data.""" -# pyright: basic from __future__ import annotations from typing import TYPE_CHECKING @@ -10,7 +9,7 @@ if TYPE_CHECKING: from pytest_mock import MockerFixture - from ....factories import MockCFNginContext + from ....factories import MockCfnginContext MODULE = "runway.cfngin.hooks.docker.hook_data" @@ -24,7 +23,7 @@ def test_client(self, mocker: MockerFixture) -> None: obj = DockerHookData() assert obj.client == mock_local_client.from_env.return_value - def test_from_cfngin_context(self, cfngin_context: MockCFNginContext) -> None: + def test_from_cfngin_context(self, cfngin_context: MockCfnginContext) -> None: """Test from_cfngin_context.""" obj = DockerHookData.from_cfngin_context(cfngin_context) assert isinstance(obj, DockerHookData) @@ -37,7 +36,7 @@ def test_from_cfngin_context(self, cfngin_context: MockCFNginContext) -> None: # compare instance id as these should NOT be the same instance assert id(obj) != id(new_obj) - def test_update_context(self, cfngin_context: MockCFNginContext) -> None: + def test_update_context(self, cfngin_context: MockCfnginContext) -> None: """Test update_context.""" obj = DockerHookData() assert obj.update_context(cfngin_context) == obj diff --git a/tests/unit/cfngin/hooks/ecr/test__purge_repositroy.py b/tests/unit/cfngin/hooks/ecr/test__purge_repositroy.py index e28c94866..97e4be9f4 100644 --- a/tests/unit/cfngin/hooks/ecr/test__purge_repositroy.py +++ b/tests/unit/cfngin/hooks/ecr/test__purge_repositroy.py @@ -1,9 +1,8 @@ """Test runway.cfngin.hooks.ecr._purge_repository.""" -# pyright: basic from __future__ import annotations -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING import boto3 import pytest @@ -16,7 +15,7 @@ from mypy_boto3_ecr.type_defs import ImageIdentifierTypeDef from pytest_mock import MockerFixture - from ....factories import MockCFNginContext + from ....factories import MockCfnginContext MODULE = "runway.cfngin.hooks.ecr._purge_repository" @@ -25,7 +24,7 @@ def test_delete_ecr_images() -> None: """Test delete_ecr_images.""" client = boto3.client("ecr") stubber = Stubber(client) - image_ids: List[ImageIdentifierTypeDef] = [{"imageDigest": "image0"}] + image_ids: list[ImageIdentifierTypeDef] = [{"imageDigest": "image0"}] repo_name = "test-repo" stubber.add_response( @@ -35,16 +34,14 @@ def test_delete_ecr_images() -> None: ) with stubber: - assert not delete_ecr_images( - client, image_ids=image_ids, repository_name=repo_name - ) + assert not delete_ecr_images(client, image_ids=image_ids, repository_name=repo_name) def test_delete_ecr_images_failures() -> None: """Test delete_ecr_images with failures.""" client = boto3.client("ecr") stubber = Stubber(client) - image_ids: List[ImageIdentifierTypeDef] = [{"imageDigest": "image0"}] + image_ids: list[ImageIdentifierTypeDef] = [{"imageDigest": "image0"}] repo_name = "test-repo" stubber.add_response( @@ -62,7 +59,7 @@ def test_delete_ecr_images_failures() -> None: {"repositoryName": repo_name, "imageIds": image_ids}, ) - with stubber, pytest.raises(ValueError): + with stubber, pytest.raises(ValueError): # noqa: PT011 delete_ecr_images(client, 
image_ids=image_ids, repository_name=repo_name) @@ -109,9 +106,7 @@ def test_list_ecr_images_repository_not_found() -> None: assert list_ecr_images(client, repository_name="test-repo") == [] -def test_purge_repository( - cfngin_context: MockCFNginContext, mocker: MockerFixture -) -> None: +def test_purge_repository(cfngin_context: MockCfnginContext, mocker: MockerFixture) -> None: """Test purge_repository.""" mock_list_ecr_images = mocker.patch( MODULE + ".list_ecr_images", return_value=[{"imageDigest": "abc123"}] @@ -121,18 +116,14 @@ def test_purge_repository( client = cfngin_context.get_session().client("ecr") repo_name = "test-repo" - assert purge_repository(cfngin_context, repository_name=repo_name) == { - "status": "success" - } + assert purge_repository(cfngin_context, repository_name=repo_name) == {"status": "success"} mock_list_ecr_images.assert_called_once_with(client, repository_name=repo_name) mock_delete_ecr_images.assert_called_once_with( client, image_ids=mock_list_ecr_images.return_value, repository_name=repo_name ) -def test_purge_repository_skip( - cfngin_context: MockCFNginContext, mocker: MockerFixture -) -> None: +def test_purge_repository_skip(cfngin_context: MockCfnginContext, mocker: MockerFixture) -> None: """Test purge_repository.""" mock_list_ecr_images = mocker.patch(MODULE + ".list_ecr_images", return_value=[]) mock_delete_ecr_images = mocker.patch(MODULE + ".delete_ecr_images") @@ -140,8 +131,6 @@ def test_purge_repository_skip( client = cfngin_context.get_session().client("ecr") repo_name = "test-repo" - assert purge_repository(cfngin_context, repository_name=repo_name) == { - "status": "skipped" - } + assert purge_repository(cfngin_context, repository_name=repo_name) == {"status": "skipped"} mock_list_ecr_images.assert_called_once_with(client, repository_name=repo_name) mock_delete_ecr_images.assert_not_called() diff --git a/tests/unit/cfngin/hooks/ssm/conftest.py b/tests/unit/cfngin/hooks/ssm/conftest.py index 6379955c0..3701e1e3a 100644 --- a/tests/unit/cfngin/hooks/ssm/conftest.py +++ b/tests/unit/cfngin/hooks/ssm/conftest.py @@ -1,9 +1,8 @@ """Pytest fixtures and plugins.""" -# pylint: disable=redefined-outer-name,unused-argument from __future__ import annotations -from typing import TYPE_CHECKING, cast +from typing import TYPE_CHECKING import pytest @@ -11,16 +10,18 @@ from botocore.stub import Stubber from mypy_boto3_ssm.client import SSMClient - from ....factories import MockCFNginContext + from ....factories import MockCfnginContext -@pytest.fixture(scope="function") -def ssm_client(cfngin_context: MockCFNginContext, ssm_stubber: Stubber) -> SSMClient: +@pytest.fixture() +def ssm_client( + cfngin_context: MockCfnginContext, ssm_stubber: Stubber # noqa: ARG001 +) -> SSMClient: """Create SSM client.""" - return cast("SSMClient", cfngin_context.get_session().client("ssm")) + return cfngin_context.get_session().client("ssm") -@pytest.fixture(scope="function") -def ssm_stubber(cfngin_context: MockCFNginContext) -> Stubber: +@pytest.fixture() +def ssm_stubber(cfngin_context: MockCfnginContext) -> Stubber: """Create SSM stubber.""" return cfngin_context.add_stubber("ssm") diff --git a/tests/unit/cfngin/hooks/ssm/test_parameter.py b/tests/unit/cfngin/hooks/ssm/test_parameter.py index e9a4353a7..d2504e280 100644 --- a/tests/unit/cfngin/hooks/ssm/test_parameter.py +++ b/tests/unit/cfngin/hooks/ssm/test_parameter.py @@ -17,7 +17,6 @@ if TYPE_CHECKING: from botocore.stub import Stubber from mypy_boto3_ssm.client import SSMClient - from pytest import 
LogCaptureFixture from pytest_mock import MockerFixture from runway.context import CfnginContext @@ -46,8 +45,8 @@ def test_field_defaults(self) -> None: def test_name_required(self) -> None: """Test name.""" - with pytest.raises(ValidationError, match="Name\n field required"): - ArgsDataModel.parse_obj({"type": "String"}) + with pytest.raises(ValidationError, match="Name\n Field required"): + ArgsDataModel.model_validate({"type": "String"}) def test_policies_raise_type_error(self) -> None: """Test policies.""" @@ -96,45 +95,43 @@ def test_tags_dict(self) -> None: def test_tags_raise_type_error(self) -> None: """Test tags.""" - with pytest.raises(ValidationError, match="Tags"): - assert not ArgsDataModel.parse_obj( - {"name": "test", "tags": "", "type": "String"} - ) + with pytest.raises(ValidationError, match="tags\n Value error, unexpected type"): + assert not ArgsDataModel.model_validate({"name": "test", "tags": "", "type": "String"}) def test_tier_invalid(self) -> None: """Test tier.""" - with pytest.raises(ValidationError, match="Tier\n unexpected value"): - ArgsDataModel.parse_obj( - {"name": "test", "tier": "invalid", "type": "String"} - ) + with pytest.raises( + ValidationError, + match="tier\n Input should be 'Advanced', 'Intelligent-Tiering' or 'Standard'", + ): + ArgsDataModel.model_validate({"name": "test", "tier": "invalid", "type": "String"}) def test_type_invalid(self) -> None: """Test type.""" - with pytest.raises(ValidationError, match="Type\n unexpected value"): - ArgsDataModel.parse_obj({"name": "test", "type": "invalid"}) + with pytest.raises( + ValidationError, + match="type\n Input should be 'String', 'StringList' or 'SecureString'", + ): + ArgsDataModel.model_validate({"name": "test", "type": "invalid"}) def test_type_required(self) -> None: """Test type.""" - with pytest.raises(ValidationError, match="Type\n field required"): - ArgsDataModel.parse_obj({"name": "test"}) + with pytest.raises(ValidationError, match="Type\n Field required"): + ArgsDataModel.model_validate({"name": "test"}) class TestParameter: """Test Parameter.""" - def test___init__( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test___init__(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> None: """Test __init__.""" args = mocker.patch(f"{MODULE}.ArgsDataModel") - args.parse_obj.return_value = args + args.model_validate.return_value = args data = {"key": "val"} obj = Parameter(cfngin_context, name="test", type="String", **data) assert obj.args == args assert obj.ctx == cfngin_context - args.parse_obj.assert_called_once_with( - {"name": "test", "type": "String", **data} - ) + args.model_validate.assert_called_once_with({"name": "test", "type": "String", **data}) def test_client( self, @@ -144,13 +141,11 @@ ) -> None: """Test client.""" mocker.patch(f"{MODULE}.ArgsDataModel") - assert ( - Parameter(cfngin_context, name="test", type="String").client == ssm_client - ) + assert Parameter(cfngin_context, name="test", type="String").client == ssm_client def test_delete( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cfngin_context: CfnginContext, ssm_stubber: Stubber, ) -> None: @@ -164,7 +159,7 @@ def test_delete_handle_parameter_not_found( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cfngin_context: CfnginContext, ssm_stubber: Stubber, ) -> None: @@ -226,14 +221,11 @@ }, ) with ssm_stubber: - assert ( - Parameter(cfngin_context, force=True, name="test", 
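# ---
# The match= strings change because pydantic v2 renders each error as the field
# location on one line followed by an indented message with a standard prefix
# ("Field required", "Value error, ...", "Input should be ..."). A minimal
# sketch, assuming pydantic>=2; "Demo" is illustrative, not part of this patch.
# Note: pytest.raises(match=...) applies re.search to str(excinfo.value).
import pytest
from pydantic import BaseModel, ValidationError


class Demo(BaseModel):
    name: str


with pytest.raises(ValidationError, match="name\n  Field required"):
    Demo.model_validate({})
# ---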
type="String").get() - == {} - ) + assert Parameter(cfngin_context, force=True, name="test", type="String").get() == {} def test_get_handle_parameter_not_found( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cfngin_context: CfnginContext, ssm_stubber: Stubber, ) -> None: @@ -256,9 +248,7 @@ def test_get_raise_client_error( assert not Parameter(cfngin_context, name="test", type="String").get() ssm_stubber.assert_no_pending_responses() - def test_get_current_tags( - self, cfngin_context: CfnginContext, ssm_stubber: Stubber - ) -> None: + def test_get_current_tags(self, cfngin_context: CfnginContext, ssm_stubber: Stubber) -> None: """Test get_current_tags.""" data = [{"Key": "test-key", "Value": "test-val"}] ssm_stubber.add_response( @@ -267,10 +257,7 @@ def test_get_current_tags( {"ResourceId": "test", "ResourceType": "Parameter"}, ) with ssm_stubber: - assert ( - Parameter(cfngin_context, name="test", type="String").get_current_tags() - == data - ) + assert Parameter(cfngin_context, name="test", type="String").get_current_tags() == data ssm_stubber.assert_no_pending_responses() def test_get_current_tags_empty( @@ -279,10 +266,7 @@ def test_get_current_tags_empty( """Test get_current_tags.""" ssm_stubber.add_response("list_tags_for_resource", {}) with ssm_stubber: - assert ( - Parameter(cfngin_context, name="test", type="String").get_current_tags() - == [] - ) + assert Parameter(cfngin_context, name="test", type="String").get_current_tags() == [] ssm_stubber.assert_no_pending_responses() def test_get_current_tags_handle_invalid_resource_id( @@ -291,10 +275,7 @@ def test_get_current_tags_handle_invalid_resource_id( """Test get_current_tags.""" ssm_stubber.add_client_error("list_tags_for_resource", "InvalidResourceId") with ssm_stubber: - assert ( - Parameter(cfngin_context, name="test", type="String").get_current_tags() - == [] - ) + assert Parameter(cfngin_context, name="test", type="String").get_current_tags() == [] ssm_stubber.assert_no_pending_responses() def test_get_current_tags_handle_parameter_not_found( @@ -303,10 +284,7 @@ def test_get_current_tags_handle_parameter_not_found( """Test get_current_tags.""" ssm_stubber.add_client_error("list_tags_for_resource", "ParameterNotFound") with ssm_stubber: - assert ( - Parameter(cfngin_context, name="test", type="String").get_current_tags() - == [] - ) + assert Parameter(cfngin_context, name="test", type="String").get_current_tags() == [] ssm_stubber.assert_no_pending_responses() def test_get_current_tags_raise_client_error( @@ -315,19 +293,13 @@ def test_get_current_tags_raise_client_error( """Test get_current_tags.""" ssm_stubber.add_client_error("list_tags_for_resource") with ssm_stubber, pytest.raises(ClientError): - assert Parameter( - cfngin_context, name="test", type="String" - ).get_current_tags() + assert Parameter(cfngin_context, name="test", type="String").get_current_tags() ssm_stubber.assert_no_pending_responses() - def test_post_deploy( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test_post_deploy(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> None: """Test post_deploy.""" mock_put = mocker.patch.object(Parameter, "put", return_value="success") - mock_update_tags = mocker.patch.object( - Parameter, "update_tags", return_value=None - ) + mock_update_tags = mocker.patch.object(Parameter, "update_tags", return_value=None) assert ( Parameter(cfngin_context, name="test", type="String").post_deploy() == mock_put.return_value @@ -335,9 +307,7 @@ def 
test_post_deploy( mock_put.assert_called_once_with() mock_update_tags.assert_called_once_with() - def test_post_destroy( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test_post_destroy(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> None: """Test post_destroy.""" mock_delete = mocker.patch.object(Parameter, "delete", return_value="success") assert ( @@ -346,14 +316,10 @@ def test_post_destroy( ) mock_delete.assert_called_once_with() - def test_pre_deploy( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test_pre_deploy(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> None: """Test pre_deploy.""" mock_put = mocker.patch.object(Parameter, "put", return_value="success") - mock_update_tags = mocker.patch.object( - Parameter, "update_tags", return_value=None - ) + mock_update_tags = mocker.patch.object(Parameter, "update_tags", return_value=None) assert ( Parameter(cfngin_context, name="test", type="String").pre_deploy() == mock_put.return_value @@ -361,9 +327,7 @@ def test_pre_deploy( mock_put.assert_called_once_with() mock_update_tags.assert_called_once_with() - def test_pre_destroy( - self, cfngin_context: CfnginContext, mocker: MockerFixture - ) -> None: + def test_pre_destroy(self, cfngin_context: CfnginContext, mocker: MockerFixture) -> None: """Test pre_destroy.""" mock_delete = mocker.patch.object(Parameter, "delete", return_value="success") assert ( @@ -374,7 +338,7 @@ def test_pre_destroy( def test_put( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cfngin_context: CfnginContext, mocker: MockerFixture, ssm_stubber: Stubber, @@ -411,7 +375,7 @@ def test_put( def test_put_handle_parameter_already_exists( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cfngin_context: CfnginContext, mocker: MockerFixture, ssm_stubber: Stubber, @@ -423,8 +387,7 @@ def test_put_handle_parameter_already_exists( ssm_stubber.add_client_error("put_parameter", "ParameterAlreadyExists") with ssm_stubber: assert ( - Parameter(cfngin_context, name="test", type="String", value="foo").put() - == expected + Parameter(cfngin_context, name="test", type="String", value="foo").put() == expected ) assert ( "parameter test already exists; to overwrite it's value, " @@ -433,18 +396,16 @@ def test_put_handle_parameter_already_exists( def test_put_no_value( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cfngin_context: CfnginContext, ) -> None: """Test put.""" caplog.set_level(LogLevels.INFO, MODULE) - assert Parameter( - cfngin_context, name="test", type="String", value=None - ).put() == {"Tier": "Standard", "Version": 0} - assert ( - "skipped putting SSM Parameter; value provided for test is falsy" - in caplog.messages - ) + assert Parameter(cfngin_context, name="test", type="String", value=None).put() == { + "Tier": "Standard", + "Version": 0, + } + assert "skipped putting SSM Parameter; value provided for test is falsy" in caplog.messages def test_put_raise_client_error( self, cfngin_context: CfnginContext, mocker: MockerFixture, ssm_stubber: Stubber @@ -457,9 +418,7 @@ def test_put_raise_client_error( ) ssm_stubber.add_client_error("put_parameter") with ssm_stubber, pytest.raises(ClientError): - assert not Parameter( - cfngin_context, name="test", type="String", value="foo" - ).put() + assert not Parameter(cfngin_context, name="test", type="String", value="foo").put() def test_put_same_value( self, @@ -473,10 +432,7 @@ def test_put_same_value( "get", 
return_value={"Value": "foo", **expected}, ) - assert ( - Parameter(cfngin_context, name="test", type="String", value="foo").put() - == expected - ) + assert Parameter(cfngin_context, name="test", type="String", value="foo").put() == expected mock_get.assert_called_once_with() def test_update_tags( @@ -570,9 +526,7 @@ def test_update_tags_delete_only( ) ssm_stubber.add_client_error("add_tags_to_resource") with ssm_stubber: - assert not Parameter( - cfngin_context, name="test", type="String" - ).update_tags() + assert not Parameter(cfngin_context, name="test", type="String").update_tags() def test_update_tags_delete_only_raise_client_error( self, cfngin_context: CfnginContext, mocker: MockerFixture, ssm_stubber: Stubber @@ -590,7 +544,7 @@ def test_update_tags_delete_only_raise_client_error( def test_update_tags_handle_invalid_resource_id( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, cfngin_context: CfnginContext, mocker: MockerFixture, ssm_stubber: Stubber, diff --git a/tests/unit/cfngin/hooks/staticsite/auth_at_edge/test_user_pool_id_retriever.py b/tests/unit/cfngin/hooks/staticsite/auth_at_edge/test_user_pool_id_retriever.py index a622ac36e..ea7060e21 100644 --- a/tests/unit/cfngin/hooks/staticsite/auth_at_edge/test_user_pool_id_retriever.py +++ b/tests/unit/cfngin/hooks/staticsite/auth_at_edge/test_user_pool_id_retriever.py @@ -20,9 +20,7 @@ ), ], ) -def test_hook_args_parse_obj( - provided: dict[str, str], expected: dict[str, str] -) -> None: +def test_hook_args_parse_obj(provided: dict[str, str], expected: dict[str, str]) -> None: """Test HookArgs.parse_obj.""" kwargs = provided args = HookArgs.parse_obj(kwargs) diff --git a/tests/unit/cfngin/hooks/staticsite/test_cleanup.py b/tests/unit/cfngin/hooks/staticsite/test_cleanup.py index 98881c4a4..8e9b1aec6 100644 --- a/tests/unit/cfngin/hooks/staticsite/test_cleanup.py +++ b/tests/unit/cfngin/hooks/staticsite/test_cleanup.py @@ -16,10 +16,9 @@ if TYPE_CHECKING: from mypy_boto3_cloudformation.type_defs import OutputTypeDef - from pytest import LogCaptureFixture from pytest_mock import MockerFixture - from ....factories import MockCFNginContext + from ....factories import MockCfnginContext MODULE = "runway.cfngin.hooks.staticsite.cleanup" @@ -29,24 +28,19 @@ [ ([], []), ( - [ - {"OutputKey": i, "OutputValue": f"{i}Val"} - for i in REPLICATED_FUNCTION_OUTPUTS - ] + [{"OutputKey": i, "OutputValue": f"{i}Val"} for i in REPLICATED_FUNCTION_OUTPUTS] + [{"OutputKey": "foo", "OutputValue": "bar"}], [f"{i}Val" for i in REPLICATED_FUNCTION_OUTPUTS], ), ], ) -def test_get_replicated_function_names( - expected: list[str], outputs: list[OutputTypeDef] -) -> None: +def test_get_replicated_function_names(expected: list[str], outputs: list[OutputTypeDef]) -> None: """Test get_replicated_function_names.""" assert get_replicated_function_names(outputs) == expected def test_warn( - caplog: LogCaptureFixture, cfngin_context: MockCFNginContext, mocker: MockerFixture + caplog: pytest.LogCaptureFixture, cfngin_context: MockCfnginContext, mocker: MockerFixture ) -> None: """Test warn.""" caplog.set_level(LogLevels.WARNING, MODULE) @@ -81,7 +75,7 @@ def test_warn( def test_warn_ignore_client_error( - caplog: LogCaptureFixture, cfngin_context: MockCFNginContext + caplog: pytest.LogCaptureFixture, cfngin_context: MockCfnginContext ) -> None: """Test warn ignore ClientError.""" caplog.set_level(LogLevels.WARNING, MODULE) diff --git a/tests/unit/cfngin/hooks/staticsite/test_upload_staticsite.py 
b/tests/unit/cfngin/hooks/staticsite/test_upload_staticsite.py index 3bbdefc65..118eb93f7 100644 --- a/tests/unit/cfngin/hooks/staticsite/test_upload_staticsite.py +++ b/tests/unit/cfngin/hooks/staticsite/test_upload_staticsite.py @@ -1,10 +1,9 @@ """Test runway.cfngin.hooks.staticsite.upload_staticsite.""" -# pyright: basic from __future__ import annotations import json -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING import pytest import yaml @@ -17,10 +16,10 @@ get_content_type, sync_extra_files, ) -from runway.module.staticsite.options.models import RunwayStaticSiteExtraFileDataModel +from runway.module.staticsite.options import RunwayStaticSiteExtraFileDataModel if TYPE_CHECKING: - from ....factories import MockCFNginContext + from ....factories import MockCfnginContext @pytest.mark.parametrize( @@ -35,7 +34,7 @@ (".test", None), ], ) -def test_auto_detect_content_type(provided: str, expected: Optional[str]) -> None: +def test_auto_detect_content_type(provided: str, expected: str | None) -> None: """Test auto_detect_content_type.""" assert auto_detect_content_type(provided) == expected @@ -44,26 +43,26 @@ def test_auto_detect_content_type(provided: str, expected: Optional[str]) -> Non "provided, expected", [ ( - RunwayStaticSiteExtraFileDataModel.construct( + RunwayStaticSiteExtraFileDataModel.model_construct( content_type="text/plain", name="test.txt" ), "text/plain", ), ( - RunwayStaticSiteExtraFileDataModel.construct( + RunwayStaticSiteExtraFileDataModel.model_construct( name="test.txt", content_type="text/plain" ), "text/plain", ), ( - RunwayStaticSiteExtraFileDataModel.construct(name="test.json"), + RunwayStaticSiteExtraFileDataModel.model_construct(name="test.json"), "application/json", ), - (RunwayStaticSiteExtraFileDataModel.construct(name="test.txt"), None), + (RunwayStaticSiteExtraFileDataModel.model_construct(name="test.txt"), None), ], ) def test_get_content_type( - provided: RunwayStaticSiteExtraFileDataModel, expected: Optional[str] + provided: RunwayStaticSiteExtraFileDataModel, expected: str | None ) -> None: """Test get_content_type.""" assert get_content_type(provided) == expected @@ -88,9 +87,7 @@ def test_get_content_yaml() -> None: content = {"a": 0} actual = get_content( - RunwayStaticSiteExtraFileDataModel( - content_type="text/yaml", content=content, name="" - ) + RunwayStaticSiteExtraFileDataModel(content_type="text/yaml", content=content, name="") ) expected = yaml.safe_dump(content) @@ -99,7 +96,7 @@ def test_get_content_yaml() -> None: def test_get_content_unknown() -> None: """Get content unknown.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 get_content(RunwayStaticSiteExtraFileDataModel(content={"a": 0}, name="")) @@ -113,16 +110,16 @@ def test_get_content_unsupported() -> None: "a, b", [ ( - RunwayStaticSiteExtraFileDataModel.construct(name="a"), - RunwayStaticSiteExtraFileDataModel.construct(name="b"), + RunwayStaticSiteExtraFileDataModel.model_construct(name="a"), + RunwayStaticSiteExtraFileDataModel.model_construct(name="b"), ), ( - RunwayStaticSiteExtraFileDataModel.construct(name="test", content_type="a"), - RunwayStaticSiteExtraFileDataModel.construct(name="test", content_type="b"), + RunwayStaticSiteExtraFileDataModel.model_construct(name="test", content_type="a"), + RunwayStaticSiteExtraFileDataModel.model_construct(name="test", content_type="b"), ), ( - RunwayStaticSiteExtraFileDataModel.construct(name="test", content="a"), - 
RunwayStaticSiteExtraFileDataModel.construct(name="test", content="b"), + RunwayStaticSiteExtraFileDataModel.model_construct(name="test", content="a"), + RunwayStaticSiteExtraFileDataModel.model_construct(name="test", content="b"), ), ], ) @@ -133,7 +130,7 @@ def test_calculate_hash_of_extra_files( assert calculate_hash_of_extra_files([a]) != calculate_hash_of_extra_files([b]) -def test_sync_extra_files_json_content(cfngin_context: MockCFNginContext) -> None: +def test_sync_extra_files_json_content(cfngin_context: MockCfnginContext) -> None: """Test sync_extra_files json content is put in s3.""" s3_stub = cfngin_context.add_stubber("s3") @@ -153,13 +150,11 @@ def test_sync_extra_files_json_content(cfngin_context: MockCFNginContext) -> Non files = [RunwayStaticSiteExtraFileDataModel(name="test.json", content=content)] with s3_stub as stub: - assert sync_extra_files(cfngin_context, "bucket", extra_files=files) == [ - "test.json" - ] + assert sync_extra_files(cfngin_context, "bucket", extra_files=files) == ["test.json"] stub.assert_no_pending_responses() -def test_sync_extra_files_yaml_content(cfngin_context: MockCFNginContext) -> None: +def test_sync_extra_files_yaml_content(cfngin_context: MockCfnginContext) -> None: """Test sync_extra_files yaml content is put in s3.""" s3_stub = cfngin_context.add_stubber("s3") @@ -176,18 +171,14 @@ def test_sync_extra_files_yaml_content(cfngin_context: MockCFNginContext) -> Non }, ) - files = [ - RunwayStaticSiteExtraFileDataModel.construct(name="test.yaml", content=content) - ] + files = [RunwayStaticSiteExtraFileDataModel.model_construct(name="test.yaml", content=content)] with s3_stub as stub: - assert sync_extra_files(cfngin_context, "bucket", extra_files=files) == [ - "test.yaml" - ] + assert sync_extra_files(cfngin_context, "bucket", extra_files=files) == ["test.yaml"] stub.assert_no_pending_responses() -def test_sync_extra_files_empty_content(cfngin_context: MockCFNginContext) -> None: +def test_sync_extra_files_empty_content(cfngin_context: MockCfnginContext) -> None: """Test sync_extra_files empty content is not uploaded.""" s3_stub = cfngin_context.add_stubber("s3") @@ -196,9 +187,7 @@ def test_sync_extra_files_empty_content(cfngin_context: MockCFNginContext) -> No cfngin_context, "bucket", extra_files=[ - RunwayStaticSiteExtraFileDataModel.construct( - name="test.yaml", content="" - ) + RunwayStaticSiteExtraFileDataModel.model_construct(name="test.yaml", content="") ], ) assert isinstance(result, list) @@ -206,7 +195,7 @@ def test_sync_extra_files_empty_content(cfngin_context: MockCFNginContext) -> No stub.assert_no_pending_responses() -def test_sync_extra_files_file_reference(cfngin_context: MockCFNginContext) -> None: +def test_sync_extra_files_file_reference(cfngin_context: MockCfnginContext) -> None: """Test sync_extra_files file is uploaded.""" s3_stub = cfngin_context.add_stubber("s3") @@ -224,9 +213,7 @@ def test_sync_extra_files_file_reference(cfngin_context: MockCFNginContext) -> N }, ) - files = [ - RunwayStaticSiteExtraFileDataModel.construct(name="test", file=".gitignore") - ] + files = [RunwayStaticSiteExtraFileDataModel.model_construct(name="test", file=".gitignore")] with s3_stub as stub: assert sync_extra_files(cfngin_context, "bucket", extra_files=files) == ["test"] @@ -234,7 +221,7 @@ def test_sync_extra_files_file_reference(cfngin_context: MockCFNginContext) -> N def test_sync_extra_files_file_reference_with_content_type( - cfngin_context: MockCFNginContext, + cfngin_context: MockCfnginContext, ) -> None: """Test 
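# ---
# .construct() becomes .model_construct() in pydantic v2. Both build an
# instance without running validation, which is why these tests can omit
# otherwise-required fields. Sketch, assuming pydantic>=2; "ExtraFile" is
# illustrative, not the real RunwayStaticSiteExtraFileDataModel.
from pydantic import BaseModel


class ExtraFile(BaseModel):
    name: str
    content_type: str


f = ExtraFile.model_construct(name="test.json")  # no validation; content_type stays unset
assert f.name == "test.json"
# ---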
sync_extra_files file is uploaded with the content type.""" s3_stub = cfngin_context.add_stubber("s3") @@ -251,24 +238,20 @@ ) files = [ - RunwayStaticSiteExtraFileDataModel.construct( - name="test.json", file=".gitignore" - ) + RunwayStaticSiteExtraFileDataModel.model_construct(name="test.json", file=".gitignore") ] with s3_stub as stub: - assert sync_extra_files(cfngin_context, "bucket", extra_files=files) == [ - "test.json" - ] + assert sync_extra_files(cfngin_context, "bucket", extra_files=files) == ["test.json"] stub.assert_no_pending_responses() -def test_sync_extra_files_hash_unchanged(cfngin_context: MockCFNginContext) -> None: +def test_sync_extra_files_hash_unchanged(cfngin_context: MockCfnginContext) -> None: """Test sync_extra_files upload is skipped if the hash was unchanged.""" s3_stub = cfngin_context.add_stubber("s3") ssm_stub = cfngin_context.add_stubber("ssm") - extra = RunwayStaticSiteExtraFileDataModel.construct(name="test", content="test") + extra = RunwayStaticSiteExtraFileDataModel.model_construct(name="test", content="test") extra_hash = calculate_hash_of_extra_files([extra]) ssm_stub.add_response( @@ -290,7 +273,7 @@ def test_sync_extra_files_hash_unchanged(cfngin_context: MockCFNginContext) -> N ssm_stub.assert_no_pending_responses() -def test_sync_extra_files_hash_updated(cfngin_context: MockCFNginContext) -> None: +def test_sync_extra_files_hash_updated(cfngin_context: MockCfnginContext) -> None: """Test sync_extra_files extra files hash is updated.""" s3_stub = cfngin_context.add_stubber("s3") ssm_stub = cfngin_context.add_stubber("ssm") @@ -324,7 +307,7 @@ { "Bucket": "bucket", "Key": "test", - "Body": "test".encode(), + "Body": b"test", "ContentType": "text/plain", }, ) diff --git a/tests/unit/cfngin/hooks/staticsite/test_utils.py b/tests/unit/cfngin/hooks/staticsite/test_utils.py index 1a97b9eaa..d88ca7651 100644 --- a/tests/unit/cfngin/hooks/staticsite/test_utils.py +++ b/tests/unit/cfngin/hooks/staticsite/test_utils.py @@ -2,11 +2,11 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Dict, List, Optional, Union, cast +from typing import TYPE_CHECKING, cast +from unittest.mock import Mock, call import igittigitt import pytest -from mock import Mock, call from runway.cfngin.hooks.staticsite.utils import ( calculate_hash_of_files, @@ -29,10 +29,7 @@ def test_calculate_hash_of_files(mocker: MockerFixture, tmp_path: Path) -> None: file0 = tmp_path / "nested" / "file0.txt" file1 = tmp_path / "file1.txt" - assert ( - calculate_hash_of_files([file0, file1], tmp_path) - == mock_file_hash_obj.hexdigest - ) + assert calculate_hash_of_files([file0, file1], tmp_path) == mock_file_hash_obj.hexdigest mock_file_hash_obj.add_files.assert_called_once_with( [str(file1), str(file0)], relative_to=tmp_path ) @@ -42,7 +39,7 @@ "directories", [None, [{"path": "./"}], [{"path": "./", "exclusions": ["foobar"]}]] ) def test_get_hash_of_files( - directories: Optional[List[Dict[str, Union[List[str], str]]]], + directories: list[dict[str, list[str] | str | None]] | None, mocker: MockerFixture, tmp_path: Path, ) -> None: @@ -74,14 +71,11 @@ gitignore.add_rule("exclude/", tmp_path) if directories: - assert ( - get_hash_of_files(tmp_path, directories) - == mock_calculate_hash_of_files.return_value - ) + assert 
get_hash_of_files(tmp_path, directories) == mock_calculate_hash_of_files.return_value else: assert get_hash_of_files(tmp_path) == mock_calculate_hash_of_files.return_value mock_get_ignorer.assert_has_calls( - [ # type: ignore + [ call(tmp_path / cast(str, i["path"]), i.get("exclusions")) for i in (directories or [{"path": "./"}]) ] @@ -91,7 +85,7 @@ def test_get_hash_of_files( @pytest.mark.parametrize("additional_exclusions", [None, [], ["foo"], ["foo", "bar"]]) def test_get_ignorer( - additional_exclusions: Optional[List[str]], mocker: MockerFixture, tmp_path: Path + additional_exclusions: list[str] | None, mocker: MockerFixture, tmp_path: Path ) -> None: """Test get_ignorer.""" ignore_parser = mocker.patch(f"{MODULE}.igittigitt.IgnoreParser") @@ -103,8 +97,6 @@ def test_get_ignorer( ignore_parser.parse_rule_files.assert_called_once_with(tmp_path) if additional_exclusions: - ignore_parser.add_rule.assert_has_calls( - [call(i, tmp_path) for i in additional_exclusions] - ) + ignore_parser.add_rule.assert_has_calls([call(i, tmp_path) for i in additional_exclusions]) else: ignore_parser.add_rule.assert_not_called() diff --git a/tests/unit/cfngin/hooks/test_acm.py b/tests/unit/cfngin/hooks/test_acm.py index 5dd431a05..9f2d095d5 100644 --- a/tests/unit/cfngin/hooks/test_acm.py +++ b/tests/unit/cfngin/hooks/test_acm.py @@ -1,20 +1,18 @@ """Tests for runway.cfngin.hooks.acm.""" -# pylint: disable=protected-access,unused-argument -# pyright: basic, reportUnknownArgumentType=none, reportUnknownVariableType=none +# pyright: reportUnknownArgumentType=none, reportUnknownVariableType=none # pyright: reportUnknownLambdaType=none from __future__ import annotations from datetime import datetime -from typing import TYPE_CHECKING, Any, Dict, NoReturn, Union, cast +from typing import TYPE_CHECKING, Any, Literal, NoReturn, cast +from unittest.mock import MagicMock import boto3 import pytest from botocore.exceptions import ClientError from botocore.stub import ANY, Stubber -from mock import MagicMock from troposphere.certificatemanager import Certificate as CertificateResource -from typing_extensions import Literal from runway.cfngin.exceptions import ( StackDoesNotExist, @@ -34,21 +32,24 @@ ChangeTypeDef, ResourceRecordSetTypeDef, ) - from pytest import MonkeyPatch - from ...factories import MockCFNginContext + from ...factories import MockCfnginContext STATUS = MutableMap( - **{ - "failed": FAILED, - "new": SubmittedStatus("creating new stack"), - "no": NO_CHANGE, - "recreate": SubmittedStatus("destroying stack for re-creation"), - "update": SubmittedStatus("updating existing stack"), - } + failed=FAILED, + new=SubmittedStatus("creating new stack"), + no=NO_CHANGE, + recreate=SubmittedStatus("destroying stack for re-creation"), + update=SubmittedStatus("updating existing stack"), ) +@pytest.fixture(autouse=True) +def sub_s3(cfngin_context: MockCfnginContext) -> None: + """Sub s3 for MockCfnginContext as this hook uses a ``cached_property`` that creates it.""" + cfngin_context.add_stubber("s3") + + def check_bool_is_true(val: Any) -> bool: """Check if a value is a true bool.""" if val and isinstance(val, bool): @@ -63,7 +64,7 @@ def check_bool_is_false(val: Any) -> bool: raise ValueError(f'Value should be "False"; got {val}') -def gen_certificate(**kwargs: Any) -> Dict[str, Any]: +def gen_certificate(**kwargs: Any) -> dict[str, Any]: """Generate a response to describe_certificate.""" data = { "CertificateArn": kwargs.pop("CertificateArn"), @@ -81,7 +82,7 @@ def gen_change( return {"Action": action, 
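# ---
# typing.Optional/List/Dict/Union give way to the PEP 604/585 spellings in this
# patch. Because "from __future__ import annotations" is already at the top of
# these modules, annotations are evaluated lazily and the new syntax parses
# even on the older interpreters Runway supports. Sketch (hypothetical
# signature, echoing the annotation rewritten above):
from __future__ import annotations


def hash_directories(
    directories: list[dict[str, list[str] | str]] | None = None,
) -> dict[str, str | None]:
    """Was: Optional[List[Dict[str, Union[List[str], str]]]] and friends."""
    return {str(d): None for d in (directories or [])}
# ---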
"ResourceRecordSet": record_set} -def gen_change_batch(changes: Any = ANY, comment: Any = ANY) -> Dict[str, Any]: +def gen_change_batch(changes: Any = ANY, comment: Any = ANY) -> dict[str, Any]: """Generate expected change batch.""" return {"Comment": comment, "Changes": changes} @@ -119,9 +120,9 @@ def gen_domain_validation_option(**kwargs: Any) -> DomainValidationTypeDef: def gen_record_set( use_resource_record: bool = False, **kwargs: Any -) -> Union[ResourceRecordSetTypeDef, ResourceRecordTypeDef]: +) -> ResourceRecordSetTypeDef | ResourceRecordTypeDef: """Generate a record set.""" - data: Dict[str, Any] = { + data: dict[str, Any] = { "Name": "placeholder_name", "Type": "CNAME", "Value": "placeholder_value", @@ -152,11 +153,11 @@ def gen_stack_resource(**kwargs: Any) -> StackResourceTypeDef: class TestCertificate: """Tests for runway.cfngin.hooks.acm.Certificate.""" - def test_attributes(self, cfngin_context: MockCFNginContext) -> None: + def test_attributes(self, cfngin_context: MockCfnginContext) -> None: """Test attributes set during __init__.""" # setup context - cfngin_context.add_stubber("acm", "us-east-1") - cfngin_context.add_stubber("route53", "us-east-1") + cfngin_context.add_stubber("acm", region="us-east-1") + cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" result = Certificate( @@ -177,6 +178,7 @@ def test_attributes(self, cfngin_context: MockCFNginContext) -> None: assert result.properties["ValidationMethod"] == "DNS" # blueprint attributes + assert result.blueprint assert result.blueprint.VARIABLES["DomainName"] assert result.blueprint.VARIABLES["ValidateRecordTTL"] @@ -187,9 +189,7 @@ def test_attributes(self, cfngin_context: MockCFNginContext) -> None: assert not template.conditions assert not template.mappings assert template.outputs["DomainName"].Value.to_dict() == {"Ref": "DomainName"} - assert template.outputs["ValidateRecordTTL"].Value.to_dict() == { - "Ref": "ValidateRecordTTL" - } + assert template.outputs["ValidateRecordTTL"].Value.to_dict() == {"Ref": "ValidateRecordTTL"} assert not template.parameters assert isinstance(template.resources["Certificate"], CertificateResource) assert not template.rules @@ -197,14 +197,15 @@ def test_attributes(self, cfngin_context: MockCFNginContext) -> None: assert not template.transform # stack attributes + assert result.stack assert result.stack.fqn == "test-stack-name" - assert result.stack._blueprint == result.blueprint + assert result.stack.blueprint == result.blueprint # type: ignore - def test_domain_changed(self, cfngin_context: MockCFNginContext) -> None: + def test_domain_changed(self, cfngin_context: MockCfnginContext) -> None: """Test for domain_changed.""" # setup context - cfngin_context.add_stubber("acm", "us-east-1") - cfngin_context.add_stubber("route53", "us-east-1") + cfngin_context.add_stubber("acm", region="us-east-1") + cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" provider = MagicMock() @@ -245,12 +246,12 @@ def test_domain_changed(self, cfngin_context: MockCFNginContext) -> None: assert not cert.domain_changed() def test_get_certificate( - self, cfngin_context: MockCFNginContext, patch_time: None + self, cfngin_context: MockCfnginContext, mock_sleep: None # noqa: ARG002 ) -> None: """Test get_certificate.""" # setup context - cfngin_context.add_stubber("acm", "us-east-1") - cfngin_context.add_stubber("route53", "us-east-1") + cfngin_context.add_stubber("acm", region="us-east-1") + 
cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" provider = MagicMock(cloudformation=boto3.client("cloudformation")) @@ -290,22 +291,20 @@ def test_get_certificate( @pytest.mark.parametrize("status", ["PENDING_VALIDATION", "SUCCESS", "FAILED"]) def test_get_validation_record( self, - cfngin_context: MockCFNginContext, - monkeypatch: MonkeyPatch, - patch_time: None, + cfngin_context: MockCfnginContext, + monkeypatch: pytest.MonkeyPatch, + mock_sleep: None, # noqa: ARG002 status: str, ) -> None: """Test get_validation_record.""" # setup context - acm_stubber = cfngin_context.add_stubber("acm", "us-east-1") - cfngin_context.add_stubber("route53", "us-east-1") + acm_stubber = cfngin_context.add_stubber("acm", region="us-east-1") + cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" cert_arn = "arn:aws:acm:us-east-1:012345678901:certificate/test" expected_request = {"CertificateArn": cert_arn} - validate_option_missing_record = gen_domain_validation_option( - ValidationStatus=status - ) + validate_option_missing_record = gen_domain_validation_option(ValidationStatus=status) del validate_option_missing_record["ResourceRecord"] cert = Certificate( @@ -333,17 +332,15 @@ def test_get_validation_record( "describe_certificate", gen_certificate( CertificateArn=cert_arn, - DomainValidationOptions=[ - gen_domain_validation_option(ValidationStatus=status) - ], + DomainValidationOptions=[gen_domain_validation_option(ValidationStatus=status)], ), expected_request, ) with acm_stubber: - assert cert.get_validation_record( - status=status - ) == gen_domain_validation_option().get("ResourceRecord") + assert cert.get_validation_record(status=status) == gen_domain_validation_option().get( + "ResourceRecord" + ) acm_stubber.assert_no_pending_responses() @pytest.mark.parametrize( @@ -355,12 +352,12 @@ def test_get_validation_record( ], ) def test_get_validation_record_status_mismatch( - self, cfngin_context: MockCFNginContext, check: str, found: str + self, cfngin_context: MockCfnginContext, check: str, found: str ) -> None: """Test get get_validation_record with a mismatched record status.""" # setup context - acm_stubber = cfngin_context.add_stubber("acm", "us-east-1") - cfngin_context.add_stubber("route53", "us-east-1") + acm_stubber = cfngin_context.add_stubber("acm", region="us-east-1") + cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" cert_arn = "arn:aws:acm:us-east-1:012345678901:certificate/test" @@ -377,26 +374,20 @@ def test_get_validation_record_status_mismatch( "describe_certificate", gen_certificate( CertificateArn=cert_arn, - DomainValidationOptions=[ - gen_domain_validation_option(ValidationStatus=found) - ], + DomainValidationOptions=[gen_domain_validation_option(ValidationStatus=found)], ), expected_request, ) - with acm_stubber, pytest.raises(ValueError) as excinfo: + with acm_stubber, pytest.raises(ValueError, match="No validations with status"): cert.get_validation_record(cert_arn=cert_arn, status=check) - - assert "No validations with status" in str(excinfo.value) acm_stubber.assert_no_pending_responses() - def test_get_validation_record_gt_one( - self, cfngin_context: MockCFNginContext - ) -> None: + def test_get_validation_record_gt_one(self, cfngin_context: MockCfnginContext) -> None: """Test get get_validation_record more than one result.""" # setup context - acm_stubber = cfngin_context.add_stubber("acm", "us-east-1") - 
cfngin_context.add_stubber("route53", "us-east-1") + acm_stubber = cfngin_context.add_stubber("acm", region="us-east-1") + cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" cert_arn = "arn:aws:acm:us-east-1:012345678901:certificate/test" @@ -421,17 +412,18 @@ def test_get_validation_record_gt_one( expected_request, ) - with acm_stubber, pytest.raises(ValueError) as excinfo: + with ( + acm_stubber, + pytest.raises(ValueError, match="only one option is supported"), + ): cert.get_validation_record(cert_arn=cert_arn) - - assert "only one option is supported" in str(excinfo.value) acm_stubber.assert_no_pending_responses() - def test_put_record_set(self, cfngin_context: MockCFNginContext) -> None: + def test_put_record_set(self, cfngin_context: MockCfnginContext) -> None: """Test put_record.""" # setup context - cfngin_context.add_stubber("acm", "us-east-1") - r53_stubber = cfngin_context.add_stubber("route53", "us-east-1") + cfngin_context.add_stubber("acm", region="us-east-1") + r53_stubber = cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" cert = Certificate( @@ -451,9 +443,7 @@ def test_put_record_set(self, cfngin_context: MockCFNginContext) -> None: gen_change( record_set=cast( "ResourceRecordSetTypeDef", - gen_record_set( - use_resource_record=True, TTL=cert.args.ttl - ), + gen_record_set(use_resource_record=True, TTL=cert.args.ttl), ) ) ] @@ -462,18 +452,16 @@ def test_put_record_set(self, cfngin_context: MockCFNginContext) -> None: ) with r53_stubber: - assert not cert.put_record_set( - cast("ResourceRecordTypeDef", gen_record_set()) - ) + assert not cert.put_record_set(cast("ResourceRecordTypeDef", gen_record_set())) r53_stubber.assert_no_pending_responses() def test_remove_validation_records( - self, cfngin_context: MockCFNginContext, monkeypatch: MonkeyPatch + self, cfngin_context: MockCfnginContext, monkeypatch: pytest.MonkeyPatch ) -> None: """Test remove_validation_records.""" # setup context - acm_stubber = cfngin_context.add_stubber("acm", "us-east-1") - r53_stubber = cfngin_context.add_stubber("route53", "us-east-1") + acm_stubber = cfngin_context.add_stubber("acm", region="us-east-1") + r53_stubber = cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" cert_arn = "arn:aws:acm:us-east-1:012345678901:certificate/test" @@ -499,9 +487,7 @@ def test_remove_validation_records( "describe_certificate", gen_certificate( CertificateArn=cert_arn, - DomainValidationOptions=[ - gen_domain_validation_option(ValidationMethod="EMAIL") - ], + DomainValidationOptions=[gen_domain_validation_option(ValidationMethod="EMAIL")], ), expected_cert_request, ) @@ -520,9 +506,7 @@ def test_remove_validation_records( gen_record_set( use_resource_record=True, TTL=cert.args.ttl, - **gen_domain_validation_option().get( - "ResourceRecord", {} - ), + **gen_domain_validation_option().get("ResourceRecord", {}), ), ), ) @@ -531,19 +515,22 @@ def test_remove_validation_records( }, ) - with acm_stubber, r53_stubber, pytest.raises(ValueError) as excinfo: + with ( # noqa: PT012 + acm_stubber, + r53_stubber, + pytest.raises(ValueError, match="Must provide one of more record sets"), + ): assert not cert.remove_validation_records() cert.remove_validation_records() acm_stubber.assert_no_pending_responses() r53_stubber.assert_no_pending_responses() - assert str(excinfo.value) == "Must provide one of more record sets" - def test_update_record_set(self, cfngin_context: 
MockCFNginContext) -> None: + def test_update_record_set(self, cfngin_context: MockCfnginContext) -> None: """Test update_record_set.""" # setup context - cfngin_context.add_stubber("acm", "us-east-1") - r53_stubber = cfngin_context.add_stubber("route53", "us-east-1") + cfngin_context.add_stubber("acm", region="us-east-1") + r53_stubber = cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" cert = Certificate( @@ -564,9 +551,7 @@ def test_update_record_set(self, cfngin_context: MockCFNginContext) -> None: action="UPSERT", record_set=cast( "ResourceRecordSetTypeDef", - gen_record_set( - use_resource_record=True, TTL=cert.args.ttl - ), + gen_record_set(use_resource_record=True, TTL=cert.args.ttl), ), ) ] @@ -575,18 +560,16 @@ def test_update_record_set(self, cfngin_context: MockCFNginContext) -> None: ) with r53_stubber: - assert not cert.update_record_set( - cast("ResourceRecordTypeDef", gen_record_set()) - ) + assert not cert.update_record_set(cast("ResourceRecordTypeDef", gen_record_set())) r53_stubber.assert_no_pending_responses() def test_deploy( - self, cfngin_context: MockCFNginContext, monkeypatch: MonkeyPatch + self, cfngin_context: MockCfnginContext, monkeypatch: pytest.MonkeyPatch ) -> None: """Test deploy.""" # setup context - cfngin_context.add_stubber("acm", "us-east-1") - cfngin_context.add_stubber("route53", "us-east-1") + cfngin_context.add_stubber("acm", region="us-east-1") + cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" cert_arn = "arn:aws:acm:us-east-1:012345678901:certificate/test" @@ -611,17 +594,17 @@ def test_deploy( "put_record_set", lambda x: None if x == "get_validation_record" else ValueError, ) - monkeypatch.setattr(cert, "_wait_for_stack", lambda x, last_status: None) + monkeypatch.setattr(cert, "_wait_for_stack", lambda _, last_status: None) # noqa: ARG005 assert cert.deploy() == expected def test_deploy_update( - self, cfngin_context: MockCFNginContext, monkeypatch: MonkeyPatch + self, cfngin_context: MockCfnginContext, monkeypatch: pytest.MonkeyPatch ) -> None: """Test deploy update stack.""" # setup context - cfngin_context.add_stubber("acm", "us-east-1") - cfngin_context.add_stubber("route53", "us-east-1") + cfngin_context.add_stubber("acm", region="us-east-1") + cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" cert_arn = "arn:aws:acm:us-east-1:012345678901:certificate/test" @@ -640,9 +623,7 @@ def test_deploy_update( cert, "get_validation_record", lambda x, status: ( - "get_validation_record" - if x == cert_arn and status == "SUCCESS" - else ValueError + "get_validation_record" if x == cert_arn and status == "SUCCESS" else ValueError ), ) monkeypatch.setattr( @@ -650,17 +631,17 @@ def test_deploy_update( "update_record_set", lambda x: None if x == "get_validation_record" else ValueError, ) - monkeypatch.setattr(cert, "_wait_for_stack", lambda x, last_status: None) + monkeypatch.setattr(cert, "_wait_for_stack", lambda _, last_status: None) # noqa: ARG005 assert cert.deploy() == expected def test_deploy_no_change( - self, cfngin_context: MockCFNginContext, monkeypatch: MonkeyPatch + self, cfngin_context: MockCfnginContext, monkeypatch: pytest.MonkeyPatch ) -> None: """Test deploy no change.""" # setup context - cfngin_context.add_stubber("acm", "us-east-1") - cfngin_context.add_stubber("route53", "us-east-1") + cfngin_context.add_stubber("acm", region="us-east-1") + cfngin_context.add_stubber("route53", 
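# ---
# add_stubber() now takes the region as a keyword argument. Underneath, the
# helper wraps the standard botocore Stubber pattern; a minimal sketch with
# plain boto3 (region and credential values are illustrative fakes):
import boto3
from botocore.stub import Stubber

client = boto3.client(
    "sts",
    region_name="us-east-1",
    aws_access_key_id="testing",
    aws_secret_access_key="testing",
)
stubber = Stubber(client)
stubber.add_response("get_caller_identity", {"Account": "123456789012"})
with stubber:
    assert client.get_caller_identity()["Account"] == "123456789012"
stubber.assert_no_pending_responses()
# ---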
region="us-east-1") cfngin_context.config.namespace = "test" cert_arn = "arn:aws:acm:us-east-1:012345678901:certificate/test" @@ -679,12 +660,12 @@ def test_deploy_no_change( assert cert.deploy() == expected def test_deploy_recreate( - self, cfngin_context: MockCFNginContext, monkeypatch: MonkeyPatch - ): + self, cfngin_context: MockCfnginContext, monkeypatch: pytest.MonkeyPatch + ) -> None: """Test deploy with stack recreation.""" # setup context - cfngin_context.add_stubber("acm", "us-east-1") - cfngin_context.add_stubber("route53", "us-east-1") + cfngin_context.add_stubber("acm", region="us-east-1") + cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" cert_arn = "arn:aws:acm:us-east-1:012345678901:certificate/test" @@ -698,9 +679,7 @@ def test_deploy_recreate( ) monkeypatch.setattr(cert, "domain_changed", lambda: False) monkeypatch.setattr(cert, "deploy_stack", lambda: STATUS.recreate) # type: ignore - monkeypatch.setattr( - cert, "get_certificate", MagicMock(side_effect=["old", cert_arn]) - ) + monkeypatch.setattr(cert, "get_certificate", MagicMock(side_effect=["old", cert_arn])) monkeypatch.setattr( cert, "_wait_for_stack", MagicMock(side_effect=[STATUS.new, None]) # type: ignore ) @@ -718,12 +697,12 @@ def test_deploy_recreate( assert cert.deploy() == expected def test_deploy_domain_changed( - self, cfngin_context: MockCFNginContext, monkeypatch: MonkeyPatch + self, cfngin_context: MockCfnginContext, monkeypatch: pytest.MonkeyPatch ) -> None: """Test deploy domain changed.""" # setup context - cfngin_context.add_stubber("acm", "us-east-1") - cfngin_context.add_stubber("route53", "us-east-1") + cfngin_context.add_stubber("acm", region="us-east-1") + cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" cert = Certificate( @@ -737,12 +716,12 @@ def test_deploy_domain_changed( assert not cert.deploy() def test_deploy_error_destroy( - self, cfngin_context: MockCFNginContext, monkeypatch: MonkeyPatch + self, cfngin_context: MockCfnginContext, monkeypatch: pytest.MonkeyPatch ) -> None: """Test deploy with errors that result in destroy being called.""" # setup context - cfngin_context.add_stubber("acm", "us-east-1") - cfngin_context.add_stubber("route53", "us-east-1") + cfngin_context.add_stubber("acm", region="us-east-1") + cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" cert_arn = "arn:aws:acm:us-east-1:012345678901:certificate/test" @@ -774,27 +753,29 @@ def test_deploy_error_destroy( ), ) monkeypatch.setattr( - cert, "destroy", lambda records, skip_r53: check_bool_is_true(skip_r53) - ) - monkeypatch.setattr( - cert, "_wait_for_stack", MagicMock(side_effect=StackFailed("test")) + cert, + "destroy", + lambda records, skip_r53: check_bool_is_true(skip_r53), # noqa: ARG005 ) + monkeypatch.setattr(cert, "_wait_for_stack", MagicMock(side_effect=StackFailed("test"))) assert not cert.deploy() # cert.r53_client.exceptions.InvalidChangeBatch assert not cert.deploy() # cert.r53_client.exceptions.NoSuchHostedZone monkeypatch.setattr( - cert, "destroy", lambda records, skip_r53: check_bool_is_false(skip_r53) + cert, + "destroy", + lambda records, skip_r53: check_bool_is_false(skip_r53), # noqa: ARG005 ) assert not cert.deploy() # StackFailed def test_deploy_error_no_destroy( - self, cfngin_context: MockCFNginContext, monkeypatch: MonkeyPatch + self, cfngin_context: MockCfnginContext, monkeypatch: pytest.MonkeyPatch ) -> None: """Test deploy with errors that 
don't result in destroy being called.""" # setup context - cfngin_context.add_stubber("acm", "us-east-1") - cfngin_context.add_stubber("route53", "us-east-1") + cfngin_context.add_stubber("acm", region="us-east-1") + cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" cert = Certificate( @@ -813,12 +794,12 @@ def test_deploy_error_no_destroy( assert not cert.deploy() def test_destroy( - self, cfngin_context: MockCFNginContext, monkeypatch: MonkeyPatch + self, cfngin_context: MockCfnginContext, monkeypatch: pytest.MonkeyPatch ) -> None: """Test destroy.""" # setup context - cfngin_context.add_stubber("acm", "us-east-1") - cfngin_context.add_stubber("route53", "us-east-1") + cfngin_context.add_stubber("acm", region="us-east-1") + cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" cert = Certificate( @@ -828,24 +809,20 @@ def test_destroy( hosted_zone_id="test", ) # should only be called once - monkeypatch.setattr( - cert, "remove_validation_records", MagicMock(return_value=None) - ) - monkeypatch.setattr(cert, "destroy_stack", lambda wait: None) + monkeypatch.setattr(cert, "remove_validation_records", MagicMock(return_value=None)) + monkeypatch.setattr(cert, "destroy_stack", lambda wait: None) # noqa: ARG005 assert cert.destroy() assert cert.destroy(skip_r53=True) - assert ( # pylint: disable=no-member - cert.remove_validation_records.call_count == 1 # type: ignore - ) + assert cert.remove_validation_records.call_count == 1 # type: ignore def test_destroy_aws_errors( - self, cfngin_context: MockCFNginContext, monkeypatch: MonkeyPatch + self, cfngin_context: MockCfnginContext, monkeypatch: pytest.MonkeyPatch ) -> None: """Test destroy with errors from AWS.""" # setup context - cfngin_context.add_stubber("acm", "us-east-1") - cfngin_context.add_stubber("route53", "us-east-1") + cfngin_context.add_stubber("acm", region="us-east-1") + cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" cert = Certificate( @@ -866,19 +843,19 @@ def test_destroy_aws_errors( ] ), ) - monkeypatch.setattr(cert, "destroy_stack", lambda wait: None) + monkeypatch.setattr(cert, "destroy_stack", lambda wait: None) # noqa: ARG005 assert cert.destroy() assert cert.destroy() assert cert.destroy() def test_destroy_raise_client_error( - self, cfngin_context: MockCFNginContext, monkeypatch: MonkeyPatch + self, cfngin_context: MockCfnginContext, monkeypatch: pytest.MonkeyPatch ) -> None: """Test destroy with ClientError raised.""" # setup context - cfngin_context.add_stubber("acm", "us-east-1") - cfngin_context.add_stubber("route53", "us-east-1") + cfngin_context.add_stubber("acm", region="us-east-1") + cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" def build_client_error(msg: str) -> ClientError: @@ -891,10 +868,11 @@ def build_client_error(msg: str) -> ClientError: domain="example.com", hosted_zone_id="test", ) - monkeypatch.setattr(cert, "destroy_stack", lambda wait: None) + monkeypatch.setattr(cert, "destroy_stack", lambda wait: None) # noqa: ARG005 def raise_stack_not_exist(_records: Any) -> NoReturn: """Raise ClientError mimicking stack not existing.""" + assert cert.stack raise build_client_error(f"Stack with id {cert.stack.fqn} does not exist") def raise_other(_records: Any) -> NoReturn: @@ -920,8 +898,8 @@ def raise_other(_records: Any) -> NoReturn: ) def test_stage_methods( self, - cfngin_context: MockCFNginContext, - 
monkeypatch: MonkeyPatch, + cfngin_context: MockCfnginContext, + monkeypatch: pytest.MonkeyPatch, stage: str, expected: str, ) -> None: @@ -933,8 +911,8 @@ def test_stage_methods( """ # setup context - cfngin_context.add_stubber("acm", "us-east-1") - cfngin_context.add_stubber("route53", "us-east-1") + cfngin_context.add_stubber("acm", region="us-east-1") + cfngin_context.add_stubber("route53", region="us-east-1") cfngin_context.config.namespace = "test" cert = Certificate( diff --git a/tests/unit/cfngin/hooks/test_aws_lambda.py b/tests/unit/cfngin/hooks/test_awslambda.py similarity index 78% rename from tests/unit/cfngin/hooks/test_aws_lambda.py rename to tests/unit/cfngin/hooks/test_awslambda.py index 74b53e512..66ae10032 100644 --- a/tests/unit/cfngin/hooks/test_aws_lambda.py +++ b/tests/unit/cfngin/hooks/test_awslambda.py @@ -1,6 +1,6 @@ """Tests for runway.cfngin.hooks.aws_lambda.""" -# pyright: basic, reportUnknownArgumentType=none, reportUnknownVariableType=none +# pyright: reportUnknownArgumentType=none, reportUnknownVariableType=none # pyright: reportFunctionMemberAccess=none, reportOptionalMemberAccess=none # pyright: reportOptionalOperand=none from __future__ import annotations @@ -14,14 +14,14 @@ import unittest from io import BytesIO as StringIO from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast +from typing import TYPE_CHECKING, Any, cast +from unittest.mock import ANY, MagicMock, patch from zipfile import ZipFile import boto3 import pytest from botocore.exceptions import ClientError -from mock import ANY, MagicMock, patch -from moto import mock_s3 +from moto.core.decorator import mock_aws from testfixtures.comparison import compare from testfixtures.shouldraise import ShouldRaise from testfixtures.tempdirectory import TempDirectory @@ -48,7 +48,6 @@ if TYPE_CHECKING: from mypy_boto3_s3.client import S3Client - from pytest import LogCaptureFixture, MonkeyPatch REGION = "us-east-1" ALL_FILES = ( @@ -72,7 +71,7 @@ class TestLambdaHooks(unittest.TestCase): @classmethod def temp_directory_with_files( - cls, files: Union[List[str], Tuple[str, ...]] = ALL_FILES + cls, files: list[str] | tuple[str, ...] 
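The test_acm.py hunks above repeatedly collapse the `pytest.raises(...) as excinfo` plus follow-up string assertion into a single `pytest.raises(..., match=...)` call. A minimal sketch of the two equivalent styles (the `fail` helper is hypothetical; `match` is applied with `re.search`, so literal messages containing regex metacharacters should go through `re.escape`):

    import re

    import pytest


    def fail() -> None:
        """Hypothetical stand-in for the code under test."""
        raise ValueError("only one option is supported")


    def test_fail_old_style() -> None:
        # Capture the exception, then assert on its message afterwards.
        with pytest.raises(ValueError) as excinfo:
            fail()
        assert "only one option is supported" in str(excinfo.value)


    def test_fail_new_style() -> None:
        # Match the message as part of the raises() call itself.
        with pytest.raises(ValueError, match=re.escape("only one option is supported")):
            fail()
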
diff --git a/tests/unit/cfngin/hooks/test_aws_lambda.py b/tests/unit/cfngin/hooks/test_awslambda.py
similarity index 78%
rename from tests/unit/cfngin/hooks/test_aws_lambda.py
rename to tests/unit/cfngin/hooks/test_awslambda.py
index 74b53e512..66ae10032 100644
--- a/tests/unit/cfngin/hooks/test_aws_lambda.py
+++ b/tests/unit/cfngin/hooks/test_awslambda.py
@@ -1,6 +1,6 @@
 """Tests for runway.cfngin.hooks.aws_lambda."""
 
-# pyright: basic, reportUnknownArgumentType=none, reportUnknownVariableType=none
+# pyright: reportUnknownArgumentType=none, reportUnknownVariableType=none
 # pyright: reportFunctionMemberAccess=none, reportOptionalMemberAccess=none
 # pyright: reportOptionalOperand=none
 from __future__ import annotations
@@ -14,14 +14,14 @@
 import unittest
 from io import BytesIO as StringIO
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast
+from typing import TYPE_CHECKING, Any, cast
+from unittest.mock import ANY, MagicMock, patch
 from zipfile import ZipFile
 
 import boto3
 import pytest
 from botocore.exceptions import ClientError
-from mock import ANY, MagicMock, patch
-from moto import mock_s3
+from moto.core.decorator import mock_aws
 from testfixtures.comparison import compare
 from testfixtures.shouldraise import ShouldRaise
 from testfixtures.tempdirectory import TempDirectory
@@ -48,7 +48,6 @@
 
 if TYPE_CHECKING:
     from mypy_boto3_s3.client import S3Client
-    from pytest import LogCaptureFixture, MonkeyPatch
 
 REGION = "us-east-1"
 ALL_FILES = (
@@ -72,7 +71,7 @@ class TestLambdaHooks(unittest.TestCase):
 
     @classmethod
     def temp_directory_with_files(
-        cls, files: Union[List[str], Tuple[str, ...]] = ALL_FILES
+        cls, files: list[str] | tuple[str, ...] = ALL_FILES
     ) -> TempDirectory:
         """Create a temp directory with files."""
         temp_dict = TempDirectory()
@@ -81,13 +80,13 @@ def temp_directory_with_files(
         return temp_dict
 
     @property
-    def s3(self) -> S3Client:  # pylint: disable=invalid-name
+    def s3(self) -> S3Client:
         """Return S3 client."""
         if not self._s3:
             self._s3 = boto3.client("s3", region_name=REGION)
         return self._s3
 
-    def assert_s3_zip_file_list(self, bucket: str, key: str, files: List[str]) -> None:
+    def assert_s3_zip_file_list(self, bucket: str, key: str, files: list[str]) -> None:
         """Assert s3 zip file list."""
         object_info = self.s3.get_object(Bucket=bucket, Key=key)
         zip_data = StringIO(object_info["Body"].read())
@@ -96,9 +95,7 @@ def assert_s3_zip_file_list(self, bucket: str, key: str, files: List[str]) -> No
         with ZipFile(zip_data, "r") as zip_file:
             for zip_info in zip_file.infolist():
                 perms = (zip_info.external_attr & ZIP_PERMS_MASK) >> 16
-                self.assertIn(
-                    perms, (0o755, 0o644), "ZIP member permission must be 755 or 644"
-                )
+                assert perms in (493, 420), "ZIP member permission must be 755 or 644"
                 found_files.add(zip_info.filename)
 
         compare(found_files, set(files))
@@ -116,13 +113,11 @@ def assert_s3_bucket(self, bucket: str, present: bool = True) -> None:
     def setUp(self) -> None:
         """Run before tests."""
         self.context = CfnginContext(
-            config=CfnginConfig.parse_obj(
-                {"namespace": "test", "cfngin_bucket": "test"}
-            )
+            config=CfnginConfig.parse_obj({"namespace": "test", "cfngin_bucket": "test"})
         )
         self.provider = mock_provider(region="us-east-1")
 
-    def run_hook(self, **kwargs: Any) -> Dict[Any, Any]:
+    def run_hook(self, **kwargs: Any) -> dict[Any, Any]:
         """Run hook."""
         real_kwargs = {
             "context": self.context,
@@ -132,22 +127,22 @@ def run_hook(self, **kwargs: Any) -> dict[Any, Any]:
 
         return upload_lambda_functions(**real_kwargs)  # type: ignore
 
-    @mock_s3
+    @mock_aws
     def test_bucket_default(self) -> None:
         """Test bucket default."""
-        self.assertIsNotNone(self.run_hook(functions={}))
+        assert self.run_hook(functions={}) is not None
 
         self.assert_s3_bucket("test")
 
-    @mock_s3
+    @mock_aws
     def test_bucket_custom(self) -> None:
         """Test bucket custom."""
-        self.assertIsNotNone(self.run_hook(bucket="custom", functions={}))
+        assert self.run_hook(bucket="custom", functions={}) is not None
 
         self.assert_s3_bucket("test", present=False)
         self.assert_s3_bucket("custom")
 
-    @mock_s3
+    @mock_aws
     def test_prefix(self) -> None:
         """Test prefix."""
         with self.temp_directory_with_files() as temp_dir:
@@ -156,64 +151,59 @@ def test_prefix(self) -> None:
                 functions={"MyFunction": {"path": temp_dir.path + "/f1"}},
             )
 
-        self.assertIsNotNone(results)
+        assert results is not None
 
         code = results.get("MyFunction")
-        self.assertIsInstance(code, Code)
+        assert isinstance(code, Code)
         self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, F1_FILES)
-        self.assertTrue(
-            code.S3Key.startswith("cloudformation-custom-resources/lambda-MyFunction-")
-        )
+        assert code.S3Key.startswith("cloudformation-custom-resources/lambda-MyFunction-")
 
-    @mock_s3
+    @mock_aws
     def test_prefix_missing(self) -> None:
         """Test prefix missing."""
         with self.temp_directory_with_files() as temp_dir:
-            results = self.run_hook(
-                functions={"MyFunction": {"path": temp_dir.path + "/f1"}}
-            )
+            results = self.run_hook(functions={"MyFunction": {"path": temp_dir.path + "/f1"}})
 
-        self.assertIsNotNone(results)
+        assert results is not None
 
         code = results.get("MyFunction")
-        self.assertIsInstance(code, Code)
+        assert isinstance(code, Code)
         self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, F1_FILES)
-        self.assertTrue(code.S3Key.startswith("lambda-MyFunction-"))
+        assert code.S3Key.startswith("lambda-MyFunction-")
 
-    @mock_s3
+    @mock_aws
     def test_path_missing(self) -> None:
         """Test path missing."""
         msg = "missing required property 'path' in function 'MyFunction'"
         with ShouldRaise(ValueError(msg)):
             self.run_hook(functions={"MyFunction": {}})
 
-    @mock_s3
+    @mock_aws
     def test_path_relative(self) -> None:
         """Test path relative."""
         with self.temp_directory_with_files(["test/test.py"]) as temp_dir:
             results = self.run_hook(
                 functions={"MyFunction": {"path": "test"}},
                 context=CfnginContext(
-                    config=CfnginConfig.parse_obj(
-                        {"namespace": "test", "cfngin_bucket": "test"}
-                    ),
+                    config=CfnginConfig.parse_obj({"namespace": "test", "cfngin_bucket": "test"}),
                     config_path=Path(str(temp_dir.path)),
                 ),
             )
 
-        self.assertIsNotNone(results)
+        assert results is not None
 
         code = results.get("MyFunction")
-        self.assertIsInstance(code, Code)
+        assert isinstance(code, Code)
         self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, ["test.py"])
 
-    @mock_s3
+    @mock_aws
     def test_path_home_relative(self) -> None:
         """Test path home relative."""
         orig_expanduser = os.path.expanduser
 
-        with self.temp_directory_with_files(["test.py"]) as temp_dir, patch(
-            "os.path.expanduser"
-        ) as mock1:
+        with (
+            self.temp_directory_with_files(["test.py"]) as temp_dir,
+            patch("os.path.expanduser") as mock1,
+        ):
             test_path = "~/test"
 
             mock1.side_effect = lambda p: (  # type: ignore
@@ -222,13 +212,13 @@ def test_path_home_relative(self) -> None:
 
             results = self.run_hook(functions={"MyFunction": {"path": test_path}})
 
-        self.assertIsNotNone(results)
+        assert results is not None
 
         code = results.get("MyFunction")
-        self.assertIsInstance(code, Code)
+        assert isinstance(code, Code)
         self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, ["test.py"])
 
-    @mock_s3
+    @mock_aws
     def test_multiple_functions(self) -> None:
         """Test multiple functions."""
         with self.temp_directory_with_files() as temp_dir:
@@ -239,32 +229,27 @@ def test_multiple_functions(self) -> None:
                 }
             )
 
-        self.assertIsNotNone(results)
+        assert results is not None
 
         f1_code = results.get("MyFunction")
-        self.assertIsInstance(f1_code, Code)
+        assert isinstance(f1_code, Code)
         self.assert_s3_zip_file_list(f1_code.S3Bucket, f1_code.S3Key, F1_FILES)
 
         f2_code = results.get("OtherFunction")
-        self.assertIsInstance(f2_code, Code)
+        assert isinstance(f2_code, Code)
         self.assert_s3_zip_file_list(f2_code.S3Bucket, f2_code.S3Key, F2_FILES)
 
-    @mock_s3
+    @mock_aws
     def test_patterns_invalid(self) -> None:
         """Test patterns invalid."""
-        msg = (
-            "Invalid file patterns in key 'include': must be a string or "
-            "list of strings"
-        )
+        msg = "Invalid file patterns in key 'include': must be a string or list of strings"
 
         with ShouldRaise(ValueError(msg)):
             self.run_hook(
-                functions={
-                    "MyFunction": {"path": "test", "include": {"invalid": "invalid"}}
-                }
+                functions={"MyFunction": {"path": "test", "include": {"invalid": "invalid"}}}
             )
 
-    @mock_s3
+    @mock_aws
     def test_patterns_include(self) -> None:
         """Test patterns include."""
         with self.temp_directory_with_files() as temp_dir:
@@ -277,10 +262,10 @@ def test_patterns_include(self) -> None:
                 }
             )
 
-        self.assertIsNotNone(results)
+        assert results is not None
 
         code = results.get("MyFunction")
-        self.assertIsInstance(code, Code)
+        assert isinstance(code, Code)
         self.assert_s3_zip_file_list(
             code.S3Bucket,
             code.S3Key,
@@ -293,7 +278,7 @@ def test_patterns_include(self) -> None:
             ],
         )
 
-    @mock_s3
+    @mock_aws
     def test_patterns_exclude(self) -> None:
         """Test patterns exclude."""
         with self.temp_directory_with_files() as temp_dir:
@@ -306,15 +291,15 @@ def test_patterns_exclude(self) -> None:
                 }
             )
 
-        self.assertIsNotNone(results)
+        assert results is not None
 
         code = results.get("MyFunction")
-        self.assertIsInstance(code, Code)
+        assert isinstance(code, Code)
         self.assert_s3_zip_file_list(
             code.S3Bucket, code.S3Key, ["f1.py", "__init__.py", "test2/test.txt"]
         )
 
-    @mock_s3
+    @mock_aws
     def test_patterns_include_exclude(self) -> None:
         """Test patterns include exclude."""
         with self.temp_directory_with_files() as temp_dir:
@@ -328,15 +313,13 @@ def test_patterns_include_exclude(self) -> None:
                 }
             )
 
-        self.assertIsNotNone(results)
+        assert results is not None
 
         code = results.get("MyFunction")
-        self.assertIsInstance(code, Code)
-        self.assert_s3_zip_file_list(
-            code.S3Bucket, code.S3Key, ["f1.py", "__init__.py"]
-        )
+        assert isinstance(code, Code)
+        self.assert_s3_zip_file_list(code.S3Bucket, code.S3Key, ["f1.py", "__init__.py"])
 
-    @mock_s3
+    @mock_aws
     def test_patterns_exclude_all(self) -> None:
         """Test patterns exclude all."""
         msg = (
@@ -344,18 +327,14 @@ def test_patterns_exclude_all(self) -> None:
             "include/exclude options for errors."
         )
 
-        with self.temp_directory_with_files() as temp_dir, ShouldRaise(
-            RuntimeError(msg)
-        ):
+        with self.temp_directory_with_files() as temp_dir, ShouldRaise(RuntimeError(msg)):
             results = self.run_hook(
-                functions={
-                    "MyFunction": {"path": temp_dir.path + "/f1", "exclude": ["**"]}
-                }
+                functions={"MyFunction": {"path": temp_dir.path + "/f1", "exclude": ["**"]}}
             )
 
-            self.assertIsNone(results)
+            assert results is None
 
-    @mock_s3
+    @mock_aws
     def test_idempotence(self) -> None:
         """Test idempotence."""
         with self.temp_directory_with_files() as temp_dir:
@@ -368,10 +347,10 @@ def test_idempotence(self) -> None:
             previous = None
             for _ in range(2):
                 results = self.run_hook(bucket=bucket_name, functions=functions)
-                self.assertIsNotNone(results)
+                assert results is not None
 
                 code = results.get("MyFunction")
-                self.assertIsInstance(code, Code)
+                assert isinstance(code, Code)
 
                 if not previous:
                     previous = code.S3Key
@@ -380,7 +359,7 @@ def test_idempotence(self) -> None:
                 compare(
                     previous,
                     code.S3Key,
-                    prefix="zipfile name should not be modified in " "repeated runs.",
+                    prefix="zipfile name should not be modified in repeated runs.",
                 )
 
     def test_calculate_hash(self) -> None:
@@ -395,13 +374,13 @@ def test_calculate_hash(self) -> None:
         with self.temp_directory_with_files() as temp_dir3:
             root = cast(str, temp_dir3.path)
-            with open(os.path.join(root, ALL_FILES[0]), "w", encoding="utf-8") as _file:
+            with (Path(root) / ALL_FILES[0]).open("w") as _file:
                 _file.write("modified file data")
             hash3 = _calculate_hash(ALL_FILES, root)
 
-        self.assertEqual(hash1, hash2)
-        self.assertNotEqual(hash1, hash3)
-        self.assertNotEqual(hash2, hash3)
+        assert hash1 == hash2
+        assert hash1 != hash3
+        assert hash2 != hash3
 
     def test_calculate_hash_diff_filename_same_contents(self) -> None:
         """Test calculate hash diff filename same contents."""
@@ -413,7 +392,7 @@ def test_calculate_hash_diff_filename_same_contents(self) -> None:
             temp_dir.write(file_name, b"data")
         hash1 = _calculate_hash([file1], root)
         hash2 = _calculate_hash([file2], root)
-        self.assertNotEqual(hash1, hash2)
+        assert hash1 != hash2
 
     def test_calculate_hash_different_ordering(self) -> None:
         """Test calculate hash different ordering."""
@@ -429,13 +408,11 @@ def test_calculate_hash_different_ordering(self) -> None:
                 temp_dir2.write(file_name, b"")
             hash1 = _calculate_hash(files1, root1)
             hash2 = _calculate_hash(files2, root2)
-            self.assertEqual(hash1, hash2)
+            assert hash1 == hash2
 
     def test_select_bucket_region(self) -> None:
         """Test select bucket region."""
-        tests: Tuple[
-            Tuple[Tuple[Optional[str], Optional[str], Optional[str], str], str], ...
-        ] = (
+        tests: tuple[tuple[tuple[str | None, str | None, str | None, str], str], ...] = (
             (("myBucket", "us-east-1", "us-west-1", "eu-west-1"), "us-east-1"),
             (("myBucket", None, "us-west-1", "eu-west-1"), "eu-west-1"),
             ((None, "us-east-1", "us-west-1", "eu-west-1"), "us-west-1"),
@@ -443,18 +420,16 @@ def test_select_bucket_region(self) -> None:
         )
 
         for args, result in tests:
-            self.assertEqual(select_bucket_region(*args), result)  # type: ignore
+            assert select_bucket_region(*args) == result
 
-    @mock_s3
+    @mock_aws
     def test_follow_symlink_nonbool(self) -> None:
         """Test follow symlink nonbool."""
         msg = "follow_symlinks option must be a boolean"
         with ShouldRaise(ValueError(msg)):
-            self.run_hook(
-                follow_symlinks="raiseValueError", functions={"MyFunction": {}}
-            )
+            self.run_hook(follow_symlinks="raiseValueError", functions={"MyFunction": {}})
 
-    @mock_s3
+    @mock_aws
     def test_follow_symlink_true(self) -> None:
         """Testing if symlinks are followed."""
         with self.temp_directory_with_files() as temp_dir1:
@@ -465,10 +440,10 @@ def test_follow_symlink_true(self) -> None:
             results = self.run_hook(
                 follow_symlinks=True, functions={"MyFunction": {"path": root2}}
             )
-            self.assertIsNotNone(results)
+            assert results is not None
 
             code = results.get("MyFunction")
-            self.assertIsInstance(code, Code)
+            assert isinstance(code, Code)
             self.assert_s3_zip_file_list(
                 code.S3Bucket,
                 code.S3Key,
@@ -491,7 +466,7 @@ def test_follow_symlink_true(self) -> None:
                 ],
             )
 
-    @mock_s3
+    @mock_aws
     def test_follow_symlink_false(self) -> None:
         """Testing if symlinks are present and not followed."""
         with self.temp_directory_with_files() as temp_dir1:
@@ -502,10 +477,10 @@ def test_follow_symlink_false(self) -> None:
             results = self.run_hook(
                 follow_symlinks=False, functions={"MyFunction": {"path": root2}}
             )
-            self.assertIsNotNone(results)
+            assert results is not None
 
             code = results.get("MyFunction")
-            self.assertIsInstance(code, Code)
+            assert isinstance(code, Code)
             self.assert_s3_zip_file_list(
                 code.S3Bucket,
                 code.S3Key,
@@ -521,7 +496,7 @@ def test_follow_symlink_false(self) -> None:
                 ],
             )
 
-    @mock_s3
+    @mock_aws
     def test_follow_symlink_omitted(self) -> None:
         """Same as test_follow_symlink_false, but default behavior."""
         with self.temp_directory_with_files() as temp_dir1:
@@ -530,10 +505,10 @@ def test_follow_symlink_omitted(self) -> None:
                 root2 = temp_dir2.path
                 os.symlink(root1 + "/f1", root2 + "/f3")
                 results = self.run_hook(functions={"MyFunction": {"path": root2}})
-                self.assertIsNotNone(results)
+                assert results is not None
 
                 code = results.get("MyFunction")
-                self.assertIsInstance(code, Code)
+                assert isinstance(code, Code)
                 self.assert_s3_zip_file_list(
                     code.S3Bucket,
                     code.S3Key,
@@ -549,7 +524,7 @@ def test_follow_symlink_omitted(self) -> None:
                 ],
             )
 
-    @mock_s3
+    @mock_aws
     @patch("runway.cfngin.hooks.aws_lambda.subprocess")
     @patch(
         "runway.cfngin.hooks.aws_lambda.find_requirements",
@@ -587,9 +562,7 @@ def test_frozen(self, mock_sys: MagicMock, mock_proc: MagicMock) -> None:
             }
         )
         mock_proc.check_call.assert_called_once_with([ANY, "run-python", ANY])
-        assert mock_proc.check_call.call_args.args[0][2].endswith(
-            "__runway_run_pip_install.py"
-        )
+        assert mock_proc.check_call.call_args.args[0][2].endswith("__runway_run_pip_install.py")
 
 
 class TestDockerizePip:
@@ -607,9 +580,9 @@ class TestDockerizePip:
         {
             "Target": "/var/task",
             "Source": (
-                os.getcwd().replace("\\", "/")
+                str(Path.cwd()).replace("\\", "/")
                 if platform.system() == "Windows"
-                else os.getcwd()
+                else str(Path.cwd())
             ),
             "Type": "bind",
             "ReadOnly": False,
@@ -622,7 +595,7 @@ def test_with_docker_file(self) -> None:
         client = make_fake_client()
         with TempDirectory() as tmp_dir:
             docker_file = tmp_dir.write("Dockerfile", b"")
-            dockerized_pip(os.getcwd(), client=client, docker_file=docker_file)
+            dockerized_pip(str(Path.cwd()), client=client, docker_file=docker_file)
 
             client.api.build.assert_called_with(
                 path=tmp_dir.path, dockerfile="Dockerfile", forcerm=True
@@ -643,7 +616,7 @@ def test_with_docker_image(self) -> None:
         """Test with docker_image provided."""
         client = make_fake_client()
         image = "alpine"
-        dockerized_pip(os.getcwd(), client=client, docker_image=image)
+        dockerized_pip(str(Path.cwd()), client=client, docker_image=image)
 
         client.api.create_container.assert_called_with(
             detach=True, image=image, command=self.command, host_config=self.host_config
@@ -658,7 +631,7 @@ def test_with_runtime(self) -> None:
         """Test with runtime provided."""
         client = make_fake_client()
         runtime = "python3.8"
-        dockerized_pip(os.getcwd(), client=client, runtime=runtime)
+        dockerized_pip(str(Path.cwd()), client=client, runtime=runtime)
 
         client.api.create_container.assert_called_with(
             detach=True,
@@ -677,7 +650,7 @@ def test_raises_invalid_config(self) -> None:
         client = make_fake_client()
         with pytest.raises(InvalidDockerizePipConfiguration):
             dockerized_pip(
-                os.getcwd(),
+                str(Path.cwd()),
                 client=client,
                 docker_file="docker_file",
                 docker_image="docker_image",
@@ -685,52 +658,44 @@ def test_raises_invalid_config(self) -> None:
             )
         with pytest.raises(InvalidDockerizePipConfiguration):
             dockerized_pip(
-                os.getcwd(),
+                str(Path.cwd()),
                 client=client,
                 docker_file="docker_file",
                 docker_image="docker_image",
             )
         with pytest.raises(InvalidDockerizePipConfiguration):
             dockerized_pip(
-                os.getcwd(), client=client, docker_file="docker_file", runtime="runtime"
+                str(Path.cwd()), client=client, docker_file="docker_file", runtime="runtime"
             )
         with pytest.raises(InvalidDockerizePipConfiguration):
             dockerized_pip(
-                os.getcwd(),
+                str(Path.cwd()),
                 client=client,
                 docker_image="docker_image",
                 runtime="runtime",
             )
         with pytest.raises(InvalidDockerizePipConfiguration):
-            dockerized_pip(os.getcwd(), client=client)
+            dockerized_pip(str(Path.cwd()), client=client)
 
     def test_raises_value_error_missing_dockerfile(self) -> None:
         """ValueError raised when provided Dockerfile is not found."""
         client = make_fake_client()
-        with pytest.raises(ValueError) as excinfo:
-            dockerized_pip(os.getcwd(), client=client, docker_file="not-a-Dockerfile")
-        assert "docker_file" in str(excinfo.value)
+        with pytest.raises(ValueError, match=".*docker_file.*"):
+            dockerized_pip(str(Path.cwd()), client=client, docker_file="not-a-Dockerfile")
 
     def test_raises_value_error_runtime(self) -> None:
         """ValueError raised if runtime provided is not supported."""
         client = make_fake_client()
-        with pytest.raises(ValueError) as excinfo:
-            dockerized_pip(os.getcwd(), client=client, runtime="node")
-        assert "node" in str(excinfo.value)
+        with pytest.raises(ValueError, match=".*node.*"):
+            dockerized_pip(str(Path.cwd()), client=client, runtime="node")
 
 
 class TestHandleRequirements:
     """Test handle_requirements."""
 
-    PIPFILE = "\n".join(
-        [
-            "[[source]]",
-            'url = "https://pypi.org/simple"',
-            "verify_ssl = true",
-            'name = "pypi"',
-            "[packages]",
-            "[dev-packages]",
-        ]
+    PIPFILE = (
+        '[[source]]\nurl = "https://pypi.org/simple"\nverify_ssl = true\nname = "pypi"\n'
+        "[packages]\n[dev-packages]"
     )
     REQUIREMENTS = "-i https://pypi.org/simple\n\n"
 
@@ -743,15 +708,13 @@ def test_default(self) -> None:
             req_path = handle_requirements(
                 package_root=cast(str, tmp_dir.path),
                 dest_path=cast(str, tmp_dir.path),
-                requirements=cast(
-                    Dict[str, bool], find_requirements(cast(str, tmp_dir.path))
-                ),
+                requirements=cast(dict[str, bool], find_requirements(cast(str, tmp_dir.path))),
             )
 
-            assert req_path == os.path.join(cast(str, tmp_dir.path), "requirements.txt")
-            assert not os.path.isfile(
-                os.path.join(cast(str, tmp_dir.path), "Pipfile.lock")
+            assert req_path == os.path.join(  # noqa: PTH118
+                cast(str, tmp_dir.path), "requirements.txt"
             )
+            assert not (Path(cast(str, tmp_dir.path)) / "Pipfile.lock").is_file()
             assert tmp_dir.read("requirements.txt") == expected
 
     def test_explicit_pipenv(self, tmp_path: Path) -> None:
@@ -764,7 +727,7 @@ def test_explicit_pipenv(self, tmp_path: Path) -> None:
         req_path = handle_requirements(
             package_root=str(tmp_path),
             dest_path=str(tmp_path),
-            requirements=cast(Dict[str, bool], find_requirements(str(tmp_path))),
+            requirements=cast(dict[str, bool], find_requirements(str(tmp_path))),
             use_pipenv=True,
         )
         assert req_path == str(requirements_txt)
@@ -788,13 +751,11 @@ def test_explicit_pipenv(self, tmp_path: Path) -> None:
         assert requirements_txt.read_text() == "\n".join(expected_text) + "\n"
 
     def test_frozen_pipenv(
-        self, caplog: LogCaptureFixture, monkeypatch: MonkeyPatch, tmp_path: Path
+        self, caplog: pytest.LogCaptureFixture, monkeypatch: pytest.MonkeyPatch, tmp_path: Path
     ) -> None:
-        """Test use pipenv from Pyinstaller build."""
+        """Test use pipenv from Pyinstaller build."""  # cspell:ignore Pyinstaller
         caplog.set_level(logging.ERROR, logger="runway.cfngin.hooks.aws_lambda")
-        monkeypatch.setattr(
-            "runway.cfngin.hooks.aws_lambda.sys.frozen", True, raising=False
-        )
+        monkeypatch.setattr("runway.cfngin.hooks.aws_lambda.sys.frozen", True, raising=False)
 
         with pytest.raises(SystemExit) as excinfo:
             handle_requirements(
@@ -807,9 +768,7 @@ def test_frozen_pipenv(
                 },
             )
         assert excinfo.value.code == 1
-        assert [
-            "pipenv can only be used with python installed from PyPi"
-        ] == caplog.messages
+        assert caplog.messages == ["pipenv can only be used with python installed from PyPi"]
 
     def test_implicit_pipenv(self, tmp_path: Path) -> None:
         """Test implicit use of pipenv."""
@@ -820,7 +779,7 @@ def test_implicit_pipenv(self, tmp_path: Path) -> None:
         req_path = handle_requirements(
             package_root=str(tmp_path),
             dest_path=str(tmp_path),
-            requirements=cast(Dict[str, bool], find_requirements(str(tmp_path))),
+            requirements=cast(dict[str, bool], find_requirements(str(tmp_path))),
             use_pipenv=True,
         )
         assert req_path == str(requirements_txt)
@@ -845,17 +804,16 @@ def test_implicit_pipenv(self, tmp_path: Path) -> None:
 
     def test_raise_not_implimented(self) -> None:
         """Test NotImplimentedError is raised when no requirements file."""
-        with TempDirectory() as tmp_dir:
-            with pytest.raises(NotImplementedError):
-                handle_requirements(
-                    package_root=cast(str, tmp_dir.path),
-                    dest_path=cast(str, tmp_dir.path),
-                    requirements={
-                        "requirements.txt": False,
-                        "Pipfile": False,
-                        "Pipfile.lock": False,
-                    },
-                )
+        with TempDirectory() as tmp_dir, pytest.raises(NotImplementedError):
+            handle_requirements(
+                package_root=cast(str, tmp_dir.path),
+                dest_path=cast(str, tmp_dir.path),
+                requirements={
+                    "requirements.txt": False,
+                    "Pipfile": False,
+                    "Pipfile.lock": False,
+                },
            )
 
 
class TestShouldUseDocker:
@@ -896,14 +854,14 @@ def test_copydir() -> None:
     with TempDirectory() as tmp_dir:
         dest_path = tmp_dir.makedir("dest")
         src_path = tmp_dir.makedir("src")
-        tmp_dir.makedir(("src", "lib"))
+        tmp_dir.makedir("src/lib")
         example_file = b"example file content"
-        tmp_dir.write(("src", "example_file"), example_file)
-        tmp_dir.write(("src", "lib", "example_file"), example_file)
+        tmp_dir.write("src/example_file", example_file)
+        tmp_dir.write("src/lib/example_file", example_file)
         copydir(src_path, dest_path, ["**"])
-        assert tmp_dir.read(("src", "example_file")) == example_file
-        assert tmp_dir.read(("src", "lib", "example_file")) == example_file
-        assert tmp_dir.read(("dest", "example_file")) == example_file
-        assert tmp_dir.read(("dest", "lib", "example_file")) == example_file
+        assert tmp_dir.read("src/example_file") == example_file
+        assert tmp_dir.read("src/lib/example_file") == example_file
+        assert tmp_dir.read("dest/example_file") == example_file
+        assert tmp_dir.read("dest/lib/example_file") == example_file
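The renamed test_awslambda.py also moves from moto's per-service decorators (`mock_s3`) to the unified `mock_aws` decorator that moto 5 introduced. A minimal sketch of the new decorator, assuming moto 5 and an arbitrary bucket name (`mock_aws` is also re-exported from the top-level `moto` package, which is the more common import path than the `moto.core.decorator` module used above):

    import boto3
    from moto import mock_aws


    @mock_aws
    def test_bucket_roundtrip() -> None:
        # Clients created inside the decorated scope talk to moto's in-memory backend.
        client = boto3.client("s3", region_name="us-east-1")
        client.create_bucket(Bucket="example-bucket")
        names = [bucket["Name"] for bucket in client.list_buckets()["Buckets"]]
        assert "example-bucket" in names
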
diff --git a/tests/unit/cfngin/hooks/test_base.py b/tests/unit/cfngin/hooks/test_base.py
index a9f497520..2b9d0df14 100644
--- a/tests/unit/cfngin/hooks/test_base.py
+++ b/tests/unit/cfngin/hooks/test_base.py
@@ -1,13 +1,12 @@
 """Tests for runway.cfngin.hooks.base."""
 
-# pyright: basic
 from __future__ import annotations
 
 import logging
 from typing import TYPE_CHECKING
+from unittest.mock import MagicMock, call, patch
 
 import pytest
-from mock import MagicMock, call, patch
 
 from runway.cfngin.exceptions import StackFailed
 from runway.cfngin.hooks.base import Hook, HookDeployAction, HookDestroyAction
@@ -21,9 +20,8 @@
 )
 
 if TYPE_CHECKING:
-    from pytest import LogCaptureFixture, MonkeyPatch
-
-    from ...factories import MockCFNginContext
+    from ...factories import MockCfnginContext
 
 COMPLETE_W_REASON = CompleteStatus("test successful")
 
@@ -31,7 +29,7 @@
 class TestHook:
     """Tests for runway.cfngin.hooks.base.Hook."""
 
-    def test_attributes(self, cfngin_context: MockCFNginContext) -> None:
+    def test_attributes(self, cfngin_context: MockCfnginContext) -> None:
         """Test attributes set during __init__."""
         provider = MagicMock()
         args = {"tags": {"key": "val"}}
@@ -45,18 +43,18 @@ def test_attributes(self, cfngin_context: MockCFNginContext) -> None:
         assert not result.stack
         assert result.stack_name == "stack"
 
-    def test_tags(self, cfngin_context: MockCFNginContext) -> None:
+    def test_tags(self, cfngin_context: MockCfnginContext) -> None:
         """Test tags property."""
         cfngin_context.config.tags = {"context_tag": "val"}
-        hook = Hook(cfngin_context, MagicMock(), **{"tags": {"arg_tag": "val"}})
+        hook = Hook(cfngin_context, MagicMock(), tags={"arg_tag": "val"})
 
         assert hook.tags.to_dict() == [
             {"Key": "arg_tag", "Value": "val"},
             {"Key": "context_tag", "Value": "val"},
         ]
 
-    def test_get_template_description(self, cfngin_context: MockCFNginContext) -> None:
+    def test_get_template_description(self, cfngin_context: MockCfnginContext) -> None:
         """Test for get_template_description."""
         hook = Hook(cfngin_context, MagicMock())
 
@@ -70,7 +68,7 @@ def test_get_template_description(self, cfngin_context: MockCFNginContext) -> No
         MagicMock(return_value=COMPLETE),
     )
     def test_deploy_stack(
-        self, cfngin_context: MockCFNginContext, caplog: LogCaptureFixture
+        self, cfngin_context: MockCfnginContext, caplog: pytest.LogCaptureFixture
     ) -> None:
         """Test for deploy_stack."""
         hook = Hook(cfngin_context, MagicMock())
@@ -87,7 +85,7 @@ def test_deploy_stack(
         MagicMock(side_effect=[SUBMITTED, COMPLETE]),
     )
     def test_deploy_stack_wait(
-        self, cfngin_context: MockCFNginContext, caplog: LogCaptureFixture
+        self, cfngin_context: MockCfnginContext, caplog: pytest.LogCaptureFixture
     ) -> None:
         """Test for deploy_stack with wait."""
         hook = Hook(cfngin_context, MagicMock())
@@ -106,7 +104,7 @@ def test_deploy_stack_wait(
         MagicMock(side_effect=[SKIPPED]),
     )
     def test_deploy_stack_wait_skipped(
-        self, cfngin_context: MockCFNginContext, caplog: LogCaptureFixture
+        self, cfngin_context: MockCfnginContext, caplog: pytest.LogCaptureFixture
     ) -> None:
         """Test for deploy_stack with wait and skip."""
         hook = Hook(cfngin_context, MagicMock())
@@ -118,10 +116,8 @@ def test_deploy_stack_wait_skipped(
 
         assert caplog.records[0].message == f"{stack.name}:{SKIPPED.name}"
 
-    @patch(
-        "runway.cfngin.hooks.base.HookDeployAction.run", MagicMock(side_effect=[FAILED])
-    )
-    def test_deploy_stack_wait_failed(self, cfngin_context: MockCFNginContext) -> None:
+    @patch("runway.cfngin.hooks.base.HookDeployAction.run", MagicMock(side_effect=[FAILED]))
+    def test_deploy_stack_wait_failed(self, cfngin_context: MockCfnginContext) -> None:
         """Test for deploy_stack with wait and skip."""
         hook = Hook(cfngin_context, MagicMock())
         stack = MagicMock()
@@ -135,7 +131,7 @@ def test_deploy_stack_wait_failed(self, cfngin_context: MockCFNginContext) -> No
         MagicMock(side_effect=[SUBMITTED, COMPLETE_W_REASON]),
     )
     def test_destroy_stack(
-        self, cfngin_context: MockCFNginContext, caplog: LogCaptureFixture
+        self, cfngin_context: MockCfnginContext, caplog: pytest.LogCaptureFixture
     ) -> None:
         """Test for destroy_stack with wait."""
         hook = Hook(cfngin_context, MagicMock())
@@ -152,9 +148,7 @@ def test_destroy_stack(
             == f"{stack.name}:{COMPLETE_W_REASON.name} ({COMPLETE_W_REASON.reason})"
         )
 
-    def test_wait_for_stack_till_reason(
-        self, cfngin_context: MockCFNginContext
-    ) -> None:
+    def test_wait_for_stack_till_reason(self, cfngin_context: MockCfnginContext) -> None:
         """Test _wait_for_stack till_reason option."""
         hook = Hook(cfngin_context, MagicMock())
         stack = MagicMock(fqn="test-stack", name="stack")
@@ -166,14 +160,12 @@ def test_wait_for_stack_till_reason(
             COMPLETE,
         ]
 
-        result = hook._wait_for_stack(  # pylint: disable=protected-access
-            action, stack=stack, till_reason="catch"
-        )
+        result = hook._wait_for_stack(action, stack=stack, till_reason="catch")
         assert result == SUBMITTED
         assert result.reason == "catch"
 
     def test_wait_for_stack_log_change(
-        self, cfngin_context: MockCFNginContext, monkeypatch: MonkeyPatch
+        self, cfngin_context: MockCfnginContext, monkeypatch: pytest.MonkeyPatch
     ) -> None:
         """Test _wait_for_stack log status change."""
         hook = Hook(cfngin_context, MagicMock())
@@ -185,7 +177,7 @@ def test_wait_for_stack_log_change(
 
         monkeypatch.setattr(hook, "_log_stack", mock_log)
 
-        hook._wait_for_stack(  # pylint: disable=protected-access
+        hook._wait_for_stack(
             action,
             last_status=SubmittedStatus("original"),
             stack=stack,
@@ -195,28 +187,28 @@ def test_wait_for_stack_log_change(
         mock_log.assert_has_calls([call(stack, new_status), call(stack, COMPLETE)])
         assert mock_log.call_count == 2
 
-    def test_post_deploy(self, cfngin_context: MockCFNginContext) -> None:
+    def test_post_deploy(self, cfngin_context: MockCfnginContext) -> None:
         """Test post_deploy."""
         hook = Hook(cfngin_context, MagicMock())
 
         with pytest.raises(NotImplementedError):
             hook.post_deploy()
 
-    def test_post_destroy(self, cfngin_context: MockCFNginContext) -> None:
+    def test_post_destroy(self, cfngin_context: MockCfnginContext) -> None:
         """Test post_destroy."""
         hook = Hook(cfngin_context, MagicMock())
 
         with pytest.raises(NotImplementedError):
             hook.post_destroy()
 
-    def test_pre_deploy(self, cfngin_context: MockCFNginContext) -> None:
+    def test_pre_deploy(self, cfngin_context: MockCfnginContext) -> None:
         """Test pre_deploy."""
         hook = Hook(cfngin_context, MagicMock())
 
         with pytest.raises(NotImplementedError):
             hook.pre_deploy()
 
-    def test_pre_destroy(self, cfngin_context: MockCFNginContext):
+    def test_pre_destroy(self, cfngin_context: MockCfnginContext) -> None:
         """Test pre_destroy."""
         hook = Hook(cfngin_context, MagicMock())
 
@@ -227,23 +219,21 @@ def test_pre_destroy(self, cfngin_context: MockCFNginContext):
 class TestHookDeployAction:
     """Tests for runway.cfngin.hooks.base.HookDeployAction."""
 
-    def test_provider(self, cfngin_context: MockCFNginContext) -> None:
+    def test_provider(self, cfngin_context: MockCfnginContext) -> None:
         """Test provider property."""
         provider = MagicMock()
         obj = HookDeployAction(cfngin_context, provider)
 
         assert obj.provider == provider
 
-    def test_build_provider(self, cfngin_context: MockCFNginContext) -> None:
+    def test_build_provider(self, cfngin_context: MockCfnginContext) -> None:
         """Test build_provider."""
         provider = MagicMock()
         obj = HookDeployAction(cfngin_context, provider)
 
         assert obj.build_provider() == provider
 
-    def test_run(
-        self, cfngin_context: MockCFNginContext, monkeypatch: MonkeyPatch
-    ) -> None:
+    def test_run(self, cfngin_context: MockCfnginContext, monkeypatch: pytest.MonkeyPatch) -> None:
         """Test run."""
         obj = HookDeployAction(cfngin_context, MagicMock())
         monkeypatch.setattr(obj, "_launch_stack", lambda: "success")
@@ -254,9 +244,7 @@ def test_run(
 class TestHookDestroyAction:
     """Tests for runway.cfngin.hooks.base.HookDestroyAction."""
 
-    def test_run(
-        self, cfngin_context: MockCFNginContext, monkeypatch: MonkeyPatch
-    ) -> None:
+    def test_run(self, cfngin_context: MockCfnginContext, monkeypatch: pytest.MonkeyPatch) -> None:
         """Test run."""
         obj = HookDestroyAction(cfngin_context, MagicMock())
         monkeypatch.setattr(obj, "_destroy_stack", lambda: "success")
diff --git a/tests/unit/cfngin/hooks/test_cleanup_s3.py b/tests/unit/cfngin/hooks/test_cleanup_s3.py
index 7c5b9422c..b9f9256cb 100644
--- a/tests/unit/cfngin/hooks/test_cleanup_s3.py
+++ b/tests/unit/cfngin/hooks/test_cleanup_s3.py
@@ -10,10 +10,10 @@
 from runway.cfngin.hooks.cleanup_s3 import purge_bucket
 
 if TYPE_CHECKING:
-    from ...factories import MockCFNginContext
+    from ...factories import MockCfnginContext
 
 
-def test_purge_bucket(cfngin_context: MockCFNginContext) -> None:
+def test_purge_bucket(cfngin_context: MockCfnginContext) -> None:
     """Test purge_bucket."""
     stub = cfngin_context.add_stubber("s3")
 
@@ -24,7 +24,7 @@ def test_purge_bucket(cfngin_context: MockCFNginContext) -> None:
     stub.assert_no_pending_responses()
 
 
-def test_purge_bucket_does_not_exist(cfngin_context: MockCFNginContext) -> None:
+def test_purge_bucket_does_not_exist(cfngin_context: MockCfnginContext) -> None:
     """Test purge_bucket Bucket doesn't exist."""
     stub = cfngin_context.add_stubber("s3")
 
@@ -34,7 +34,7 @@ def test_purge_bucket_does_not_exist(cfngin_context: MockCFNginContext) -> None:
     stub.assert_no_pending_responses()
 
 
-def test_purge_bucket_unhandled_exception(cfngin_context: MockCFNginContext) -> None:
+def test_purge_bucket_unhandled_exception(cfngin_context: MockCfnginContext) -> None:
     """Test purge_bucket with unhandled exception."""
     stub = cfngin_context.add_stubber("s3")
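test_base.py follows the same pattern as the other files: the third-party `mock` backport is replaced by the standard library's `unittest.mock`, and fixture annotations use `pytest.MonkeyPatch` / `pytest.LogCaptureFixture`. Those classes have been public attributes of the `pytest` namespace since pytest 6.2, so no `TYPE_CHECKING`-guarded import is needed. A minimal sketch (the `get_region` helper is hypothetical):

    import os

    import pytest


    def get_region() -> str:
        """Hypothetical helper that reads configuration from the environment."""
        return os.environ.get("AWS_DEFAULT_REGION", "us-east-1")


    def test_get_region(monkeypatch: pytest.MonkeyPatch) -> None:
        # The fixture type comes straight off the pytest namespace.
        monkeypatch.setenv("AWS_DEFAULT_REGION", "eu-west-1")
        assert get_region() == "eu-west-1"
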
diff --git a/tests/unit/cfngin/hooks/test_cleanup_ssm.py b/tests/unit/cfngin/hooks/test_cleanup_ssm.py
index d62211c7d..9c2a5f5ac 100644
--- a/tests/unit/cfngin/hooks/test_cleanup_ssm.py
+++ b/tests/unit/cfngin/hooks/test_cleanup_ssm.py
@@ -7,10 +7,10 @@
 from runway.cfngin.hooks.cleanup_ssm import delete_param
 
 if TYPE_CHECKING:
-    from ...factories import MockCFNginContext
+    from ...factories import MockCfnginContext
 
 
-def test_delete_param(cfngin_context: MockCFNginContext) -> None:
+def test_delete_param(cfngin_context: MockCfnginContext) -> None:
     """Test delete_param."""
     stub = cfngin_context.add_stubber("ssm")
 
@@ -19,7 +19,7 @@ def test_delete_param(cfngin_context: MockCFNginContext) -> None:
     assert delete_param(cfngin_context, parameter_name="foo")
 
 
-def test_delete_param_not_found(cfngin_context: MockCFNginContext) -> None:
+def test_delete_param_not_found(cfngin_context: MockCfnginContext) -> None:
     """Test delete_param."""
     stub = cfngin_context.add_stubber("ssm")
diff --git a/tests/unit/cfngin/hooks/test_command.py b/tests/unit/cfngin/hooks/test_command.py
index 99aed4c7e..56977beed 100644
--- a/tests/unit/cfngin/hooks/test_command.py
+++ b/tests/unit/cfngin/hooks/test_command.py
@@ -1,6 +1,5 @@
 """Tests for runway.cfngin.hooks.command."""
 
-# pyright: basic
 from __future__ import annotations
 
 from typing import TYPE_CHECKING
@@ -26,9 +25,7 @@ def test_run_command(fake_process: FakeProcess) -> None:
 
 def test_run_command_capture(fake_process: FakeProcess) -> None:
     """Test run_command with ``capture``."""
-    fake_process.register_subprocess(
-        ["foo"], returncode=0, stderr="bar", stdout="foobar"
-    )
+    fake_process.register_subprocess(["foo"], returncode=0, stderr="bar", stdout="foobar")
     assert run_command(command=["foo"], capture=True) == {
         "returncode": 0,
         "stderr": b"bar",  # for some reason, pytest-subprocess returns these as bytes
diff --git a/tests/unit/cfngin/hooks/test_ecs.py b/tests/unit/cfngin/hooks/test_ecs.py
index a2ca502be..4c6c33deb 100644
--- a/tests/unit/cfngin/hooks/test_ecs.py
+++ b/tests/unit/cfngin/hooks/test_ecs.py
@@ -1,6 +1,5 @@
 """Tests for runway.cfngin.hooks.ecs."""
 
-# pyright: basic
 from __future__ import annotations
 
 from typing import TYPE_CHECKING
@@ -9,16 +8,16 @@
 from runway.cfngin.hooks.ecs import create_clusters
 
 if TYPE_CHECKING:
+    import pytest
     from mypy_boto3_ecs.type_defs import ClusterTypeDef
-    from pytest import LogCaptureFixture
 
-    from ...factories import MockCFNginContext
+    from ...factories import MockCfnginContext
 
 MODULE = "runway.cfngin.hooks.ecs"
 
 
 def test_create_clusters(
-    caplog: LogCaptureFixture, cfngin_context: MockCFNginContext
+    caplog: pytest.LogCaptureFixture, cfngin_context: MockCfnginContext
 ) -> None:
     """Test create_clusters."""
     caplog.set_level(LogLevels.DEBUG, MODULE)
@@ -28,12 +27,8 @@ def test_create_clusters(
         "bar": {"clusterName": "bar"},
     }
 
-    stub.add_response(
-        "create_cluster", {"cluster": clusters["foo"]}, {"clusterName": "foo"}
-    )
-    stub.add_response(
-        "create_cluster", {"cluster": clusters["bar"]}, {"clusterName": "bar"}
-    )
+    stub.add_response("create_cluster", {"cluster": clusters["foo"]}, {"clusterName": "foo"})
+    stub.add_response("create_cluster", {"cluster": clusters["bar"]}, {"clusterName": "bar"})
 
     with stub:
         assert create_clusters(cfngin_context, clusters=list(clusters)) == {
@@ -45,7 +40,7 @@ def test_create_clusters(
         assert f"creating ECS cluster: {cluster}" in caplog.messages
 
 
-def test_create_clusters_str(cfngin_context: MockCFNginContext) -> None:
+def test_create_clusters_str(cfngin_context: MockCfnginContext) -> None:
     """Test create_clusters with ``clusters`` provided as str."""
     stub = cfngin_context.add_stubber("ecs")
     cluster_name = "foo"
diff --git a/tests/unit/cfngin/hooks/test_iam.py b/tests/unit/cfngin/hooks/test_iam.py
index 4da3c9649..150096090 100644
--- a/tests/unit/cfngin/hooks/test_iam.py
+++ b/tests/unit/cfngin/hooks/test_iam.py
@@ -1,6 +1,5 @@
 """Tests for runway.cfngin.hooks.iam."""
 
-# pyright: basic
 from __future__ import annotations
 
 from datetime import datetime
@@ -22,13 +21,13 @@
 
     from pytest_mock import MockerFixture
 
-    from ...factories import MockCFNginContext
+    from ...factories import MockCfnginContext
 
 CREATE_DATE = datetime(2015, 1, 1)
 MODULE = "runway.cfngin.hooks.iam"
 
 
-def test_create_ecs_service_role(cfngin_context: MockCFNginContext) -> None:
+def test_create_ecs_service_role(cfngin_context: MockCfnginContext) -> None:
     """Test create_ecs_service_role."""
     stub = cfngin_context.add_stubber("iam")
 
@@ -64,7 +63,7 @@ def test_create_ecs_service_role(cfngin_context: MockCFNginContext) -> None:
 
 
 def test_create_ecs_service_role_already_exists(
-    cfngin_context: MockCFNginContext,
+    cfngin_context: MockCfnginContext,
 ) -> None:
     """Test create_ecs_service_role already exists."""
     stub = cfngin_context.add_stubber("iam")
@@ -86,7 +85,7 @@ def test_create_ecs_service_role_already_exists(
 
 
 def test_create_ecs_service_role_raise_client_error(
-    cfngin_context: MockCFNginContext,
+    cfngin_context: MockCfnginContext,
 ) -> None:
     """Test create_ecs_service_role raise ClientError."""
     stub = cfngin_context.add_stubber("iam")
@@ -99,7 +98,7 @@ def test_create_ecs_service_role_raise_client_error(
 
 
 def test_ensure_server_cert_exists(
-    cfngin_context: MockCFNginContext, mocker: MockerFixture, tmp_path: Path
+    cfngin_context: MockCfnginContext, mocker: MockerFixture, tmp_path: Path
 ) -> None:
     """Test ensure_server_cert_exists."""
     cert_name = "foo"
@@ -155,7 +154,7 @@ def test_ensure_server_cert_exists(
 
 
 def test_ensure_server_cert_exists_already_exists(
-    cfngin_context: MockCFNginContext,
+    cfngin_context: MockCfnginContext,
 ) -> None:
     """Test ensure_server_cert_exists already exists."""
     cert_name = "foo"
@@ -188,7 +187,7 @@ def test_ensure_server_cert_exists_already_exists(
 
 
 def test_ensure_server_cert_exists_no_prompt_no_parameters(
-    cfngin_context: MockCFNginContext, mocker: MockerFixture
+    cfngin_context: MockCfnginContext, mocker: MockerFixture
 ) -> None:
     """Test ensure_server_cert_exists no prompt, not parameters."""
     mocker.patch(
@@ -200,14 +199,12 @@ def test_ensure_server_cert_exists_no_prompt_no_parameters(
     stub.add_client_error("get_server_certificate")
 
     with stub:
-        assert not ensure_server_cert_exists(
-            cfngin_context, cert_name="foo", prompt=False
-        )
+        assert not ensure_server_cert_exists(cfngin_context, cert_name="foo", prompt=False)
     stub.assert_no_pending_responses()
 
 
 def test_ensure_server_cert_exists_prompt_no(
-    cfngin_context: MockCFNginContext, mocker: MockerFixture
+    cfngin_context: MockCfnginContext, mocker: MockerFixture
 ) -> None:
     """Test ensure_server_cert_exists prompt input no."""
     mocker.patch(
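The `cfngin_context.add_stubber(...)` helper used throughout these tests wraps botocore's `Stubber`, which queues canned responses on a real client, validates expected request parameters, and fails if any queued response goes unused. A minimal sketch of the underlying pattern (service, operation, and bucket name are arbitrary):

    import boto3
    from botocore.stub import Stubber

    client = boto3.client("s3", region_name="us-east-1")
    stubber = Stubber(client)
    # Queue one response; expected_params are checked against the actual call.
    stubber.add_response(
        "list_objects_v2",
        {"KeyCount": 0, "IsTruncated": False},
        expected_params={"Bucket": "example-bucket"},
    )

    with stubber:
        assert client.list_objects_v2(Bucket="example-bucket")["KeyCount"] == 0
    stubber.assert_no_pending_responses()
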
diff --git a/tests/unit/cfngin/hooks/test_keypair.py b/tests/unit/cfngin/hooks/test_keypair.py
index 1a9c217ea..13aa19b8c 100644
--- a/tests/unit/cfngin/hooks/test_keypair.py
+++ b/tests/unit/cfngin/hooks/test_keypair.py
@@ -1,26 +1,27 @@
 """Tests for runway.cfngin.hooks.keypair."""
 
-# pylint: disable=redefined-outer-name
-# pyright: basic
 from __future__ import annotations
 
 import os
 import sys
 from contextlib import contextmanager
-from typing import TYPE_CHECKING, Iterator, NamedTuple, Tuple
+from typing import TYPE_CHECKING, NamedTuple
+from unittest import mock
 
 import boto3
-import mock
 import pytest
-from moto import mock_ec2, mock_ssm
+from moto.core.decorator import mock_aws
 
 from runway.cfngin.hooks.keypair import KeyPairInfo, ensure_keypair_exists
 
 from ..factories import mock_context
 
 if TYPE_CHECKING:
+    from collections.abc import Iterator
     from pathlib import Path
 
+    from pytest_mock import MockerFixture
+
     from runway.context import CfnginContext
 
 REGION = "us-east-1"
@@ -46,50 +47,42 @@ def ssh_key(cfngin_fixtures: Path) -> SSHKey:
     )
 
 
-@pytest.fixture
+@pytest.fixture()
 def context() -> CfnginContext:
     """Mock context."""
     return mock_context(namespace="fake")
 
 
 @pytest.fixture(autouse=True)
-def ec2(ssh_key: SSHKey) -> Iterator[None]:
-    """Mock EC2."""
-    # Force moto to generate a deterministic key pair on creation.
-    # Can be replaced by something more sensible when
-    # https://github.com/spulec/moto/pull/2108 is merged
-
-    key_pair = {
-        "fingerprint": ssh_key.fingerprint,
-        "material": ssh_key.private_key.decode("ascii"),
-    }
-    with mock.patch("moto.ec2.models.random_key_pair", side_effect=[key_pair]):
-        with mock_ec2():
-            yield
-
-
-@pytest.fixture(autouse=True)
-def ssm() -> Iterator[None]:
-    """Mock SSM."""
-    with mock_ssm():
+def patch_ssh_key(mocker: MockerFixture, ssh_key: SSHKey) -> Iterator[None]:
+    """Force moto to generate a deterministic key pair on creation."""
+    mocker.patch(
+        "moto.ec2.models.key_pairs.random_rsa_key_pair",
+        side_effect=[
+            {
+                "fingerprint": ssh_key.fingerprint,
+                "material": ssh_key.private_key.decode("ascii"),
+                "material_public": ssh_key.public_key.decode("ascii"),
+            }
+        ],
+    )
+    with mock_aws():
         yield
 
 
 @contextmanager
-def mock_input(
-    lines: Tuple[str, ...] = (), isatty: bool = True
-) -> Iterator[mock.MagicMock]:
+def mock_input(lines: tuple[str, ...] = (), isatty: bool = True) -> Iterator[mock.MagicMock]:
     """Mock input."""
-    with mock.patch(
-        "runway.cfngin.hooks.keypair.get_raw_input", side_effect=lines
-    ) as mock_get_raw_input:
-        with mock.patch.object(sys.stdin, "isatty", return_value=isatty):
-            yield mock_get_raw_input
+    with (
+        mock.patch(
+            "runway.cfngin.hooks.keypair.get_raw_input", side_effect=lines
+        ) as mock_get_raw_input,
+        mock.patch.object(sys.stdin, "isatty", return_value=isatty),
+    ):
        yield mock_get_raw_input
 
 
-def assert_key_present(
-    hook_result: KeyPairInfo, key_name: str, fingerprint: str
-) -> None:
+def assert_key_present(hook_result: KeyPairInfo, key_name: str, fingerprint: str) -> None:
     """Assert key present."""
     assert hook_result.get("key_name") == key_name
     assert hook_result.get("fingerprint") == fingerprint
@@ -133,9 +126,7 @@ def test_import_file(tmp_path: Path, context: CfnginContext, ssh_key: SSHKey) -> 
     pub_key = tmp_path / "id_rsa.pub"
     pub_key.write_bytes(ssh_key.public_key)
 
-    result = ensure_keypair_exists(
-        context, keypair=KEY_PAIR_NAME, public_key_path=str(pub_key)
-    )
+    result = ensure_keypair_exists(context, keypair=KEY_PAIR_NAME, public_key_path=str(pub_key))
     assert_key_present(result, KEY_PAIR_NAME, ssh_key.fingerprint)
     assert result.get("status") == "imported"
 
@@ -145,16 +136,12 @@ def test_import_bad_key_data(tmp_path: Path, context: CfnginContext) -> None:
     pub_key = tmp_path / "id_rsa.pub"
     pub_key.write_text("garbage")
 
-    result = ensure_keypair_exists(
-        context, keypair=KEY_PAIR_NAME, public_key_path=str(pub_key)
-    )
+    result = ensure_keypair_exists(context, keypair=KEY_PAIR_NAME, public_key_path=str(pub_key))
     assert result == {}
 
 
 @pytest.mark.parametrize("ssm_key_id", ["my-key"])
-def test_create_in_ssm(
-    context: CfnginContext, ssh_key: SSHKey, ssm_key_id: str
-) -> None:
+def test_create_in_ssm(context: CfnginContext, ssh_key: SSHKey, ssm_key_id: str) -> None:
     """Test create in ssm."""
     result = ensure_keypair_exists(
         context,
@@ -168,9 +155,9 @@ def test_create_in_ssm(
     ssm = boto3.client("ssm")
     param = ssm.get_parameter(Name="param", WithDecryption=True).get("Parameter", {})
 
-    assert param.get("Value", "").replace("\n", "") == ssh_key.private_key.decode(
-        "ascii"
-    ).replace(os.linesep, "")
+    assert param.get("Value", "").replace("\n", "") == ssh_key.private_key.decode("ascii").replace(
+        os.linesep, ""
+    )
     assert param.get("Type") == "SecureString"
 
     params = ssm.describe_parameters().get("Parameters", [])
@@ -199,9 +186,7 @@ def test_interactive_retry_cancel(context: CfnginContext) -> None:
     assert result == {}
 
 
-def test_interactive_import(
-    tmp_path: Path, context: CfnginContext, ssh_key: SSHKey
-) -> None:
+def test_interactive_import(tmp_path: Path, context: CfnginContext, ssh_key: SSHKey) -> None:
     """."""
     key_file = tmp_path / "id_rsa.pub"
     key_file.write_bytes(ssh_key.public_key)
@@ -214,9 +199,7 @@ def test_interactive_import(
     assert result.get("status") == "imported"
 
 
-def test_interactive_create(
-    tmp_path: Path, context: CfnginContext, ssh_key: SSHKey
-) -> None:
+def test_interactive_create(tmp_path: Path, context: CfnginContext, ssh_key: SSHKey) -> None:
     """Test interactive create."""
     key_dir = tmp_path / "keys"
     key_dir.mkdir(parents=True, exist_ok=True)
@@ -243,9 +226,7 @@ def test_interactive_create_bad_dir(tmp_path: Path, context: CfnginContext) -> N
     assert result == {}
 
 
-def test_interactive_create_existing_file(
-    tmp_path: Path, context: CfnginContext
-) -> None:
+def test_interactive_create_existing_file(tmp_path: Path, context: CfnginContext) -> None:
     """Test interactive create existing file."""
     key_dir = tmp_path / "keys"
     key_dir.mkdir(exist_ok=True, parents=True)
diff --git a/tests/unit/cfngin/hooks/test_route53.py b/tests/unit/cfngin/hooks/test_route53.py
index 96c9a969c..0c2ed79dd 100644
--- a/tests/unit/cfngin/hooks/test_route53.py
+++ b/tests/unit/cfngin/hooks/test_route53.py
@@ -9,26 +9,21 @@
 if TYPE_CHECKING:
     from pytest_mock import MockerFixture
 
-    from ...factories import MockCFNginContext
+    from ...factories import MockCfnginContext
 
 MODULE = "runway.cfngin.hooks.route53"
 
 
-def test_create_domain(
-    cfngin_context: MockCFNginContext, mocker: MockerFixture
-) -> None:
+def test_create_domain(cfngin_context: MockCfnginContext, mocker: MockerFixture) -> None:
     """Test create_domain."""
     domain = "foo"
-    create_route53_zone = mocker.patch(
-        f"{MODULE}.create_route53_zone", return_value="bar"
-    )
+    create_route53_zone = mocker.patch(f"{MODULE}.create_route53_zone", return_value="bar")
     _ = cfngin_context.add_stubber("route53")
 
     assert create_domain(cfngin_context, domain=domain) == {
         "domain": domain,
         "zone_id": create_route53_zone.return_value,
     }
-    # pylint: disable=protected-access
     create_route53_zone.assert_called_once_with(
-        cfngin_context._boto3_test_client[f"route53.{cfngin_context.env.aws_region}"],
+        cfngin_context.get_stubbed_client("route53"),
         domain,
     )
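Several hunks above (in test_keypair.py and the earlier files) fold nested `with` statements into a single parenthesized `with (...)` block, a form Python accepts as of 3.10. A minimal sketch of the before/after shapes (file paths are arbitrary):

    from pathlib import Path


    def copy_text_nested(src: Path, dest: Path) -> None:
        """Pre-3.10 style: one nested with-statement per context manager."""
        with src.open() as fin:
            with dest.open("w") as fout:
                fout.write(fin.read())


    def copy_text_grouped(src: Path, dest: Path) -> None:
        """3.10+ style: context managers grouped inside parentheses."""
        with (
            src.open() as fin,
            dest.open("w") as fout,
        ):
            fout.write(fin.read())
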
required hook method.""" - hooks = [ - CfnginHookDefinitionModel(path="runway.cfngin.hooks.blah", required=False) - ] + hooks = [CfnginHookDefinitionModel(path="runway.cfngin.hooks.blah", required=False)] handle_hooks("missing", hooks, self.provider, self.context) - self.assertTrue(HOOK_QUEUE.empty()) + assert HOOK_QUEUE.empty() def test_default_required_hook(self) -> None: """Test default required hook.""" - hooks = [CfnginHookDefinitionModel(**{"path": "runway.cfngin.hooks.blah"})] - with self.assertRaises(AttributeError): + hooks = [CfnginHookDefinitionModel(path="runway.cfngin.hooks.blah")] + with pytest.raises(AttributeError): handle_hooks("missing", hooks, self.provider, self.context) @patch("runway.cfngin.hooks.utils.load_object_from_string") @@ -86,8 +85,8 @@ def test_valid_hook(self, mock_load: MagicMock) -> None: [call(hooks[0].path, try_reload=True), call(hooks[1].path, try_reload=True)] ) good = HOOK_QUEUE.get_nowait() - self.assertEqual(good["provider"].region, "us-east-1") - with self.assertRaises(queue.Empty): + assert good["provider"].region == "us-east-1" + with pytest.raises(queue.Empty): HOOK_QUEUE.get_nowait() def test_valid_enabled_hook(self) -> None: @@ -101,8 +100,8 @@ def test_valid_enabled_hook(self) -> None: ] handle_hooks("missing", hooks, self.provider, self.context) good = HOOK_QUEUE.get_nowait() - self.assertEqual(good["provider"].region, "us-east-1") - with self.assertRaises(queue.Empty): + assert good["provider"].region == "us-east-1" + with pytest.raises(queue.Empty): HOOK_QUEUE.get_nowait() def test_valid_enabled_false_hook(self) -> None: @@ -115,7 +114,7 @@ def test_valid_enabled_false_hook(self) -> None: ) ] handle_hooks("missing", hooks, self.provider, self.context) - self.assertTrue(HOOK_QUEUE.empty()) + assert HOOK_QUEUE.empty() def test_context_provided_to_hook(self) -> None: """Test context provided to hook.""" @@ -135,7 +134,7 @@ def test_hook_failure(self) -> None: required=True, ) ] - with self.assertRaises(SystemExit): + with pytest.raises(SystemExit): handle_hooks("fail", hooks, self.provider, self.context) hooks = [ CfnginHookDefinitionModel( @@ -143,7 +142,7 @@ def test_hook_failure(self) -> None: required=True, ) ] - with self.assertRaises(Exception): # noqa: B017 + with pytest.raises(Exception): # noqa: B017, PT011 handle_hooks("fail", hooks, self.provider, self.context) hooks = [ CfnginHookDefinitionModel( @@ -162,15 +161,13 @@ def test_return_data_hook(self) -> None: data_key="my_hook_results", ), # Shouldn't return data - CfnginHookDefinitionModel( - path="tests.unit.cfngin.hooks.test_utils.context_hook" - ), + CfnginHookDefinitionModel(path="tests.unit.cfngin.hooks.test_utils.context_hook"), ] handle_hooks("result", hooks, self.provider, self.context) - self.assertEqual(self.context.hook_data["my_hook_results"]["foo"], "bar") + assert self.context.hook_data["my_hook_results"]["foo"] == "bar" # Verify only the first hook resulted in stored data - self.assertEqual(list(self.context.hook_data.keys()), ["my_hook_results"]) + assert list(self.context.hook_data.keys()) == ["my_hook_results"] def test_return_data_hook_duplicate_key(self) -> None: """Test return data hook duplicate key.""" @@ -185,7 +182,7 @@ def test_return_data_hook_duplicate_key(self) -> None: ), ] - with self.assertRaises(KeyError): + with pytest.raises(KeyError): handle_hooks("result", hooks, self.provider, self.context) def test_resolve_lookups_in_args(self) -> None: @@ -199,63 +196,63 @@ def test_resolve_lookups_in_args(self) -> None: ] handle_hooks("lookups", hooks, 
self.provider, self.context) - self.assertEqual( - self.context.hook_data["my_hook_results"]["default_lookup"], "default_value" - ) + assert self.context.hook_data["my_hook_results"]["default_lookup"] == "default_value" class MockHook(CfnginHookProtocol): """Mock hook class.""" - args: Dict[str, Any] + ARGS_PARSER: ClassVar[type[HookArgsBaseModel]] = HookArgsBaseModel + + args: dict[str, Any] - def __init__(self, **kwargs: Any) -> None: # pylint: disable=super-init-not-called + def __init__(self, **_kwargs: Any) -> None: """Instantiate class.""" - self.args = {} + self.args = {} # type: ignore - def post_deploy(self) -> Dict[str, str]: + def post_deploy(self) -> dict[str, str]: """Run during the **post_deploy** stage.""" return {"status": "success"} - def post_destroy(self) -> Dict[str, str]: + def post_destroy(self) -> dict[str, str]: """Run during the **post_destroy** stage.""" return {"status": "success"} - def pre_deploy(self) -> Dict[str, str]: + def pre_deploy(self) -> dict[str, str]: """Run during the **pre_deploy** stage.""" return {"status": "success"} - def pre_destroy(self) -> Dict[str, str]: + def pre_destroy(self) -> dict[str, str]: """Run during the **pre_destroy** stage.""" return {"status": "success"} -def mock_hook(*args: Any, **kwargs: Any) -> bool: +def mock_hook(*_args: Any, **kwargs: Any) -> bool: """Mock hook.""" HOOK_QUEUE.put(kwargs) return True -def fail_hook(*args: Any, **kwargs: Any) -> None: +def fail_hook(*_args: Any, **_kwargs: Any) -> None: """Fail hook.""" - return None + return -def exception_hook(*args: Any, **kwargs: Any) -> None: +def exception_hook(*_args: Any, **_kwargs: Any) -> None: """Exception hook.""" raise Exception -def context_hook(*args: Any, **kwargs: Any) -> bool: +def context_hook(*_args: Any, **kwargs: Any) -> bool: """Context hook.""" return "context" in kwargs -def result_hook(*args: Any, **kwargs: Any) -> Dict[str, str]: +def result_hook(*_args: Any, **_kwargs: Any) -> dict[str, str]: """Results hook.""" return {"foo": "bar"} -def kwargs_hook(*args: Any, **kwargs: Any) -> Any: +def kwargs_hook(*_args: Any, **kwargs: Any) -> Any: """Kwargs hook.""" return kwargs diff --git a/tests/unit/cfngin/lookups/handlers/test_ami.py b/tests/unit/cfngin/lookups/handlers/test_ami.py index b1b09756c..567fe6816 100644 --- a/tests/unit/cfngin/lookups/handlers/test_ami.py +++ b/tests/unit/cfngin/lookups/handlers/test_ami.py @@ -1,6 +1,5 @@ """Tests for runway.cfngin.lookups.handlers.ami.""" -# pyright: basic from __future__ import annotations from typing import TYPE_CHECKING @@ -10,7 +9,7 @@ from runway.cfngin.lookups.handlers.ami import AmiLookup, ImageNotFound if TYPE_CHECKING: - from ....factories import MockCFNginContext + from ....factories import MockCfnginContext REGION = "us-east-1" @@ -18,7 +17,7 @@ class TestAMILookup: """Tests for runway.cfngin.lookups.handlers.ami.AmiLookup.""" - def test_basic_lookup_single_image(self, cfngin_context: MockCFNginContext) -> None: + def test_basic_lookup_single_image(self, cfngin_context: MockCfnginContext) -> None: """Test basic lookup single image.""" executable_users = ["123456789012", "234567890123"] stubber = cfngin_context.add_stubber("ec2") @@ -55,7 +54,7 @@ def test_basic_lookup_single_image(self, cfngin_context: MockCFNginContext) -> N == image_id ) - def test_basic_lookup_with_region(self, cfngin_context: MockCFNginContext) -> None: + def test_basic_lookup_with_region(self, cfngin_context: MockCfnginContext) -> None: """Test basic lookup with region.""" stubber = cfngin_context.add_stubber("ec2", 
region="us-west-1") image_id = "ami-fffccc111" @@ -86,9 +85,7 @@ def test_basic_lookup_with_region(self, cfngin_context: MockCFNginContext) -> No == image_id ) - def test_basic_lookup_multiple_images( - self, cfngin_context: MockCFNginContext - ) -> None: + def test_basic_lookup_multiple_images(self, cfngin_context: MockCfnginContext) -> None: """Test basic lookup multiple images.""" stubber = cfngin_context.add_stubber("ec2") image_id = "ami-fffccc111" @@ -139,7 +136,7 @@ def test_basic_lookup_multiple_images( ) def test_basic_lookup_multiple_images_name_match( - self, cfngin_context: MockCFNginContext + self, cfngin_context: MockCfnginContext ) -> None: """Test basic lookup multiple images name match.""" stubber = cfngin_context.add_stubber("ec2") @@ -180,9 +177,7 @@ def test_basic_lookup_multiple_images_name_match( == image_id ) - def test_basic_lookup_no_matching_images( - self, cfngin_context: MockCFNginContext - ) -> None: + def test_basic_lookup_no_matching_images(self, cfngin_context: MockCfnginContext) -> None: """Test basic lookup no matching images.""" stubber = cfngin_context.add_stubber("ec2") stubber.add_response("describe_images", {"Images": []}) @@ -193,7 +188,7 @@ def test_basic_lookup_no_matching_images( ) def test_basic_lookup_no_matching_images_from_name( - self, cfngin_context: MockCFNginContext + self, cfngin_context: MockCfnginContext ) -> None: """Test basic lookup no matching images from name.""" stubber = cfngin_context.add_stubber("ec2") @@ -216,6 +211,4 @@ def test_basic_lookup_no_matching_images_from_name( ) with stubber, pytest.raises(ImageNotFound): - AmiLookup.handle( - value=r"owners:self name_regex:MyImage\s\d", context=cfngin_context - ) + AmiLookup.handle(value=r"owners:self name_regex:MyImage\s\d", context=cfngin_context) diff --git a/tests/unit/cfngin/lookups/handlers/test_awslambda.py b/tests/unit/cfngin/lookups/handlers/test_awslambda.py index 40b72678c..c38bc8501 100644 --- a/tests/unit/cfngin/lookups/handlers/test_awslambda.py +++ b/tests/unit/cfngin/lookups/handlers/test_awslambda.py @@ -1,35 +1,28 @@ """Test runway.cfngin.lookups.handlers.awslambda.""" -# pylint: disable=redefined-outer-name from __future__ import annotations from typing import TYPE_CHECKING +from unittest.mock import Mock import pytest -from mock import Mock from troposphere.awslambda import Code, Content -from runway.cfngin.exceptions import CfnginOnlyLookupError from runway.cfngin.hooks.awslambda.base_classes import AwsLambdaHook from runway.cfngin.hooks.awslambda.models.responses import AwsLambdaHookDeployResponse from runway.cfngin.lookups.handlers.awslambda import AwsLambdaLookup -from runway.config import CfnginConfig -from runway.config.models.cfngin import ( - CfnginConfigDefinitionModel, - CfnginHookDefinitionModel, -) from runway.lookups.handlers.base import LookupHandler if TYPE_CHECKING: from pytest_mock import MockerFixture - from runway.context import CfnginContext, RunwayContext + from runway.context import CfnginContext MODULE = "runway.cfngin.lookups.handlers.awslambda" QUERY = "test::foo=bar" -@pytest.fixture(scope="function") +@pytest.fixture() def hook_data() -> AwsLambdaHookDeployResponse: """Fixture for hook response data.""" return AwsLambdaHookDeployResponse( @@ -47,14 +40,13 @@ def hook_data() -> AwsLambdaHookDeployResponse: class TestAwsLambdaLookup: """Test AwsLambdaLookup.""" - def test_get_deployment_package_data( - self, hook_data: AwsLambdaHookDeployResponse - ) -> None: + def test_get_deployment_package_data(self, hook_data: 
AwsLambdaHookDeployResponse) -> None: """Test get_deployment_package_data.""" data_key = "test.key" assert ( AwsLambdaLookup.get_deployment_package_data( - Mock(hook_data={data_key: hook_data.dict(by_alias=True)}), data_key + Mock(hook_data={data_key: hook_data.model_dump(by_alias=True)}), + data_key, ) == hook_data ) @@ -67,25 +59,16 @@ def test_get_deployment_package_data_set_hook_data( ) -> None: """Test get_deployment_package_data set hook_data when it's missing.""" data_key = "test.key" - hook = Mock(plan=Mock(return_value=hook_data.dict(by_alias=True))) - init_hook_class = mocker.patch.object( - AwsLambdaLookup, "init_hook_class", return_value=hook - ) - get_required_hook_definition = mocker.patch.object( + hook = Mock(plan=Mock(return_value=hook_data.model_dump(by_alias=True))) + init_hook_class = mocker.patch.object(AwsLambdaLookup, "init_hook_class", return_value=hook) + get_hook_definition = mocker.patch.object( AwsLambdaLookup, "get_required_hook_definition", return_value="hook_def" ) - assert ( - AwsLambdaLookup.get_deployment_package_data(cfngin_context, data_key) - == hook_data - ) - get_required_hook_definition.assert_called_once_with( - cfngin_context.config, data_key - ) - init_hook_class.assert_called_once_with( - cfngin_context, get_required_hook_definition.return_value - ) + assert AwsLambdaLookup.get_deployment_package_data(cfngin_context, data_key) == hook_data + get_hook_definition.assert_called_once_with(cfngin_context.config, data_key) + init_hook_class.assert_called_once_with(cfngin_context, get_hook_definition.return_value) hook.plan.assert_called_once_with() - assert cfngin_context.hook_data[data_key] == hook_data.dict(by_alias=True) + assert cfngin_context.hook_data[data_key] == hook_data.model_dump(by_alias=True) def test_get_deployment_package_data_raise_type_error(self) -> None: """Test get_deployment_package_data.""" @@ -93,79 +76,7 @@ def test_get_deployment_package_data_raise_type_error(self) -> None: assert not AwsLambdaLookup.get_deployment_package_data( Mock(hook_data={"test": {"invalid": True}}), "test" ) - assert "expected AwsLambdaHookDeployResponseTypedDict, not " in str( - excinfo.value - ) - - def test_get_required_hook_definition(self) -> None: - """Test get_required_hook_definition.""" - data_key = "test.data" - expected_hook = CfnginHookDefinitionModel(data_key=data_key, path="foo.bar") - config = CfnginConfig( - CfnginConfigDefinitionModel( - namespace="test", - pre_deploy=[ - expected_hook, - CfnginHookDefinitionModel(data_key="foo", path="foo"), - ], - pre_destroy=[ - CfnginHookDefinitionModel(data_key=data_key, path="pre_destroy") - ], - post_deploy=[ - CfnginHookDefinitionModel(data_key=data_key, path="post_deploy") - ], - post_destroy=[ - CfnginHookDefinitionModel(data_key=data_key, path="post_destroy") - ], - ) - ) - assert ( - AwsLambdaLookup.get_required_hook_definition(config, data_key) - == expected_hook - ) - - def test_get_required_hook_definition_raise_value_error_more_than_one(self) -> None: - """Test get_required_hook_definition raise ValueError for more than one.""" - data_key = "test.data" - expected_hook = CfnginHookDefinitionModel(data_key=data_key, path="foo.bar") - config = CfnginConfig( - CfnginConfigDefinitionModel( - namespace="test", - pre_deploy=[expected_hook, expected_hook], - ) - ) - with pytest.raises(ValueError) as excinfo: - assert not AwsLambdaLookup.get_required_hook_definition(config, data_key) - assert ( - str(excinfo.value) - == f"more than one hook definition found with data_key {data_key}" - ) - - def 
test_get_required_hook_definition_raise_value_error_none(self) -> None: - """Test get_required_hook_definition raise ValueError none found.""" - data_key = "test.data" - config = CfnginConfig( - CfnginConfigDefinitionModel( - namespace="test", - pre_deploy=[ - CfnginHookDefinitionModel(data_key="foo", path="foo"), - ], - pre_destroy=[ - CfnginHookDefinitionModel(data_key=data_key, path="pre_destroy") - ], - post_deploy=[ - CfnginHookDefinitionModel(data_key=data_key, path="post_deploy") - ], - post_destroy=[ - CfnginHookDefinitionModel(data_key=data_key, path="post_destroy") - ], - ) - ) - with pytest.raises(ValueError) as excinfo: - assert not AwsLambdaLookup.get_required_hook_definition(config, data_key) - assert ( - str(excinfo.value) == f"no hook definition found with data_key {data_key}" - ) + assert "expected AwsLambdaHookDeployResponseTypedDict, not " in str(excinfo.value) def test_handle(self, mocker: MockerFixture) -> None: """Test handle.""" @@ -176,12 +87,9 @@ def test_handle(self, mocker: MockerFixture) -> None: mock_get_deployment_package_data = mocker.patch.object( AwsLambdaLookup, "get_deployment_package_data", return_value="success" ) - mock_parse = mocker.patch.object( - AwsLambdaLookup, "parse", return_value=("query", {}) - ) + mock_parse = mocker.patch.object(AwsLambdaLookup, "parse", return_value=("query", {})) assert ( - AwsLambdaLookup.handle(QUERY, context) - == mock_get_deployment_package_data.return_value + AwsLambdaLookup.handle(QUERY, context) == mock_get_deployment_package_data.return_value ) mock_parse.assert_called_once_with(QUERY) mock_get_deployment_package_data.assert_called_once_with( @@ -189,13 +97,6 @@ def test_handle(self, mocker: MockerFixture) -> None: ) mock_format_results.assert_not_called() - def test_handle_raise_cfngin_only_lookup_error( - self, runway_context: RunwayContext - ) -> None: - """Test handle raise CfnginOnlyLookupError.""" - with pytest.raises(CfnginOnlyLookupError): - AwsLambdaLookup.handle("test", runway_context) - def test_init_hook_class(self, mocker: MockerFixture) -> None: """Test init_hook_class.""" context = Mock() @@ -207,19 +108,14 @@ def test_init_hook_class(self, mocker: MockerFixture) -> None: mock_isinstance = mocker.patch(f"{MODULE}.isinstance", return_value=True) mock_hasattr = mocker.patch(f"{MODULE}.hasattr", return_value=True) mock_issubclass = mocker.patch(f"{MODULE}.issubclass", return_value=True) - assert ( - AwsLambdaLookup.init_hook_class(context, hook_def) - == hook_class.return_value - ) + assert AwsLambdaLookup.init_hook_class(context, hook_def) == hook_class.return_value load_object_from_string.assert_called_once_with(hook_def.path) mock_isinstance.assert_called_once_with(hook_class, type) mock_hasattr.assert_called_once_with(hook_class, "__subclasscheck__") mock_issubclass.assert_called_once_with(hook_class, AwsLambdaHook) hook_class.assert_called_once_with(context, **hook_def.args) - def test_init_hook_class_raise_type_error_not_class( - self, mocker: MockerFixture - ) -> None: + def test_init_hook_class_raise_type_error_not_class(self, mocker: MockerFixture) -> None: """Test init_hook_class raise TypeError not a class.""" def _test_func() -> None: @@ -235,9 +131,7 @@ def _test_func() -> None: "must be a subclass of AwsLambdaHook to use this lookup" ) - def test_init_hook_class_raise_type_error_not_subclass( - self, mocker: MockerFixture - ) -> None: + def test_init_hook_class_raise_type_error_not_subclass(self, mocker: MockerFixture) -> None: """Test init_hook_class raise TypeError not a class.""" 
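# Editor's aside -- a minimal, self-contained sketch of the patching pattern used
# throughout these hunks: pytest-mock's `mocker.patch.object` swaps an attribute
# (here a classmethod) for a Mock for the duration of one test and restores it
# automatically on teardown. The names `Widget` and `fetch` are hypothetical
# stand-ins, not runway APIs.
from pytest_mock import MockerFixture


class Widget:
    @classmethod
    def fetch(cls, key: str) -> str:
        raise RuntimeError("never reached while patched")


def test_fetch_is_patched(mocker: MockerFixture) -> None:
    mock_fetch = mocker.patch.object(Widget, "fetch", return_value="stubbed")
    assert Widget.fetch("any-key") == "stubbed"
    mock_fetch.assert_called_once_with("any-key")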
hook_class = Mock(return_value="success") context = Mock() @@ -256,299 +150,241 @@ def test_init_hook_class_raise_type_error_not_subclass( class TestAwsLambdaLookupCode: """Test TestAwsLambdaLookup.Code.""" - def test_handle( - self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture - ) -> None: + def test_handle(self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture) -> None: """Test handle.""" context = Mock() mock_format_results = mocker.patch.object( LookupHandler, "format_results", return_value="success" ) - mock_handle = mocker.patch.object( - AwsLambdaLookup, "handle", return_value=hook_data - ) - result = AwsLambdaLookup.Code.handle(QUERY, context, "arg", foo="bar") + mock_handle = mocker.patch.object(AwsLambdaLookup, "handle", return_value=hook_data) + result = AwsLambdaLookup.Code.handle(QUERY, context, foo="bar") assert isinstance(result, Code) assert not hasattr(result, "ImageUri") assert result.S3Bucket == hook_data.bucket_name assert result.S3Key == hook_data.object_key assert result.S3ObjectVersion == hook_data.object_version_id assert not hasattr(result, "ZipFile") - mock_handle.assert_called_once_with(QUERY, context, "arg", foo="bar") + mock_handle.assert_called_once_with(QUERY, context, foo="bar") mock_format_results.assert_not_called() def test_type_name(self) -> None: """Test TYPE_NAME.""" assert ( - AwsLambdaLookup.Code.TYPE_NAME - == f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.Code.__name__}" + f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.Code.__name__}" + == AwsLambdaLookup.Code.TYPE_NAME ) class TestAwsLambdaLookupCodeSha256: """Test TestAwsLambdaLookup.CodeSha256.""" - def test_handle( - self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture - ) -> None: + def test_handle(self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture) -> None: """Test handle.""" context = Mock() mock_format_results = mocker.patch.object( LookupHandler, "format_results", return_value="success" ) - mock_handle = mocker.patch.object( - AwsLambdaLookup, "handle", return_value=hook_data - ) - assert ( - AwsLambdaLookup.CodeSha256.handle(QUERY, context, "arg", foo="bar") - == hook_data.code_sha256 - ) - mock_handle.assert_called_once_with(QUERY, context, "arg", foo="bar") + mock_handle = mocker.patch.object(AwsLambdaLookup, "handle", return_value=hook_data) + assert AwsLambdaLookup.CodeSha256.handle(QUERY, context, foo="bar") == hook_data.code_sha256 + mock_handle.assert_called_once_with(QUERY, context, foo="bar") mock_format_results.assert_not_called() def test_type_name(self) -> None: """Test TYPE_NAME.""" assert ( - AwsLambdaLookup.CodeSha256.TYPE_NAME - == f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.CodeSha256.__name__}" + f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.CodeSha256.__name__}" + == AwsLambdaLookup.CodeSha256.TYPE_NAME ) class TestAwsLambdaLookupCompatibleArchitectures: """Test TestAwsLambdaLookup.CompatibleArchitectures.""" - def test_handle( - self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture - ) -> None: + def test_handle(self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture) -> None: """Test handle.""" context = Mock() mock_format_results = mocker.patch.object( LookupHandler, "format_results", return_value="success" ) - mock_handle = mocker.patch.object( - AwsLambdaLookup, "handle", return_value=hook_data - ) + mock_handle = mocker.patch.object(AwsLambdaLookup, "handle", return_value=hook_data) assert ( - AwsLambdaLookup.CompatibleArchitectures.handle( - QUERY, context, "arg", 
foo="bar" - ) + AwsLambdaLookup.CompatibleArchitectures.handle(QUERY, context, foo="bar") == mock_format_results.return_value ) - mock_handle.assert_called_once_with(QUERY, context, "arg", foo="bar") - mock_format_results.assert_called_once_with( - hook_data.compatible_architectures, foo="bar" - ) + mock_handle.assert_called_once_with(QUERY, context, foo="bar") + mock_format_results.assert_called_once_with(hook_data.compatible_architectures, foo="bar") def test_type_name(self) -> None: """Test TYPE_NAME.""" assert ( - AwsLambdaLookup.CompatibleArchitectures.TYPE_NAME - == f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.CompatibleArchitectures.__name__}" + f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.CompatibleArchitectures.__name__}" + == AwsLambdaLookup.CompatibleArchitectures.TYPE_NAME ) class TestAwsLambdaLookupCompatibleRuntimes: """Test TestAwsLambdaLookup.CompatibleRuntimes.""" - def test_handle( - self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture - ) -> None: + def test_handle(self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture) -> None: """Test handle.""" context = Mock() mock_format_results = mocker.patch.object( LookupHandler, "format_results", return_value="success" ) - mock_handle = mocker.patch.object( - AwsLambdaLookup, "handle", return_value=hook_data - ) + mock_handle = mocker.patch.object(AwsLambdaLookup, "handle", return_value=hook_data) assert ( - AwsLambdaLookup.CompatibleRuntimes.handle(QUERY, context, "arg", foo="bar") + AwsLambdaLookup.CompatibleRuntimes.handle(QUERY, context, foo="bar") == mock_format_results.return_value ) - mock_handle.assert_called_once_with(QUERY, context, "arg", foo="bar") - mock_format_results.assert_called_once_with( - hook_data.compatible_runtimes, foo="bar" - ) + mock_handle.assert_called_once_with(QUERY, context, foo="bar") + mock_format_results.assert_called_once_with(hook_data.compatible_runtimes, foo="bar") def test_type_name(self) -> None: """Test TYPE_NAME.""" assert ( - AwsLambdaLookup.CompatibleRuntimes.TYPE_NAME - == f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.CompatibleRuntimes.__name__}" + f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.CompatibleRuntimes.__name__}" + == AwsLambdaLookup.CompatibleRuntimes.TYPE_NAME ) class TestAwsLambdaLookupContent: """Test TestAwsLambdaLookup.Content.""" - def test_handle( - self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture - ) -> None: + def test_handle(self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture) -> None: """Test handle.""" context = Mock() mock_format_results = mocker.patch.object( LookupHandler, "format_results", return_value="success" ) - mock_handle = mocker.patch.object( - AwsLambdaLookup, "handle", return_value=hook_data - ) - result = AwsLambdaLookup.Content.handle(QUERY, context, "arg", foo="bar") + mock_handle = mocker.patch.object(AwsLambdaLookup, "handle", return_value=hook_data) + result = AwsLambdaLookup.Content.handle(QUERY, context, foo="bar") assert isinstance(result, Content) assert not hasattr(result, "ImageUri") assert result.S3Bucket == hook_data.bucket_name assert result.S3Key == hook_data.object_key assert result.S3ObjectVersion == hook_data.object_version_id - mock_handle.assert_called_once_with(QUERY, context, "arg", foo="bar") + mock_handle.assert_called_once_with(QUERY, context, foo="bar") mock_format_results.assert_not_called() def test_type_name(self) -> None: """Test TYPE_NAME.""" assert ( - AwsLambdaLookup.Content.TYPE_NAME - == 
f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.Content.__name__}" + f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.Content.__name__}" + == AwsLambdaLookup.Content.TYPE_NAME ) class TestAwsLambdaLookupLicenseInfo: """Test TestAwsLambdaLookup.LicenseInfo.""" - def test_handle( - self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture - ) -> None: + def test_handle(self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture) -> None: """Test handle.""" context = Mock() mock_format_results = mocker.patch.object( LookupHandler, "format_results", return_value="success" ) - mock_handle = mocker.patch.object( - AwsLambdaLookup, "handle", return_value=hook_data - ) + mock_handle = mocker.patch.object(AwsLambdaLookup, "handle", return_value=hook_data) assert ( - AwsLambdaLookup.LicenseInfo.handle(QUERY, context, "arg", foo="bar") + AwsLambdaLookup.LicenseInfo.handle(QUERY, context, foo="bar") == mock_format_results.return_value ) - mock_handle.assert_called_once_with(QUERY, context, "arg", foo="bar") + mock_handle.assert_called_once_with(QUERY, context, foo="bar") mock_format_results.assert_called_once_with(hook_data.license, foo="bar") def test_type_name(self) -> None: """Test TYPE_NAME.""" assert ( - AwsLambdaLookup.LicenseInfo.TYPE_NAME - == f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.LicenseInfo.__name__}" + f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.LicenseInfo.__name__}" + == AwsLambdaLookup.LicenseInfo.TYPE_NAME ) class TestAwsLambdaLookupRuntime: """Test TestAwsLambdaLookup.Runtime.""" - def test_handle( - self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture - ) -> None: + def test_handle(self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture) -> None: """Test handle.""" context = Mock() mock_format_results = mocker.patch.object( LookupHandler, "format_results", return_value="success" ) - mock_handle = mocker.patch.object( - AwsLambdaLookup, "handle", return_value=hook_data - ) - assert ( - AwsLambdaLookup.Runtime.handle(QUERY, context, "arg", foo="bar") - == hook_data.runtime - ) - mock_handle.assert_called_once_with(QUERY, context, "arg", foo="bar") + mock_handle = mocker.patch.object(AwsLambdaLookup, "handle", return_value=hook_data) + assert AwsLambdaLookup.Runtime.handle(QUERY, context, foo="bar") == hook_data.runtime + mock_handle.assert_called_once_with(QUERY, context, foo="bar") mock_format_results.assert_not_called() def test_type_name(self) -> None: """Test TYPE_NAME.""" assert ( - AwsLambdaLookup.Runtime.TYPE_NAME - == f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.Runtime.__name__}" + f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.Runtime.__name__}" + == AwsLambdaLookup.Runtime.TYPE_NAME ) class TestAwsLambdaLookupS3Bucket: """Test TestAwsLambdaLookup.S3Bucket.""" - def test_handle( - self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture - ) -> None: + def test_handle(self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture) -> None: """Test handle.""" context = Mock() mock_format_results = mocker.patch.object( LookupHandler, "format_results", return_value="success" ) - mock_handle = mocker.patch.object( - AwsLambdaLookup, "handle", return_value=hook_data - ) - assert ( - AwsLambdaLookup.S3Bucket.handle(QUERY, context, "arg", foo="bar") - == hook_data.bucket_name - ) - mock_handle.assert_called_once_with(QUERY, context, "arg", foo="bar") + mock_handle = mocker.patch.object(AwsLambdaLookup, "handle", return_value=hook_data) + assert AwsLambdaLookup.S3Bucket.handle(QUERY, context, foo="bar") == 
hook_data.bucket_name + mock_handle.assert_called_once_with(QUERY, context, foo="bar") mock_format_results.assert_not_called() def test_type_name(self) -> None: """Test TYPE_NAME.""" assert ( - AwsLambdaLookup.S3Bucket.TYPE_NAME - == f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.S3Bucket.__name__}" + f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.S3Bucket.__name__}" + == AwsLambdaLookup.S3Bucket.TYPE_NAME ) class TestAwsLambdaLookupS3Key: """Test TestAwsLambdaLookup.S3Key.""" - def test_handle( - self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture - ) -> None: + def test_handle(self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture) -> None: """Test handle.""" context = Mock() mock_format_results = mocker.patch.object( LookupHandler, "format_results", return_value="success" ) - mock_handle = mocker.patch.object( - AwsLambdaLookup, "handle", return_value=hook_data - ) - assert ( - AwsLambdaLookup.S3Key.handle(QUERY, context, "arg", foo="bar") - == hook_data.object_key - ) - mock_handle.assert_called_once_with(QUERY, context, "arg", foo="bar") + mock_handle = mocker.patch.object(AwsLambdaLookup, "handle", return_value=hook_data) + assert AwsLambdaLookup.S3Key.handle(QUERY, context, foo="bar") == hook_data.object_key + mock_handle.assert_called_once_with(QUERY, context, foo="bar") mock_format_results.assert_not_called() def test_type_name(self) -> None: """Test TYPE_NAME.""" assert ( - AwsLambdaLookup.S3Key.TYPE_NAME - == f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.S3Key.__name__}" + f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.S3Key.__name__}" + == AwsLambdaLookup.S3Key.TYPE_NAME ) class TestAwsLambdaLookupS3ObjectVersion: """Test TestAwsLambdaLookup.S3ObjectVersion.""" - def test_handle( - self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture - ) -> None: + def test_handle(self, hook_data: AwsLambdaHookDeployResponse, mocker: MockerFixture) -> None: """Test handle.""" context = Mock() mock_format_results = mocker.patch.object( LookupHandler, "format_results", return_value="success" ) - mock_handle = mocker.patch.object( - AwsLambdaLookup, "handle", return_value=hook_data - ) + mock_handle = mocker.patch.object(AwsLambdaLookup, "handle", return_value=hook_data) assert ( - AwsLambdaLookup.S3ObjectVersion.handle(QUERY, context, "arg", foo="bar") + AwsLambdaLookup.S3ObjectVersion.handle(QUERY, context, foo="bar") == hook_data.object_version_id ) - mock_handle.assert_called_once_with(QUERY, context, "arg", foo="bar") + mock_handle.assert_called_once_with(QUERY, context, foo="bar") mock_format_results.assert_not_called() def test_type_name(self) -> None: """Test TYPE_NAME.""" assert ( - AwsLambdaLookup.S3ObjectVersion.TYPE_NAME - == f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.S3ObjectVersion.__name__}" + f"{AwsLambdaLookup.TYPE_NAME}.{AwsLambdaLookup.S3ObjectVersion.__name__}" + == AwsLambdaLookup.S3ObjectVersion.TYPE_NAME ) diff --git a/tests/unit/cfngin/lookups/handlers/test_default.py b/tests/unit/cfngin/lookups/handlers/test_default.py index 173766fb2..ea77f9783 100644 --- a/tests/unit/cfngin/lookups/handlers/test_default.py +++ b/tests/unit/cfngin/lookups/handlers/test_default.py @@ -1,9 +1,9 @@ """Tests for runway.cfngin.lookups.handlers.default.""" -# pyright: basic import unittest +from unittest.mock import MagicMock -from mock import MagicMock +import pytest from runway.cfngin.lookups.handlers.default import DefaultLookup from runway.context import CfnginContext @@ -15,28 +15,21 @@ class TestDefaultLookup(unittest.TestCase): def 
setUp(self) -> None: """Run before tests.""" self.provider = MagicMock() - self.context = CfnginContext( - parameters={"namespace": "test", "env_var": "val_in_env"} - ) + self.context = CfnginContext(parameters={"namespace": "test", "env_var": "val_in_env"}) def test_env_var_present(self) -> None: """Test env var present.""" lookup_val = "env_var::fallback" - value = DefaultLookup.handle( - lookup_val, provider=self.provider, context=self.context - ) + value = DefaultLookup.handle(lookup_val, provider=self.provider, context=self.context) assert value == "val_in_env" def test_env_var_missing(self) -> None: """Test env var missing.""" lookup_val = "bad_env_var::fallback" - value = DefaultLookup.handle( - lookup_val, provider=self.provider, context=self.context - ) + value = DefaultLookup.handle(lookup_val, provider=self.provider, context=self.context) assert value == "fallback" def test_invalid_value(self) -> None: """Test invalid value.""" - with self.assertRaises(ValueError): - value = "env_var:fallback" - DefaultLookup.handle(value, provider=self.provider, context=self.context) + with pytest.raises(ValueError): # noqa: PT011 + DefaultLookup.handle("env_var:fallback", provider=self.provider, context=self.context) diff --git a/tests/unit/cfngin/lookups/handlers/test_dynamodb.py b/tests/unit/cfngin/lookups/handlers/test_dynamodb.py index 3cba59350..c0600680f 100644 --- a/tests/unit/cfngin/lookups/handlers/test_dynamodb.py +++ b/tests/unit/cfngin/lookups/handlers/test_dynamodb.py @@ -1,16 +1,15 @@ """Tests for runway.cfngin.lookups.handlers.dynamodb.""" -# pyright: basic from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any import pytest from runway.cfngin.lookups.handlers.dynamodb import DynamodbLookup, QueryDataModel if TYPE_CHECKING: - from ....factories import MockCFNginContext + from ....factories import MockCfnginContext GET_ITEM_RESPONSE = { "Item": { @@ -57,7 +56,7 @@ class TestDynamoDBHandler: ) def test_handle( self, - cfngin_context: MockCFNginContext, + cfngin_context: MockCfnginContext, expected_projection: str, expected_result: str, query: str, @@ -71,12 +70,10 @@ def test_handle( } stubber.add_response("get_item", GET_ITEM_RESPONSE, expected_params) with stubber: - assert ( - DynamodbLookup.handle(query, context=cfngin_context) == expected_result - ) + assert DynamodbLookup.handle(query, context=cfngin_context) == expected_result stubber.assert_no_pending_responses() - def test_handle_client_error(self, cfngin_context: MockCFNginContext) -> None: + def test_handle_client_error(self, cfngin_context: MockCfnginContext) -> None: """Test handle ClientError.""" stubber = cfngin_context.add_stubber("dynamodb") expected_params = { @@ -89,23 +86,20 @@ def test_handle_client_error(self, cfngin_context: MockCFNginContext) -> None: expected_params=expected_params, ) query = "TestTable@FakeKey:TestVal.TestMap[M].String1" - with stubber, pytest.raises(ValueError) as excinfo: + with ( + stubber, + pytest.raises(ValueError, match="The DynamoDB lookup '.*' encountered an error: .*"), + ): DynamodbLookup.handle(query, context=cfngin_context) stubber.assert_no_pending_responses() - assert str(excinfo.value).startswith( - f"The DynamoDB lookup '{query}' encountered an error: " - ) - def test_handle_empty_table_name(self, cfngin_context: MockCFNginContext) -> None: + def test_handle_empty_table_name(self, cfngin_context: MockCfnginContext) -> None: """Test handle with empty table_name.""" query = "@TestKey:TestVal.TestMap[M].String1" 
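# Editor's aside -- the recurring refactor in these hunks replaces manual checks on
# `excinfo.value` with `pytest.raises(..., match=...)`. `match` runs `re.search`
# against `str(exception)`, so no leading anchor is needed, but regex
# metacharacters in the expected message must be escaped. A sketch under
# illustrative names (`parse_port` is not a runway function):
import re

import pytest


def parse_port(value: str) -> int:
    if not value.isdigit():
        raise ValueError(f"Query '{value}' doesn't match regex: ^[0-9]+$")
    return int(value)


def test_parse_port_invalid() -> None:
    # re.escape keeps the literal "^[0-9]+$" from being treated as a pattern.
    with pytest.raises(ValueError, match=re.escape("doesn't match regex: ^[0-9]+$")):
        parse_port("eighty")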
- with pytest.raises(ValueError) as excinfo: + with pytest.raises(ValueError, match="Query '.*' doesn't match regex:"): DynamodbLookup.handle(query, context=cfngin_context) - assert str(excinfo.value).startswith(f"Query '{query}' doesn't match regex:") - def test_handle_invalid_partition_key( - self, cfngin_context: MockCFNginContext - ) -> None: + def test_handle_invalid_partition_key(self, cfngin_context: MockCfnginContext) -> None: """Test handle with invalid partition key.""" stubber = cfngin_context.add_stubber("dynamodb") expected_params = { @@ -120,19 +114,19 @@ def test_handle_invalid_partition_key( expected_params=expected_params, ) - with stubber, pytest.raises(ValueError) as excinfo: + with ( + stubber, + pytest.raises( + ValueError, + match="No DynamoDB record matched the partition key: FakeKey", + ), + ): DynamodbLookup.handle( "TestTable@FakeKey:TestVal.TestMap[M].String1", context=cfngin_context ) stubber.assert_no_pending_responses() - assert ( - str(excinfo.value) - == "No DynamoDB record matched the partition key: FakeKey" - ) - def test_handle_invalid_partition_value( - self, cfngin_context: MockCFNginContext - ) -> None: + def test_handle_invalid_partition_value(self, cfngin_context: MockCfnginContext) -> None: """Test handle with invalid partition value.""" stubber = cfngin_context.add_stubber("dynamodb") expected_params = { @@ -140,19 +134,21 @@ def test_handle_invalid_partition_value( "Key": {"TestKey": {"S": "FakeVal"}}, "ProjectionExpression": "TestKey,TestMap,String1", } - empty_response: Dict[str, Any] = {"ResponseMetadata": {}} + empty_response: dict[str, Any] = {"ResponseMetadata": {}} stubber.add_response("get_item", empty_response, expected_params) - with stubber, pytest.raises(ValueError) as excinfo: + with ( + stubber, + pytest.raises( + ValueError, + match="The DynamoDB record could not be found using the following: " + "{'TestKey': {'S': 'FakeVal'}}", + ), + ): DynamodbLookup.handle( "TestTable@TestKey:FakeVal.TestMap[M].String1", context=cfngin_context ) - assert ( - str(excinfo.value) - == "The DynamoDB record could not be found using the following: " - "{'TestKey': {'S': 'FakeVal'}}" - ) - def test_handle_list(self, cfngin_context: MockCFNginContext) -> None: + def test_handle_list(self, cfngin_context: MockCfnginContext) -> None: """Test handle return list.""" stubber = cfngin_context.add_stubber("dynamodb") expected_params = { @@ -167,16 +163,13 @@ def test_handle_list(self, cfngin_context: MockCFNginContext) -> None: ) == ["ListVal1", "ListVal2"] stubber.assert_no_pending_responses() - def test_handle_missing_table_name(self, cfngin_context: MockCFNginContext) -> None: + def test_handle_missing_table_name(self, cfngin_context: MockCfnginContext) -> None: """Test handle missing table_name.""" query = "TestKey:TestVal.TestMap[M].String1" - with pytest.raises(ValueError) as excinfo: + with pytest.raises(ValueError, match="'.*' missing delimiter for DynamoDB Table name:"): DynamodbLookup.handle(query, context=cfngin_context) - assert str(excinfo.value).startswith( - f"'{query}' missing delimiter for DynamoDB Table name:" - ) - def test_handle_number(self, cfngin_context: MockCFNginContext) -> None: + def test_handle_number(self, cfngin_context: MockCfnginContext) -> None: """Test handle return number.""" stubber = cfngin_context.add_stubber("dynamodb") expected_params = { @@ -196,7 +189,7 @@ def test_handle_number(self, cfngin_context: MockCFNginContext) -> None: ) stubber.assert_no_pending_responses() - def test_handle_table_not_found(self, 
cfngin_context: MockCFNginContext) -> None: + def test_handle_table_not_found(self, cfngin_context: MockCfnginContext) -> None: """Test handle DDB Table not found.""" stubber = cfngin_context.add_stubber("dynamodb") expected_params = { @@ -210,24 +203,21 @@ def test_handle_table_not_found(self, cfngin_context: MockCFNginContext) -> None service_error_code=service_error_code, expected_params=expected_params, ) - with stubber, pytest.raises(ValueError) as excinfo: + with ( + stubber, + pytest.raises(ValueError, match="Can't find the DynamoDB table: FakeTable"), + ): DynamodbLookup.handle( "FakeTable@TestKey:TestVal.TestMap[M].String1", context=cfngin_context ) stubber.assert_no_pending_responses() - assert str(excinfo.value) == "Can't find the DynamoDB table: FakeTable" - def test_handle_unsupported_data_type( - self, cfngin_context: MockCFNginContext - ) -> None: + def test_handle_unsupported_data_type(self, cfngin_context: MockCfnginContext) -> None: """Test handle with unsupported data type.""" - with pytest.raises(ValueError) as excinfo: + with pytest.raises(ValueError, match="CFNgin does not support looking up the data type: B"): DynamodbLookup.handle( "TestTable@TestKey:FakeVal.TestStringSet[B]", context=cfngin_context ) - assert ( - str(excinfo.value) == "CFNgin does not support looking up the data type: B" - ) class TestQueryDataModel: @@ -242,7 +232,7 @@ class TestQueryDataModel: ("TestVal[S]", {"S": "TestVal"}), ], ) - def test_item_key(self, expected: Dict[str, Any], value: str) -> None: + def test_item_key(self, expected: dict[str, Any], value: str) -> None: """Test item_key.""" assert QueryDataModel( attribute="", @@ -259,8 +249,8 @@ def test_item_key_no_match(self) -> None: partition_key_value="TestVal[L]", table_name="", ) - with pytest.raises(ValueError) as excinfo: + with pytest.raises( + ValueError, + match="Partition key value '.*' doesn't match regex: .*", + ): assert obj.item_key - assert str(excinfo.value).startswith( - f"Partition key value '{obj.partition_key_value}' doesn't match regex:" - ) diff --git a/tests/unit/cfngin/lookups/handlers/test_envvar.py b/tests/unit/cfngin/lookups/handlers/test_envvar.py index f5d011d5a..c7e3f6703 100644 --- a/tests/unit/cfngin/lookups/handlers/test_envvar.py +++ b/tests/unit/cfngin/lookups/handlers/test_envvar.py @@ -1,9 +1,10 @@ """Tests for runway.cfngin.lookups.handlers.envvar.""" -# pyright: basic import os import unittest +import pytest + from runway.cfngin.lookups.handlers.envvar import EnvvarLookup @@ -20,9 +21,9 @@ def setUp(self) -> None: def test_valid_envvar(self) -> None: """Test valid envvar.""" value = EnvvarLookup.handle(self.testkey) - self.assertEqual(value, self.testval) + assert value == self.testval def test_invalid_envvar(self) -> None: """Test invalid envvar.""" - with self.assertRaises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 EnvvarLookup.handle(self.invalidtestkey) diff --git a/tests/unit/cfngin/lookups/handlers/test_file.py b/tests/unit/cfngin/lookups/handlers/test_file.py index fcd5aff37..8ec9a3311 100644 --- a/tests/unit/cfngin/lookups/handlers/test_file.py +++ b/tests/unit/cfngin/lookups/handlers/test_file.py @@ -1,6 +1,6 @@ """Tests for runway.cfngin.lookups.handlers.file.""" -# pyright: basic, reportUnknownArgumentType=none, reportUnknownVariableType=none +# pyright: reportUnknownArgumentType=none, reportUnknownVariableType=none from __future__ import annotations import base64 @@ -39,15 +39,11 @@ class TestArgsDataModel: def test__validate_supported_codec_raise_value_error(self) -> 
None: """Test _validate_supported_codec raise ValueError.""" - with pytest.raises(ValidationError) as excinfo: + with pytest.raises( + ValidationError, + match=rf".*Value error, Codec 'foo' must be one of: {', '.join(CODECS)}", + ): ArgsDataModel(codec="foo") - assert excinfo.value.errors() == [ - { - "loc": ("codec",), - "msg": f"Codec 'foo' must be one of: {', '.join(CODECS)}", - "type": "value_error", - } - ] class TestFileLookup: @@ -91,9 +87,7 @@ def test_handle_json_parameterized(self, tmp_path: Path) -> None: tmp_file = tmp_path / "test" tmp_file.write_text(data, encoding="utf-8") - assert_template_dicts( - FileLookup.handle(f"json-parameterized:file://{tmp_file}"), expected - ) + assert_template_dicts(FileLookup.handle(f"json-parameterized:file://{tmp_file}"), expected) assert_template_dicts(FileLookup.handle(f"json-parameterized:{data}"), expected) @pytest.mark.parametrize( @@ -106,16 +100,12 @@ def test_handle_json_parameterized(self, tmp_path: Path) -> None: ("Test Without Interpolation Here", "Test Without Interpolation Here"), ], ) - def test_handle_parameterized( - self, data: str, expected: Any, tmp_path: Path - ) -> None: + def test_handle_parameterized(self, data: str, expected: Any, tmp_path: Path) -> None: """Test handle parameterized.""" tmp_file = tmp_path / "test" tmp_file.write_text(data, encoding="utf-8") - assert_template_dicts( - FileLookup.handle(f"parameterized:file://{tmp_file}"), expected - ) + assert_template_dicts(FileLookup.handle(f"parameterized:file://{tmp_file}"), expected) assert_template_dicts(FileLookup.handle(f"parameterized:{data}"), expected) @pytest.mark.parametrize( @@ -131,16 +121,12 @@ def test_handle_parameterized( ), ], ) - def test_handle_parameterized_b64( - self, data: str, expected: Base64, tmp_path: Path - ) -> None: + def test_handle_parameterized_b64(self, data: str, expected: Base64, tmp_path: Path) -> None: """Test handle parameterized-b64.""" tmp_file = tmp_path / "test" tmp_file.write_text(data, encoding="utf-8") - assert_template_dicts( - FileLookup.handle(f"parameterized-b64:file://{tmp_file}"), expected - ) + assert_template_dicts(FileLookup.handle(f"parameterized-b64:file://{tmp_file}"), expected) assert_template_dicts(FileLookup.handle(f"parameterized-b64:{data}"), expected) def test_handle_plain(self, tmp_path: Path) -> None: @@ -154,25 +140,16 @@ def test_handle_plain(self, tmp_path: Path) -> None: def test_handle_raise_validation_error(self) -> None: """Test handle raise ValidationError.""" - with pytest.raises(ValidationError) as excinfo: + with pytest.raises( + ValidationError, + match=rf".*Value error, Codec 'foo' must be one of: {', '.join(CODECS)}", + ): FileLookup.handle("foo:bar") - assert excinfo.value.errors() == [ - { - "loc": ("codec",), - "msg": f"Codec 'foo' must be one of: {', '.join(CODECS)}", - "type": "value_error", - } - ] def test_handle_raise_value_error(self) -> None: """Test handle raise ValueError.""" - with pytest.raises(ValueError) as excinfo: + with pytest.raises(ValueError, match="Query 'foo' doesn't match regex: "): FileLookup.handle("foo") - assert ( - str(excinfo.value) == "Query 'foo' doesn't match regex: " - "^(?P[base64|json|json-parameterized|parameterized|" - "parameterized-b64|plain|yaml|yaml-parameterized]:.+$)" - ) def test_handle_yaml(self, tmp_path: Path) -> None: """Test handle yaml.""" @@ -199,7 +176,5 @@ def test_handle_yaml_parameterized(self, tmp_path: Path) -> None: tmp_file = tmp_path / "test" tmp_file.write_text(data, encoding="utf-8") - assert_template_dicts( - 
FileLookup.handle(f"yaml-parameterized:file://{tmp_file}"), expected - ) + assert_template_dicts(FileLookup.handle(f"yaml-parameterized:file://{tmp_file}"), expected) assert_template_dicts(FileLookup.handle(f"yaml-parameterized:{data}"), expected) diff --git a/tests/unit/cfngin/lookups/handlers/test_hook_data.py b/tests/unit/cfngin/lookups/handlers/test_hook_data.py index 33fcd1136..b2fe1ffa4 100644 --- a/tests/unit/cfngin/lookups/handlers/test_hook_data.py +++ b/tests/unit/cfngin/lookups/handlers/test_hook_data.py @@ -1,7 +1,5 @@ """Tests for runway.cfngin.lookups.handlers.hook_data.""" -# pylint: disable=protected-access -# pyright: basic from __future__ import annotations from typing import TYPE_CHECKING @@ -13,13 +11,13 @@ from runway.variables import Variable if TYPE_CHECKING: - from ....factories import MockCFNginContext + from ....factories import MockCfnginContext class TestHookDataLookup: """Tests for runway.cfngin.lookups.handlers.hook_data.HookDataLookup.""" - def test_handle(self, cfngin_context: MockCFNginContext) -> None: + def test_handle(self, cfngin_context: MockCfnginContext) -> None: """Test handle with simple usage.""" cfngin_context.set_hook_data("fake_hook", {"nested": {"result": "good"}}) var_top = Variable("test", "${hook_data fake_hook}", variable_type="cfngin") @@ -32,7 +30,7 @@ def test_handle(self, cfngin_context: MockCFNginContext) -> None: assert var_top.value == {"nested": {"result": "good"}} assert var_nested.value == "good" - def test_default(self, cfngin_context: MockCFNginContext) -> None: + def test_default(self, cfngin_context: MockCfnginContext) -> None: """Test handle with a default value.""" cfngin_context.set_hook_data("fake_hook", {"nested": {"result": "good"}}) var_top = Variable( @@ -40,8 +38,7 @@ def test_default(self, cfngin_context: MockCFNginContext) -> None: ) var_nested = Variable( "test", - "${hook_data fake_hook.bad." - + "result::default=something,load=json,get=key}", + "${hook_data fake_hook.bad." 
+ "result::default=something,load=json,get=key}", variable_type="cfngin", ) var_top.resolve(cfngin_context) @@ -50,11 +47,9 @@ def test_default(self, cfngin_context: MockCFNginContext) -> None: assert var_top.value == "something" assert var_nested.value == "something" - def test_not_found(self, cfngin_context: MockCFNginContext) -> None: + def test_not_found(self, cfngin_context: MockCfnginContext) -> None: """Test value not found and no default.""" - variable = Variable( - "test", "${hook_data fake_hook.bad.result}", variable_type="cfngin" - ) + variable = Variable("test", "${hook_data fake_hook.bad.result}", variable_type="cfngin") with pytest.raises(FailedVariableLookup) as err: variable.resolve(cfngin_context) @@ -63,13 +58,11 @@ def test_not_found(self, cfngin_context: MockCFNginContext) -> None: ) assert "Could not find a value for" in str(err.value.__cause__) - def test_troposphere(self, cfngin_context: MockCFNginContext) -> None: + def test_troposphere(self, cfngin_context: MockCfnginContext) -> None: """Test with troposphere object like returned from lambda hook.""" bucket = "test-bucket" s3_key = "lambda_functions/my_function" - cfngin_context.set_hook_data( - "lambda", {"my_function": Code(S3Bucket=bucket, S3Key=s3_key)} - ) + cfngin_context.set_hook_data("lambda", {"my_function": Code(S3Bucket=bucket, S3Key=s3_key)}) var_bucket = Variable( "test", "${hook_data lambda.my_function::" + "load=troposphere,get=S3Bucket}", diff --git a/tests/unit/cfngin/lookups/handlers/test_kms.py b/tests/unit/cfngin/lookups/handlers/test_kms.py index 513b71b20..8106c2a09 100644 --- a/tests/unit/cfngin/lookups/handlers/test_kms.py +++ b/tests/unit/cfngin/lookups/handlers/test_kms.py @@ -1,6 +1,5 @@ """Tests for runway.cfngin.lookups.handlers.kms.""" -# pyright: basic from __future__ import annotations import codecs @@ -12,7 +11,7 @@ from runway.cfngin.lookups.handlers.kms import KmsLookup if TYPE_CHECKING: - from ....factories import MockCFNginContext + from ....factories import MockCfnginContext SECRET = "my secret" @@ -20,7 +19,7 @@ class TestKMSHandler: """Tests for runway.cfngin.lookups.handlers.kms.KmsLookup.""" - def test_handle(self, cfngin_context: MockCFNginContext) -> None: + def test_handle(self, cfngin_context: MockCfnginContext) -> None: """Test handle.""" stubber = cfngin_context.add_stubber("kms") stubber.add_response( @@ -33,12 +32,8 @@ def test_handle(self, cfngin_context: MockCFNginContext) -> None: assert KmsLookup.handle(SECRET, context=cfngin_context) == SECRET stubber.assert_no_pending_responses() - @pytest.mark.parametrize( - "template", ["${region}@${blob}", "${blob}::region=${region}"] - ) - def test_handle_with_region( - self, cfngin_context: MockCFNginContext, template: str - ) -> None: + @pytest.mark.parametrize("template", ["${region}@${blob}", "${blob}::region=${region}"]) + def test_handle_with_region(self, cfngin_context: MockCfnginContext, template: str) -> None: """Test handle with region.""" region = "us-west-2" query = string.Template(template).substitute({"blob": SECRET, "region": region}) diff --git a/tests/unit/cfngin/lookups/handlers/test_output.py b/tests/unit/cfngin/lookups/handlers/test_output.py index 1229753ad..d2fa6592e 100644 --- a/tests/unit/cfngin/lookups/handlers/test_output.py +++ b/tests/unit/cfngin/lookups/handlers/test_output.py @@ -1,12 +1,11 @@ """Tests for runway.cfngin.lookups.handlers.output.""" -# pylint: disable=protected-access from __future__ import annotations from typing import TYPE_CHECKING +from unittest.mock import MagicMock import 
pytest -from mock import MagicMock from runway._logging import LogLevels from runway.cfngin.exceptions import StackDoesNotExist @@ -18,10 +17,9 @@ from ...factories import generate_definition if TYPE_CHECKING: - from pytest import LogCaptureFixture from pytest_mock import MockerFixture - from ....factories import MockCFNginContext + from ....factories import MockCfnginContext MODULE = "runway.cfngin.lookups.handlers.output" @@ -62,27 +60,19 @@ def test_dependencies_not_resolved(self) -> None: ("stack-name.foo::default=bar", "bar"), ], ) - def test_handle( - self, cfngin_context: MockCFNginContext, expected: str, provided: str - ) -> None: + def test_handle(self, cfngin_context: MockCfnginContext, expected: str, provided: str) -> None: """Test handle.""" - stack = Stack( - definition=generate_definition("stack-name"), context=cfngin_context - ) + stack = Stack(definition=generate_definition("stack-name"), context=cfngin_context) stack.set_outputs({"Output": "output-val"}) cfngin_context.stacks_dict[cfngin_context.get_fqn(stack.name)] = stack assert OutputLookup.handle(provided, context=cfngin_context) == expected - @pytest.mark.parametrize( - "provided", ["stack-name.MissingOutput", "stack-name::MissingOutput"] - ) + @pytest.mark.parametrize("provided", ["stack-name.MissingOutput", "stack-name::MissingOutput"]) def test_handle_raise_output_does_not_exist( - self, cfngin_context: MockCFNginContext, provided: str + self, cfngin_context: MockCfnginContext, provided: str ) -> None: """Test handle raise OutputDoesNotExist.""" - stack = Stack( - definition=generate_definition("stack-name"), context=cfngin_context - ) + stack = Stack(definition=generate_definition("stack-name"), context=cfngin_context) stack.set_outputs({"Output": "output-val"}) cfngin_context.stacks_dict[cfngin_context.get_fqn(stack.name)] = stack with pytest.raises( @@ -94,7 +84,7 @@ def test_handle_raise_output_does_not_exist( @pytest.mark.parametrize("provided", ["stack-name.Output", "stack-name::Output"]) def test_handle_raise_stack_does_not_exist( - self, cfngin_context: MockCFNginContext, provided: str + self, cfngin_context: MockCfnginContext, provided: str ) -> None: """Test handle raise StackDoesNotExist.""" with pytest.raises( @@ -103,9 +93,7 @@ def test_handle_raise_stack_does_not_exist( ): OutputLookup.handle(provided, context=cfngin_context) - def test_legacy_parse( - self, caplog: LogCaptureFixture, mocker: MockerFixture - ) -> None: + def test_legacy_parse(self, caplog: pytest.LogCaptureFixture, mocker: MockerFixture) -> None: """Test legacy_parse.""" query = "foo" caplog.set_level(LogLevels.WARNING, MODULE) diff --git a/tests/unit/cfngin/lookups/handlers/test_rxref.py b/tests/unit/cfngin/lookups/handlers/test_rxref.py index 1c0571d2d..43bb4a62d 100644 --- a/tests/unit/cfngin/lookups/handlers/test_rxref.py +++ b/tests/unit/cfngin/lookups/handlers/test_rxref.py @@ -1,21 +1,19 @@ """Tests for runway.cfngin.lookups.handlers.rxref.""" -# pylint: disable=protected-access from __future__ import annotations from typing import TYPE_CHECKING +from unittest.mock import Mock import pytest -from mock import Mock from runway._logging import LogLevels from runway.cfngin.lookups.handlers.rxref import RxrefLookup if TYPE_CHECKING: - from pytest import LogCaptureFixture from pytest_mock import MockerFixture - from ....factories import MockCFNginContext + from ....factories import MockCfnginContext MODULE = "runway.cfngin.lookups.handlers.rxref" @@ -36,7 +34,7 @@ class TestRxrefLookup: ) def test_handle( self, - cfngin_context: 
MockCFNginContext, + cfngin_context: MockCfnginContext, expected: str, mocker: MockerFixture, provided: str, @@ -47,13 +45,9 @@ def test_handle( cfn.handle.return_value = "success" provider = Mock(name="provider") assert RxrefLookup.handle(provided, context=cfngin_context, provider=provider) - cfn.handle.assert_called_once_with( - expected, context=cfngin_context, provider=provider - ) + cfn.handle.assert_called_once_with(expected, context=cfngin_context, provider=provider) - def test_legacy_parse( - self, caplog: LogCaptureFixture, mocker: MockerFixture - ) -> None: + def test_legacy_parse(self, caplog: pytest.LogCaptureFixture, mocker: MockerFixture) -> None: """Test legacy_parse.""" query = "foo" caplog.set_level(LogLevels.WARNING, MODULE) diff --git a/tests/unit/cfngin/lookups/handlers/test_split.py b/tests/unit/cfngin/lookups/handlers/test_split.py index aea2141bd..68beb1386 100644 --- a/tests/unit/cfngin/lookups/handlers/test_split.py +++ b/tests/unit/cfngin/lookups/handlers/test_split.py @@ -1,8 +1,9 @@ """Tests for runway.cfngin.lookups.handlers.split.""" -# pyright: basic import unittest +import pytest + from runway.cfngin.lookups.handlers.split import SplitLookup @@ -24,5 +25,5 @@ def test_multi_character_split(self) -> None: def test_invalid_value_split(self) -> None: """Test invalid value split.""" value = ",:a,b,c" - with self.assertRaises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 SplitLookup.handle(value) diff --git a/tests/unit/cfngin/lookups/handlers/test_xref.py b/tests/unit/cfngin/lookups/handlers/test_xref.py index dd7472058..e1f4b2e57 100644 --- a/tests/unit/cfngin/lookups/handlers/test_xref.py +++ b/tests/unit/cfngin/lookups/handlers/test_xref.py @@ -1,9 +1,8 @@ """Tests for runway.cfngin.lookups.handlers.xref.""" -# pyright: basic, reportUnknownArgumentType=none, reportUnknownVariableType=none +# pyright: reportUnknownArgumentType=none, reportUnknownVariableType=none import unittest - -from mock import MagicMock +from unittest.mock import MagicMock from runway.cfngin.lookups.handlers.xref import XrefLookup @@ -24,8 +23,8 @@ def test_xref_handler(self) -> None: provider=self.provider, context=self.context, ) - self.assertEqual(value, "Test Output") - self.assertEqual(self.context.get_fqn.call_count, 0) + assert value == "Test Output" + assert self.context.get_fqn.call_count == 0 args = self.provider.get_output.call_args - self.assertEqual(args[0][0], "fully-qualified-stack-name") - self.assertEqual(args[0][1], "SomeOutput") + assert args[0][0] == "fully-qualified-stack-name" + assert args[0][1] == "SomeOutput" diff --git a/tests/unit/cfngin/lookups/test_registry.py b/tests/unit/cfngin/lookups/test_registry.py index e44aa4f6f..ff57a145b 100644 --- a/tests/unit/cfngin/lookups/test_registry.py +++ b/tests/unit/cfngin/lookups/test_registry.py @@ -1,6 +1,5 @@ """Tests for runway.cfngin.lookups.registry.""" -# pyright: basic from __future__ import annotations from typing import TYPE_CHECKING, Any @@ -51,9 +50,7 @@ def test_autoloaded_lookup_handlers(mocker: MockerFixture) -> None: "xref", ] for handler in handlers: - assert ( - handler in CFNGIN_LOOKUP_HANDLERS - ), f'Lookup handler: "{handler}" not registered' + assert handler in CFNGIN_LOOKUP_HANDLERS, f'Lookup handler: "{handler}" not registered' assert len(CFNGIN_LOOKUP_HANDLERS) == len( handlers ), f"expected {len(handlers)} autoloaded handlers but found {len(CFNGIN_LOOKUP_HANDLERS)}" @@ -82,9 +79,7 @@ class FakeLookup: def test_register_lookup_handler_str(mocker: MockerFixture) -> None: """Test 
     """Test register_lookup_handler from string."""
     mocker.patch.dict(CFNGIN_LOOKUP_HANDLERS, {})
-    register_lookup_handler(
-        "test", "runway.cfngin.lookups.handlers.default.DefaultLookup"
-    )
+    register_lookup_handler("test", "runway.cfngin.lookups.handlers.default.DefaultLookup")
     assert "test" in CFNGIN_LOOKUP_HANDLERS
     assert CFNGIN_LOOKUP_HANDLERS["test"] == DefaultLookup
diff --git a/tests/unit/cfngin/providers/aws/test_default.py b/tests/unit/cfngin/providers/aws/test_default.py
index 9d7093aab..d89232b38 100644
--- a/tests/unit/cfngin/providers/aws/test_default.py
+++ b/tests/unit/cfngin/providers/aws/test_default.py
@@ -1,26 +1,23 @@
 """Tests for runway.cfngin.providers.aws.default."""

-# pylint: disable=too-many-lines
-# pyright: basic
 from __future__ import annotations

 import copy
 import locale
-import os.path
 import random
 import string
 import threading
 import unittest
+from contextlib import suppress
 from datetime import datetime
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, Optional
+from typing import TYPE_CHECKING, Any
+from unittest.mock import MagicMock, patch

 import boto3
 import pytest
 from botocore.exceptions import ClientError, UnStubbedResponseError
 from botocore.stub import Stubber
-from mock import MagicMock, patch
-from typing_extensions import Literal

 from runway.cfngin import exceptions
 from runway.cfngin.actions.diff import DictValue
@@ -49,6 +46,7 @@
         StackTypeDef,
     )
     from pytest_mock import MockerFixture
+    from typing_extensions import Literal

     from runway.core.providers.aws.type_defs import TagSetTypeDef

@@ -60,12 +58,12 @@ def random_string(length: int = 12) -> str:
         length: The # of characters to use in the random string.

     """
-    return "".join([random.choice(string.ascii_letters) for _ in range(length)])
+    return "".join([random.choice(string.ascii_letters) for _ in range(length)])  # noqa: S311


 def generate_describe_stacks_stack(
     stack_name: str,
-    creation_time: Optional[datetime] = None,
+    creation_time: datetime | None = None,
     stack_status: Literal[
         "CREATE_IN_PROGRESS",
         "CREATE_FAILED",
@@ -90,7 +88,7 @@ def generate_describe_stacks_stack(
         "IMPORT_ROLLBACK_FAILED",
         "IMPORT_ROLLBACK_COMPLETE",
     ] = "CREATE_COMPLETE",
-    tags: Optional[TagSetTypeDef] = None,
+    tags: TagSetTypeDef | None = None,
     termination_protection: bool = False,
 ) -> StackTypeDef:
     """Generate describe stacks stack."""
@@ -106,20 +104,16 @@


 def generate_get_template(
-    file_name: str = "cfn_template.json", stages_available: Optional[List[str]] = None
-) -> Dict[str, Any]:
+    file_name: str = "cfn_template.json", stages_available: list[str] | None = None
+) -> dict[str, Any]:
     """Generate get template."""
-    fixture_dir = os.path.join(os.path.dirname(__file__), "../../fixtures")
-    with open(os.path.join(fixture_dir, file_name), "r", encoding="utf-8") as _file:
-        return {
-            "StagesAvailable": stages_available or ["Original"],
-            "TemplateBody": _file.read(),
-        }
+    return {
+        "StagesAvailable": stages_available or ["Original"],
+        "TemplateBody": (Path(__file__).parent.parent.parent / "fixtures" / file_name).read_text(),
+    }


-def generate_stack_object(
-    stack_name: str, outputs: Optional[Dict[str, Any]] = None
-) -> MagicMock:
+def generate_stack_object(stack_name: str, outputs: dict[str, Any] | None = None) -> MagicMock:
     """Generate stack object."""
     mock_stack = MagicMock(["name", "fqn", "blueprint"])
     if not outputs:
@@ -151,9 +145,9 @@ def generate_resource_change(replacement: bool = True) -> ChangeTypeDef:

 def generate_change_set_response(
     status: str,
     execution_status: str = "AVAILABLE",
-    changes: Optional[List[Dict[str, Any]]] = None,
+    changes: list[dict[str, Any]] | None = None,
     status_reason: str = "FAKE",
-) -> Dict[str, Any]:
+) -> dict[str, Any]:
     """Generate change set response."""
     return {
         "ChangeSetName": "string",
@@ -185,7 +179,7 @@ def generate_change(
     resource_type: str = "EC2::Instance",
     replacement: str = "False",
     requires_recreation: str = "Never",
-) -> Dict[str, Any]:
+) -> dict[str, Any]:
     """Generate a minimal change for a changeset."""
     return {
         "Type": "Resource",
@@ -228,18 +222,14 @@ def test_requires_replacement(self) -> None:
             generate_resource_change(),
         ]
         replacement = requires_replacement(changeset)
-        self.assertEqual(len(replacement), 2)
+        assert len(replacement) == 2
         for resource in replacement:
-            self.assertEqual(
-                resource.get("ResourceChange", {}).get("Replacement"), "True"
-            )
+            assert resource.get("ResourceChange", {}).get("Replacement") == "True"

     def test_summarize_params_diff(self) -> None:
         """Test summarize params diff."""
         unmodified_param = DictValue("ParamA", "new-param-value", "new-param-value")
-        modified_param = DictValue(
-            "ParamB", "param-b-old-value", "param-b-new-value-delta"
-        )
+        modified_param = DictValue("ParamB", "param-b-old-value", "param-b-new-value-delta")
         added_param = DictValue("ParamC", None, "param-c-new-value")
         removed_param = DictValue("ParamD", "param-d-old-value", None)
@@ -249,54 +239,39 @@ def test_summarize_params_diff(self) -> None:
             added_param,
             removed_param,
         ]
-        self.assertEqual(summarize_params_diff([]), "")
-        self.assertEqual(
-            summarize_params_diff(params_diff),
-            "\n".join(
-                [
-                    "Parameters Added: ParamC",
-                    "Parameters Removed: ParamD",
-                    "Parameters Modified: ParamB\n",
-                ]
-            ),
+        assert summarize_params_diff([]) == ""
+        assert (
+            summarize_params_diff(params_diff)
+            == "Parameters Added: ParamC\nParameters Removed: ParamD\nParameters Modified: ParamB\n"
         )

         only_modified_params_diff = [modified_param]
-        self.assertEqual(
-            summarize_params_diff(only_modified_params_diff),
-            "Parameters Modified: ParamB\n",
-        )
+        assert summarize_params_diff(only_modified_params_diff) == "Parameters Modified: ParamB\n"

         only_added_params_diff = [added_param]
-        self.assertEqual(
-            summarize_params_diff(only_added_params_diff), "Parameters Added: ParamC\n"
-        )
+        assert summarize_params_diff(only_added_params_diff) == "Parameters Added: ParamC\n"

         only_removed_params_diff = [removed_param]
-        self.assertEqual(
-            summarize_params_diff(only_removed_params_diff),
-            "Parameters Removed: ParamD\n",
-        )
+        assert summarize_params_diff(only_removed_params_diff) == "Parameters Removed: ParamD\n"

     def test_ask_for_approval(self) -> None:
         """Test ask for approval."""
         get_input_path = "runway.cfngin.ui.get_raw_input"
         with patch(get_input_path, return_value="y"):
-            self.assertIsNone(ask_for_approval([], [], False))
+            assert ask_for_approval([], [], False) is None

         for v in ("n", "N", "x", "\n"):
-            with patch(get_input_path, return_value=v):
-                with self.assertRaises(exceptions.CancelExecution):
-                    ask_for_approval([], [])
+            with patch(get_input_path, return_value=v), pytest.raises(exceptions.CancelExecution):
+                ask_for_approval([], [])

         with patch(get_input_path, side_effect=["v", "n"]) as mock_get_input:
             with patch(
                 "runway.cfngin.providers.aws.default.output_full_changeset"
             ) as mock_full_changeset:
-                with self.assertRaises(exceptions.CancelExecution):
+                with pytest.raises(exceptions.CancelExecution):
                     ask_for_approval([], [], True)
-                self.assertEqual(mock_full_changeset.call_count, 1)
-                self.assertEqual(mock_get_input.call_count, 2)
+                assert mock_full_changeset.call_count == 1
+                assert mock_get_input.call_count == 2

     def test_ask_for_approval_with_params_diff(self) -> None:
         """Test ask for approval with params diff."""
@@ -306,21 +281,20 @@ def test_ask_for_approval_with_params_diff(self) -> None:
             DictValue("ParamB", "param-b-old-value", "param-b-new-value-delta"),
         ]
         with patch(get_input_path, return_value="y"):
-            self.assertIsNone(ask_for_approval([], params_diff, False))
+            assert ask_for_approval([], params_diff, False) is None

         for v in ("n", "N", "x", "\n"):
-            with patch(get_input_path, return_value=v):
-                with self.assertRaises(exceptions.CancelExecution):
-                    ask_for_approval([], params_diff)
+            with patch(get_input_path, return_value=v), pytest.raises(exceptions.CancelExecution):
+                ask_for_approval([], params_diff)

         with patch(get_input_path, side_effect=["v", "n"]) as mock_get_input:
             with patch(
                 "runway.cfngin.providers.aws.default.output_full_changeset"
             ) as mock_full_changeset:
-                with self.assertRaises(exceptions.CancelExecution):
+                with pytest.raises(exceptions.CancelExecution):
                     ask_for_approval([], params_diff, True)
-                self.assertEqual(mock_full_changeset.call_count, 1)
-                self.assertEqual(mock_get_input.call_count, 2)
+                assert mock_full_changeset.call_count == 1
+                assert mock_get_input.call_count == 2

     @patch("runway.cfngin.providers.aws.default.format_params_diff")
     @patch("runway.cfngin.providers.aws.default.yaml.safe_dump")
@@ -334,27 +308,21 @@ def test_output_full_changeset(
         for v in ["y", "v", "Y", "V"]:
             with patch(get_input_path, return_value=v) as prompt:
-                self.assertIsNone(
-                    output_full_changeset(full_changeset=[], params_diff=[], fqn=None)
-                )
-                self.assertEqual(prompt.call_count, 1)
+                assert output_full_changeset(full_changeset=[], params_diff=[], fqn=None) is None
+                assert prompt.call_count == 1
             safe_dump_counter += 1
-            self.assertEqual(mock_safe_dump.call_count, safe_dump_counter)
-            self.assertEqual(patched_format.call_count, 0)
+            assert mock_safe_dump.call_count == safe_dump_counter
+            assert patched_format.call_count == 0

         for v in ["n", "N"]:
             with patch(get_input_path, return_value=v) as prompt:
-                output_full_changeset(
-                    full_changeset=[], params_diff=[], answer=None, fqn=None
-                )
-                self.assertEqual(prompt.call_count, 1)
-            self.assertEqual(mock_safe_dump.call_count, safe_dump_counter)
-            self.assertEqual(patched_format.call_count, 0)
+                output_full_changeset(full_changeset=[], params_diff=[], answer=None, fqn=None)
+                assert prompt.call_count == 1
+            assert mock_safe_dump.call_count == safe_dump_counter
+            assert patched_format.call_count == 0

-        with self.assertRaises(exceptions.CancelExecution):
-            output_full_changeset(
-                full_changeset=[], params_diff=[], answer="x", fqn=None
-            )
+        with pytest.raises(exceptions.CancelExecution):
+            output_full_changeset(full_changeset=[], params_diff=[], answer="x", fqn=None)

         output_full_changeset(
             full_changeset=[],
@@ -363,8 +331,8 @@ def test_output_full_changeset(
             fqn=None,
         )
         safe_dump_counter += 1
-        self.assertEqual(mock_safe_dump.call_count, safe_dump_counter)
-        self.assertEqual(patched_format.call_count, 1)
+        assert mock_safe_dump.call_count == safe_dump_counter
+        assert patched_format.call_count == 1

     def test_wait_till_change_set_complete_success(self) -> None:
         """Test wait till change set complete success."""
@@ -374,9 +342,7 @@ def test_wait_till_change_set_complete_success(self) -> None:
         with self.stubber:
             wait_till_change_set_complete(self.cfn, "FAKEID")

-        self.stubber.add_response(
-            "describe_change_set", generate_change_set_response("FAILED")
-        )
+        self.stubber.add_response("describe_change_set", generate_change_set_response("FAILED"))
         with self.stubber:
             wait_till_change_set_complete(self.cfn, "FAKEID")
@@ -387,97 +353,79 @@ def test_wait_till_change_set_complete_failed(self) -> None:
         self.stubber.add_response(
             "describe_change_set", generate_change_set_response("CREATE_PENDING")
         )
-        with self.stubber:
-            with self.assertRaises(exceptions.ChangesetDidNotStabilize):
-                wait_till_change_set_complete(
-                    self.cfn, "FAKEID", try_count=2, sleep_time=0.1
-                )
+        with self.stubber, pytest.raises(exceptions.ChangesetDidNotStabilize):
+            wait_till_change_set_complete(self.cfn, "FAKEID", try_count=2, sleep_time=0.1)

     def test_create_change_set_stack_did_not_change(self) -> None:
         """Test create change set stack did not change."""
-        self.stubber.add_response(
-            "create_change_set", {"Id": "CHANGESETID", "StackId": "STACKID"}
-        )
+        self.stubber.add_response("create_change_set", {"Id": "CHANGESETID", "StackId": "STACKID"})
         self.stubber.add_response(
             "describe_change_set",
-            generate_change_set_response(
-                "FAILED", status_reason="Stack didn't contain changes."
-            ),
+            generate_change_set_response("FAILED", status_reason="Stack didn't contain changes."),
         )
         self.stubber.add_response(
             "delete_change_set", {}, expected_params={"ChangeSetName": "CHANGESETID"}
         )
-        with self.stubber:
-            with self.assertRaises(exceptions.StackDidNotChange):
-                create_change_set(
-                    cfn_client=self.cfn,
-                    fqn="my-fake-stack",
-                    template=Template(url="http://fake.template.url.com/"),
-                    parameters=[],
-                    tags=[],
-                )
+        with self.stubber, pytest.raises(exceptions.StackDidNotChange):
+            create_change_set(
+                cfn_client=self.cfn,
+                fqn="my-fake-stack",
+                template=Template(url="http://fake.template.url.com/"),
+                parameters=[],
+                tags=[],
+            )

     def test_create_change_set_unhandled_failed_status(self) -> None:
         """Test create change set unhandled failed status."""
-        self.stubber.add_response(
-            "create_change_set", {"Id": "CHANGESETID", "StackId": "STACKID"}
-        )
+        self.stubber.add_response("create_change_set", {"Id": "CHANGESETID", "StackId": "STACKID"})
         self.stubber.add_response(
             "describe_change_set",
-            generate_change_set_response(
-                "FAILED", status_reason="Some random bad thing."
-            ),
+            generate_change_set_response("FAILED", status_reason="Some random bad thing."),
         )
-        with self.stubber:
-            with self.assertRaises(exceptions.UnhandledChangeSetStatus):
-                create_change_set(
-                    cfn_client=self.cfn,
-                    fqn="my-fake-stack",
-                    template=Template(url="http://fake.template.url.com/"),
-                    parameters=[],
-                    tags=[],
-                )
+        with self.stubber, pytest.raises(exceptions.UnhandledChangeSetStatus):
+            create_change_set(
+                cfn_client=self.cfn,
+                fqn="my-fake-stack",
+                template=Template(url="http://fake.template.url.com/"),
+                parameters=[],
+                tags=[],
+            )

     def test_create_change_set_bad_execution_status(self) -> None:
         """Test create change set bad execution status."""
-        self.stubber.add_response(
-            "create_change_set", {"Id": "CHANGESETID", "StackId": "STACKID"}
-        )
+        self.stubber.add_response("create_change_set", {"Id": "CHANGESETID", "StackId": "STACKID"})
         self.stubber.add_response(
             "describe_change_set",
-            generate_change_set_response(
-                status="CREATE_COMPLETE", execution_status="UNAVAILABLE"
-            ),
+            generate_change_set_response(status="CREATE_COMPLETE", execution_status="UNAVAILABLE"),
         )
-        with self.stubber:
-            with self.assertRaises(exceptions.UnableToExecuteChangeSet):
-                create_change_set(
-                    cfn_client=self.cfn,
-                    fqn="my-fake-stack",
-                    template=Template(url="http://fake.template.url.com/"),
-                    parameters=[],
-                    tags=[],
-                )
+        with self.stubber, pytest.raises(exceptions.UnableToExecuteChangeSet):
+            create_change_set(
+                cfn_client=self.cfn,
+                fqn="my-fake-stack",
+                template=Template(url="http://fake.template.url.com/"),
+                parameters=[],
+                tags=[],
+            )

     def test_generate_cloudformation_args(self) -> None:
         """Test generate cloudformation args."""
         stack_name = "mystack"
         template_url = "http://fake.s3url.com/blah.json"
         template_body = '{"fake_body": "woot"}'
-        std_args: Dict[str, Any] = {
+        std_args: dict[str, Any] = {
             "stack_name": stack_name,
             "parameters": [],
             "tags": [],
             "template": Template(url=template_url),
         }
-        std_return: Dict[str, Any] = {
+        std_return: dict[str, Any] = {
             "StackName": stack_name,
             "Parameters": [],
             "Tags": [],
@@ -485,24 +433,24 @@ def test_generate_cloudformation_args(self) -> None:
             "TemplateURL": template_url,
         }
         result = generate_cloudformation_args(**std_args)
-        self.assertEqual(result, std_return)
+        assert result == std_return

         result = generate_cloudformation_args(service_role="FakeRole", **std_args)
         service_role_result = copy.deepcopy(std_return)
         service_role_result["RoleARN"] = "FakeRole"
-        self.assertEqual(result, service_role_result)
+        assert result == service_role_result

         result = generate_cloudformation_args(change_set_name="MyChanges", **std_args)
         change_set_result = copy.deepcopy(std_return)
         change_set_result["ChangeSetName"] = "MyChanges"
-        self.assertEqual(result, change_set_result)
+        assert result == change_set_result

         # Check stack policy
         stack_policy = Template(body="{}")
         result = generate_cloudformation_args(stack_policy=stack_policy, **std_args)
         stack_policy_result = copy.deepcopy(std_return)
         stack_policy_result["StackPolicyBody"] = "{}"
-        self.assertEqual(result, stack_policy_result)
+        assert result == stack_policy_result

         # If not TemplateURL is provided, use TemplateBody
         std_args["template"] = Template(body=template_body)
@@ -510,7 +458,7 @@ def test_generate_cloudformation_args(self) -> None:
         del template_body_result["TemplateURL"]
         template_body_result["TemplateBody"] = template_body
         result = generate_cloudformation_args(**std_args)
-        self.assertEqual(result, template_body_result)
+        assert result == template_body_result


 class TestProvider:
@@ -538,9 +486,7 @@ def test_get_event_by_resource_status(self, mocker: MockerFixture) -> None:
             {"StackName": "2", "ResourceStatus": "match"},
             {"StackName": "3", "ResourceStatus": "match"},
         ]
-        mock_get_events = mocker.patch.object(
-            Provider, "get_events", return_value=events
-        )
+        mock_get_events = mocker.patch.object(Provider, "get_events", return_value=events)

         obj = Provider(MagicMock())
         result = obj.get_event_by_resource_status("test", "match")
@@ -548,9 +494,7 @@ def test_get_event_by_resource_status(self, mocker: MockerFixture) -> None:
         assert result["StackName"] == "2"
         mock_get_events.assert_called_once_with("test", chronological=True)

-        assert not obj.get_event_by_resource_status(
-            "test", "missing", chronological=False
-        )
+        assert not obj.get_event_by_resource_status("test", "missing", chronological=False)
         mock_get_events.assert_called_with("test", chronological=False)

     def test_get_rollback_status_reason(self, mocker: MockerFixture) -> None:
@@ -612,23 +556,17 @@ def test_create_stack_no_changeset(self) -> None:
         """Test create_stack, no changeset, template url."""
         stack_name = "fake_stack"
         template = Template(url="http://fake.template.url.com/")
-        parameters: List[Any] = []
-        tags: List[Any] = []
+        parameters: list[Any] = []
+        tags: list[Any] = []

-        expected_args = generate_cloudformation_args(
-            stack_name, parameters, tags, template
-        )
+        expected_args = generate_cloudformation_args(stack_name, parameters, tags, template)
         expected_args["EnableTerminationProtection"] = False
         expected_args["TimeoutInMinutes"] = 60

-        self.stubber.add_response(
-            "create_stack", {"StackId": stack_name}, expected_args
-        )
+        self.stubber.add_response("create_stack", {"StackId": stack_name}, expected_args)

         with self.stubber:
-            self.provider.create_stack(
-                stack_name, template, parameters, tags, timeout=60
-            )
+            self.provider.create_stack(stack_name, template, parameters, tags, timeout=60)
         self.stubber.assert_no_pending_responses()

     @patch("runway.cfngin.providers.aws.default.Provider.update_termination_protection")
@@ -640,20 +578,16 @@ def test_create_stack_with_changeset(
         stack_name = "fake_stack"
         template_path = Path("./tests/unit/cfngin/fixtures/cfn_template.yaml")
         template = Template(
-            body=template_path.read_text(
-                encoding=locale.getpreferredencoding(do_setlocale=False)
-            )
+            body=template_path.read_text(encoding=locale.getpreferredencoding(do_setlocale=False))
         )
-        parameters: List[Any] = []
-        tags: List[Any] = []
+        parameters: list[Any] = []
+        tags: list[Any] = []

         changeset_id = "CHANGESETID"
         patched_create_change_set.return_value = ([], changeset_id)

-        self.stubber.add_response(
-            "execute_change_set", {}, {"ChangeSetName": changeset_id}
-        )
+        self.stubber.add_response("execute_change_set", {}, {"ChangeSetName": changeset_id})

         with self.stubber:
             self.provider.create_stack(
@@ -684,7 +618,7 @@ def test_destroy_stack(self) -> None:
         self.stubber.add_response("delete_stack", {}, stack)

         with self.stubber:
-            self.assertIsNone(self.provider.destroy_stack(stack))  # type: ignore
+            assert self.provider.destroy_stack(stack) is None  # type: ignore
         self.stubber.assert_no_pending_responses()

     def test_get_stack_stack_does_not_exist(self) -> None:
@@ -697,9 +631,8 @@ def test_get_stack_stack_does_not_exist(self) -> None:
             expected_params={"StackName": stack_name},
         )

-        with self.assertRaises(exceptions.StackDoesNotExist):
-            with self.stubber:
-                self.provider.get_stack(stack_name)
+        with pytest.raises(exceptions.StackDoesNotExist), self.stubber:
+            self.provider.get_stack(stack_name)

     def test_get_stack_stack_exists(self) -> None:
         """Test get stack stack exists."""
@@ -712,7 +645,7 @@ def test_get_stack_stack_exists(self) -> None:
         with self.stubber:
             response = self.provider.get_stack(stack_name)

-        self.assertEqual(response["StackName"], stack_name)
+        assert response["StackName"] == stack_name

     def test_select_destroy_method(self) -> None:
         """Test select destroy method."""
@@ -720,9 +653,7 @@ def test_select_destroy_method(self) -> None:
             [{"force_interactive": False}, self.provider.noninteractive_destroy_stack],
             [{"force_interactive": True}, self.provider.interactive_destroy_stack],
         ]:
-            self.assertEqual(
-                self.provider.select_destroy_method(**i[0]), i[1]  # type: ignore
-            )
+            assert self.provider.select_destroy_method(**i[0]) == i[1]  # type: ignore

     def test_select_update_method(self) -> None:
         """Test select update method."""
@@ -744,99 +675,77 @@ def test_select_update_method(self) -> None:
                 self.provider.interactive_update_stack,
             ],
         ]:
-            self.assertEqual(
-                self.provider.select_update_method(**i[0]), i[1]  # type: ignore
-            )
+            assert self.provider.select_update_method(**i[0]) == i[1]  # type: ignore

     def test_prepare_stack_for_update_completed(self) -> None:
         """Test prepare stack for update completed."""
         with self.stubber:
             stack_name = "MockStack"
-            stack = generate_describe_stacks_stack(
-                stack_name, stack_status="UPDATE_COMPLETE"
-            )
+            stack = generate_describe_stacks_stack(stack_name, stack_status="UPDATE_COMPLETE")

-            self.assertTrue(self.provider.prepare_stack_for_update(stack, []))
+            assert self.provider.prepare_stack_for_update(stack, [])

     def test_prepare_stack_for_update_in_progress(self) -> None:
         """Test prepare stack for update in progress."""
         stack_name = "MockStack"
-        stack = generate_describe_stacks_stack(
-            stack_name, stack_status="UPDATE_IN_PROGRESS"
-        )
+        stack = generate_describe_stacks_stack(stack_name, stack_status="UPDATE_IN_PROGRESS")

-        with self.assertRaises(exceptions.StackUpdateBadStatus) as raised:
-            with self.stubber:
-                self.provider.prepare_stack_for_update(stack, [])
+        with self.stubber, pytest.raises(exceptions.StackUpdateBadStatus) as raised:
+            self.provider.prepare_stack_for_update(stack, [])

-            self.assertIn("in-progress", str(raised.exception))
+        assert "in-progress" in str(raised.value)

     def test_prepare_stack_for_update_non_recreatable(self) -> None:
         """Test prepare stack for update non recreatable."""
         stack_name = "MockStack"
-        stack = generate_describe_stacks_stack(
-            stack_name, stack_status="REVIEW_IN_PROGRESS"
-        )
+        stack = generate_describe_stacks_stack(stack_name, stack_status="REVIEW_IN_PROGRESS")

-        with self.assertRaises(exceptions.StackUpdateBadStatus) as raised:
-            with self.stubber:
-                self.provider.prepare_stack_for_update(stack, [])
+        with pytest.raises(exceptions.StackUpdateBadStatus) as excinfo, self.stubber:
+            self.provider.prepare_stack_for_update(stack, [])

-        self.assertIn("Unsupported state", str(raised.exception))
+        assert "Unsupported state" in str(excinfo.value)

     def test_prepare_stack_for_update_disallowed(self) -> None:
         """Test prepare stack for update disallowed."""
         stack_name = "MockStack"
-        stack = generate_describe_stacks_stack(
-            stack_name, stack_status="ROLLBACK_COMPLETE"
-        )
+        stack = generate_describe_stacks_stack(stack_name, stack_status="ROLLBACK_COMPLETE")

-        with self.assertRaises(exceptions.StackUpdateBadStatus) as raised:
-            with self.stubber:
-                self.provider.prepare_stack_for_update(stack, [])
+        with pytest.raises(exceptions.StackUpdateBadStatus) as excinfo, self.stubber:
            self.provider.prepare_stack_for_update(stack, [])

-        self.assertIn("re-creation is disabled", str(raised.exception))
+        assert "re-creation is disabled" in str(excinfo.value)

         # Ensure we point out to the user how to enable re-creation
-        self.assertIn("--recreate-failed", str(raised.exception))
+        assert "--recreate-failed" in str(excinfo.value)

     def test_prepare_stack_for_update_bad_tags(self) -> None:
         """Test prepare stack for update bad tags."""
         stack_name = "MockStack"
-        stack = generate_describe_stacks_stack(
-            stack_name, stack_status="ROLLBACK_COMPLETE"
-        )
+        stack = generate_describe_stacks_stack(stack_name, stack_status="ROLLBACK_COMPLETE")

         self.provider.recreate_failed = True

-        with self.assertRaises(exceptions.StackUpdateBadStatus) as raised:
-            with self.stubber:
-                self.provider.prepare_stack_for_update(
-                    stack, tags=[{"Key": "cfngin_namespace", "Value": "test"}]
-                )
+        with pytest.raises(exceptions.StackUpdateBadStatus) as excinfo, self.stubber:
+            self.provider.prepare_stack_for_update(
+                stack, tags=[{"Key": "cfngin_namespace", "Value": "test"}]
+            )

-        self.assertIn("tags differ", str(raised.exception).lower())
+        assert "tags differ" in str(excinfo.value).lower()

     def test_prepare_stack_for_update_recreate(self) -> None:
         """Test prepare stack for update recreate."""
         stack_name = "MockStack"
-        stack = generate_describe_stacks_stack(
-            stack_name, stack_status="ROLLBACK_COMPLETE"
-        )
+        stack = generate_describe_stacks_stack(stack_name, stack_status="ROLLBACK_COMPLETE")

-        self.stubber.add_response(
-            "delete_stack", {}, expected_params={"StackName": stack_name}
-        )
+        self.stubber.add_response("delete_stack", {}, expected_params={"StackName": stack_name})

         self.provider.recreate_failed = True

         with self.stubber:
-            self.assertFalse(self.provider.prepare_stack_for_update(stack, []))
+            assert not self.provider.prepare_stack_for_update(stack, [])

     def test_noninteractive_changeset_update_no_stack_policy(self) -> None:
         """Test noninteractive changeset update no stack policy."""
-        self.stubber.add_response(
-            "create_change_set", {"Id": "CHANGESETID", "StackId": "STACKID"}
-        )
+        self.stubber.add_response("create_change_set", {"Id": "CHANGESETID", "StackId": "STACKID"})
         changes = [generate_change()]
         self.stubber.add_response(
             "describe_change_set",
@@ -860,9 +769,7 @@ def test_noninteractive_changeset_update_no_stack_policy(self) -> None:

     def test_noninteractive_changeset_update_with_stack_policy(self) -> None:
         """Test noninteractive changeset update with stack policy."""
-        self.stubber.add_response(
-            "create_change_set", {"Id": "CHANGESETID", "StackId": "STACKID"}
-        )
+        self.stubber.add_response("create_change_set", {"Id": "CHANGESETID", "StackId": "STACKID"})
         changes = [generate_change()]
         self.stubber.add_response(
             "describe_change_set",
@@ -888,7 +795,7 @@ def test_noninteractive_destroy_stack_termination_protected(self) -> None:
         """Test noninteractive_destroy_stack with termination protection."""
         self.stubber.add_client_error("delete_stack")

-        with self.stubber, self.assertRaises(ClientError):
+        with self.stubber, pytest.raises(ClientError):
             self.provider.noninteractive_destroy_stack("fake-stack")
         self.stubber.assert_no_pending_responses()

@@ -901,12 +808,8 @@ def test_get_stack_changes_update(self, mock_output_full_cs: MagicMock) -> None:
         self.stubber.add_response(
             "describe_stacks", {"Stacks": [generate_describe_stacks_stack(stack_name)]}
         )
-        self.stubber.add_response(
-            "get_template", generate_get_template("cfn_template.yaml")
-        )
-        self.stubber.add_response(
"CHANGESETID", "StackId": stack_name} - ) + self.stubber.add_response("get_template", generate_get_template("cfn_template.yaml")) + self.stubber.add_response("create_change_set", {"Id": "CHANGESETID", "StackId": stack_name}) changes = [generate_change()] self.stubber.add_response( "describe_change_set", @@ -933,8 +836,8 @@ def test_get_stack_changes_update(self, mock_output_full_cs: MagicMock) -> None: expected_outputs = { "FakeOutput": "" } - self.assertEqual(self.provider.get_outputs(stack_name), expected_outputs) - self.assertEqual(result, expected_outputs) + assert self.provider.get_outputs(stack_name) == expected_outputs + assert result == expected_outputs @patch("runway.cfngin.providers.aws.default.output_full_changeset") def test_get_stack_changes_create(self, mock_output_full_cs: MagicMock) -> None: @@ -946,15 +849,11 @@ def test_get_stack_changes_create(self, mock_output_full_cs: MagicMock) -> None: "describe_stacks", { "Stacks": [ - generate_describe_stacks_stack( - stack_name, stack_status="REVIEW_IN_PROGRESS" - ) + generate_describe_stacks_stack(stack_name, stack_status="REVIEW_IN_PROGRESS") ] }, ) - self.stubber.add_response( - "create_change_set", {"Id": "CHANGESETID", "StackId": stack_name} - ) + self.stubber.add_response("create_change_set", {"Id": "CHANGESETID", "StackId": stack_name}) changes = [generate_change()] self.stubber.add_response( "describe_change_set", @@ -967,9 +866,7 @@ def test_get_stack_changes_create(self, mock_output_full_cs: MagicMock) -> None: "describe_stacks", { "Stacks": [ - generate_describe_stacks_stack( - stack_name, stack_status="REVIEW_IN_PROGRESS" - ) + generate_describe_stacks_stack(stack_name, stack_status="REVIEW_IN_PROGRESS") ] }, ) @@ -977,9 +874,7 @@ def test_get_stack_changes_create(self, mock_output_full_cs: MagicMock) -> None: "describe_stacks", { "Stacks": [ - generate_describe_stacks_stack( - stack_name, stack_status="REVIEW_IN_PROGRESS" - ) + generate_describe_stacks_stack(stack_name, stack_status="REVIEW_IN_PROGRESS") ] }, ) @@ -1020,9 +915,8 @@ def test_tail_stack_retry_on_missing_stack(self) -> None: try: self.provider.tail_stack(stack, threading.Event()) except ClientError as exc: - self.assertEqual( - exc.response.get("ResponseMetadata", {}).get("attempt"), - MAX_TAIL_RETRIES, + assert ( # noqa: PT017 + exc.response.get("ResponseMetadata", {}).get("attempt") == MAX_TAIL_RETRIES ) def test_tail_stack_retry_on_missing_stack_eventual_success(self) -> None: @@ -1032,14 +926,13 @@ def test_tail_stack_retry_on_missing_stack_eventual_success(self) -> None: stack.fqn = f"my-namespace-{stack_name}" default.TAIL_RETRY_SLEEP = 0.01 - default.GET_EVENTS_SLEEP = 0.01 - received_events: List[Any] = [] + received_events: list[Any] = [] def mock_log_func(event: Any) -> None: received_events.append(event) - def valid_event_response(stack: Stack, event_id: str) -> Dict[str, Any]: + def valid_event_response(stack: Stack, event_id: str) -> dict[str, Any]: return { "StackEvents": [ { @@ -1065,24 +958,17 @@ def valid_event_response(stack: Stack, event_id: str) -> Dict[str, Any]: "describe_stack_events", valid_event_response(stack, "InitialEvents") ) - self.stubber.add_response( - "describe_stack_events", valid_event_response(stack, "Event1") - ) + self.stubber.add_response("describe_stack_events", valid_event_response(stack, "Event1")) - with self.stubber: - try: - self.provider.tail_stack( - stack, threading.Event(), log_func=mock_log_func - ) - except UnStubbedResponseError: - # Eventually we run out of responses - could not happen in - # regular 
execution - # normally this would just be dealt with when the threads were - # shutdown, but doing so here is a little difficult because - # we can't control the `tail_stack` loop - pass + with self.stubber, suppress(UnStubbedResponseError): + # Eventually we run out of responses - could not happen in + # regular execution + # normally this would just be dealt with when the threads were + # shutdown, but doing so here is a little difficult because + # we can't control the `tail_stack` loop + self.provider.tail_stack(stack, threading.Event(), log_func=mock_log_func) - self.assertEqual(received_events[0]["EventId"], "Event1") + assert received_events[0]["EventId"] == "Event1" def test_update_termination_protection(self) -> None: """Test update_termination_protection.""" @@ -1140,7 +1026,7 @@ def test_interactive_destroy_stack(self, patched_input: MagicMock) -> None: self.stubber.add_response("delete_stack", {}, stack) with self.stubber: - self.assertIsNone(self.provider.interactive_destroy_stack(stack_name)) + assert self.provider.interactive_destroy_stack(stack_name) is None self.stubber.assert_no_pending_responses() @patch("runway.cfngin.providers.aws.default.Provider.update_termination_protection") @@ -1153,9 +1039,7 @@ def test_interactive_destroy_stack_termination_protected( stack = {"StackName": stack_name} patched_input.return_value = "y" - self.stubber.add_client_error( - "delete_stack", service_message="TerminationProtection" - ) + self.stubber.add_client_error("delete_stack", service_message="TerminationProtection") self.stubber.add_response("delete_stack", {}, stack) with self.stubber: @@ -1169,17 +1053,14 @@ def test_destroy_stack_canceled(self, patched_input: MagicMock) -> None: """Test destroy stack canceled.""" patched_input.return_value = "n" - with self.assertRaises(exceptions.CancelExecution): - stack = {"StackName": "MockStack"} - self.provider.destroy_stack(stack) # type: ignore + with pytest.raises(exceptions.CancelExecution): + self.provider.destroy_stack({"StackName": "MockStack"}) # type: ignore def test_successful_init(self) -> None: """Test successful init.""" replacements = True - provider = Provider( - self.session, interactive=True, replacements_only=replacements - ) - self.assertEqual(provider.replacements_only, replacements) + provider = Provider(self.session, interactive=True, replacements_only=replacements) + assert provider.replacements_only == replacements @patch("runway.cfngin.providers.aws.default.Provider.update_termination_protection") @patch("runway.cfngin.providers.aws.default.ask_for_approval") @@ -1189,9 +1070,7 @@ def test_update_stack_execute_success_no_stack_policy( """Test update stack execute success no stack policy.""" stack_name = "my-fake-stack" - self.stubber.add_response( - "create_change_set", {"Id": "CHANGESETID", "StackId": "STACKID"} - ) + self.stubber.add_response("create_change_set", {"Id": "CHANGESETID", "StackId": "STACKID"}) changes = [generate_change()] self.stubber.add_response( @@ -1225,9 +1104,7 @@ def test_update_stack_execute_success_with_stack_policy( """Test update stack execute success with stack policy.""" stack_name = "my-fake-stack" - self.stubber.add_response( - "create_change_set", {"Id": "CHANGESETID", "StackId": "STACKID"} - ) + self.stubber.add_response("create_change_set", {"Id": "CHANGESETID", "StackId": "STACKID"}) changes = [generate_change()] self.stubber.add_response( @@ -1262,9 +1139,7 @@ def test_select_destroy_method(self) -> None: [{"force_interactive": False}, self.provider.interactive_destroy_stack], 
[{"force_interactive": True}, self.provider.interactive_destroy_stack], ]: - self.assertEqual( - self.provider.select_destroy_method(**i[0]), i[1] # type: ignore - ) + assert self.provider.select_destroy_method(**i[0]) == i[1] # type: ignore def test_select_update_method(self) -> None: """Test select update method.""" @@ -1286,9 +1161,7 @@ def test_select_update_method(self) -> None: self.provider.interactive_update_stack, ], ]: - self.assertEqual( - self.provider.select_update_method(**i[0]), i[1] # type: ignore - ) + assert self.provider.select_update_method(**i[0]) == i[1] # type: ignore @patch("runway.cfngin.providers.aws.default.output_full_changeset") @patch("runway.cfngin.providers.aws.default.output_summary") @@ -1302,12 +1175,8 @@ def test_get_stack_changes_interactive( self.stubber.add_response( "describe_stacks", {"Stacks": [generate_describe_stacks_stack(stack_name)]} ) - self.stubber.add_response( - "get_template", generate_get_template("cfn_template.yaml") - ) - self.stubber.add_response( - "create_change_set", {"Id": "CHANGESETID", "StackId": stack_name} - ) + self.stubber.add_response("get_template", generate_get_template("cfn_template.yaml")) + self.stubber.add_response("create_change_set", {"Id": "CHANGESETID", "StackId": stack_name}) changes = [generate_change()] self.stubber.add_response( "describe_change_set", diff --git a/tests/unit/cfngin/test_cfngin.py b/tests/unit/cfngin/test_cfngin.py index c9ed553a0..b5051dd7f 100644 --- a/tests/unit/cfngin/test_cfngin.py +++ b/tests/unit/cfngin/test_cfngin.py @@ -1,13 +1,12 @@ """Tests for runway.cfngin entry point.""" -# pylint: disable=redefined-outer-name, unnecessary-dunder-call from __future__ import annotations import shutil from typing import TYPE_CHECKING +from unittest.mock import Mock, call import pytest -from mock import Mock, call from yaml.constructor import ConstructorError from runway.cfngin.cfngin import CFNgin @@ -28,15 +27,11 @@ def copy_fixture(src: Path, dest: Path) -> Path: def copy_basic_fixtures(cfngin_fixtures: Path, tmp_path: Path) -> None: """Copy the basic env file and config file to a tmp_path.""" - copy_fixture( - src=cfngin_fixtures / "envs" / "basic.env", dest=tmp_path / "test-us-east-1.env" - ) - copy_fixture( - src=cfngin_fixtures / "configs" / "basic.yml", dest=tmp_path / "basic.yml" - ) + copy_fixture(src=cfngin_fixtures / "envs" / "basic.env", dest=tmp_path / "test-us-east-1.env") + copy_fixture(src=cfngin_fixtures / "configs" / "basic.yml", dest=tmp_path / "basic.yml") -@pytest.fixture(scope="function") +@pytest.fixture() def patch_safehaven(mocker: MockerFixture) -> Mock: """Patch SafeHaven.""" mock_haven = mocker.patch("runway.cfngin.cfngin.SafeHaven") @@ -58,9 +53,7 @@ def configure_mock_action_instance(mock_action: Mock) -> Mock: @staticmethod def get_context(name: str = "test", region: str = "us-east-1") -> MockRunwayContext: """Create a basic Runway context object.""" - context = MockRunwayContext( - deploy_environment=DeployEnvironment(explicit_name=name) - ) + context = MockRunwayContext(deploy_environment=DeployEnvironment(explicit_name=name)) context.env.aws_region = region return context @@ -87,9 +80,7 @@ def test_env_file(self, tmp_path: Path) -> None: result = CFNgin(ctx=self.get_context(region="us-west-2"), sys_path=tmp_path) assert result.env_file["test_value"] == "test-us-west-2" - result = CFNgin( - ctx=self.get_context(name="lab", region="ca-central-1"), sys_path=tmp_path - ) + result = CFNgin(ctx=self.get_context(name="lab", region="ca-central-1"), sys_path=tmp_path) assert 
result.env_file["test_value"] == "lab-ca-central-1" def test_deploy( @@ -103,9 +94,7 @@ def test_deploy( mock_action = mocker.patch("runway.cfngin.actions.deploy.Action", Mock()) mock_instance = self.configure_mock_action_instance(mock_action) copy_basic_fixtures(cfngin_fixtures, tmp_path) - copy_fixture( - src=cfngin_fixtures / "configs" / "basic.yml", dest=tmp_path / "basic2.yml" - ) + copy_fixture(src=cfngin_fixtures / "configs" / "basic.yml", dest=tmp_path / "basic2.yml") context = self.get_context() context.env.vars["CI"] = "1" @@ -184,9 +173,7 @@ def test_destroy( cfngin.destroy() mock_action.assert_called_once() - mock_instance.execute.assert_called_once_with( - concurrency=0, force=True, tail=False - ) + mock_instance.execute.assert_called_once_with(concurrency=0, force=True, tail=False) patch_safehaven.assert_has_calls( [ call(environ=context.env.vars), @@ -273,9 +260,7 @@ def test_load(self, cfngin_fixtures: Path, tmp_path: Path) -> None: assert len(result.stacks) == 1 assert result.stacks[0].name == "test-stack" - def test_load_raise_constructor_error( - self, mocker: MockerFixture, tmp_path: Path - ) -> None: + def test_load_raise_constructor_error(self, mocker: MockerFixture, tmp_path: Path) -> None: """Test load raise ConstructorError.""" config = Mock(load=Mock(side_effect=ConstructorError(problem="something else"))) get_config = mocker.patch.object(CFNgin, "_get_config", return_value=config) diff --git a/tests/unit/cfngin/test_dag.py b/tests/unit/cfngin/test_dag.py index 8342ef9d1..aea1e8103 100644 --- a/tests/unit/cfngin/test_dag.py +++ b/tests/unit/cfngin/test_dag.py @@ -1,8 +1,7 @@ """Tests for runway.cfngin.dag.""" -# pyright: basic import threading -from typing import Any, List +from typing import Any import pytest @@ -65,7 +64,7 @@ def test_walk(empty_dag: DAG) -> None: # b and c should be executed at the same time. 
dag.from_dict({"a": ["b", "c"], "b": ["d"], "c": ["d"], "d": []}) - nodes: List[Any] = [] + nodes: list[Any] = [] def walk_func(node: Any) -> bool: nodes.append(node) @@ -172,9 +171,7 @@ def test_transitive_reduction(empty_dag: DAG) -> None: """Test transitive reduction.""" dag = empty_dag # https://en.wikipedia.org/wiki/Transitive_reduction#/media/File:Tred-G.svg - dag.from_dict( - {"a": ["b", "c", "d", "e"], "b": ["d"], "c": ["d", "e"], "d": ["e"], "e": []} - ) + dag.from_dict({"a": ["b", "c", "d", "e"], "b": ["d"], "c": ["d", "e"], "d": ["e"], "e": []}) dag.transitive_reduction() # https://en.wikipedia.org/wiki/Transitive_reduction#/media/File:Tred-Gprime.svg assert dag.graph == { @@ -206,7 +203,7 @@ def test_threaded_walker(empty_dag: DAG) -> None: dag.from_dict({"a": ["b", "c"], "b": ["d"], "c": ["d"], "d": []}) lock = threading.Lock() # Protects nodes from concurrent access - nodes: List[Any] = [] + nodes: list[Any] = [] def walk_func(node: Any) -> bool: with lock: diff --git a/tests/unit/cfngin/test_environment.py b/tests/unit/cfngin/test_environment.py index a57ad3ef1..9ad4c5c1f 100644 --- a/tests/unit/cfngin/test_environment.py +++ b/tests/unit/cfngin/test_environment.py @@ -1,7 +1,7 @@ """Tests for runway.cfngin.environment.""" -# pyright: basic, reportUnnecessaryIsInstance=none -import unittest +# pyright: reportUnnecessaryIsInstance=none +import pytest from runway.cfngin.environment import parse_environment @@ -27,27 +27,27 @@ """ -class TestEnvironment(unittest.TestCase): +class TestEnvironment: """Tests for runway.cfngin.environment.""" def test_simple_key_value_parsing(self) -> None: """Test simple key value parsing.""" parsed_env = parse_environment(TEST_ENV) - self.assertTrue(isinstance(parsed_env, dict)) - self.assertEqual(parsed_env["key1"], "value1") - self.assertEqual(parsed_env["key2"], "value2") - self.assertEqual(parsed_env["key3"], "some:complex::value") - self.assertEqual(parsed_env["key4"], ":otherValue:") - self.assertEqual(parsed_env["key5"], "@value") - self.assertEqual(len(parsed_env), 5) + assert isinstance(parsed_env, dict) + assert parsed_env["key1"] == "value1" + assert parsed_env["key2"] == "value2" + assert parsed_env["key3"] == "some:complex::value" + assert parsed_env["key4"] == ":otherValue:" + assert parsed_env["key5"] == "@value" + assert len(parsed_env) == 5 def test_simple_key_value_parsing_exception(self) -> None: """Test simple key value parsing exception.""" - with self.assertRaises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 parse_environment(TEST_ERROR_ENV) def test_blank_value(self) -> None: """Test blank value.""" env = """key1:""" parsed = parse_environment(env) - self.assertEqual(parsed["key1"], "") + assert not parsed["key1"] diff --git a/tests/unit/cfngin/test_exceptions.py b/tests/unit/cfngin/test_exceptions.py index 2c6498612..092ee8b01 100644 --- a/tests/unit/cfngin/test_exceptions.py +++ b/tests/unit/cfngin/test_exceptions.py @@ -3,7 +3,7 @@ from __future__ import annotations from pathlib import Path -from typing import TYPE_CHECKING, List, Optional, Union +from typing import TYPE_CHECKING import pytest @@ -29,9 +29,7 @@ class TestCfnginBucketRequired: (Path("/tmp"), "something", f"; something ({Path('/tmp')})"), ], ) - def test___init__( - self, config_path: Optional[AnyPath], reason: Optional[str], expected: str - ) -> None: + def test___init__(self, config_path: AnyPath | None, reason: str | None, expected: str) -> None: """Test __init__.""" expected_msg = f"cfngin_bucket is required{expected}" obj = 
         obj = CfnginBucketRequired(config_path=config_path, reason=reason)
@@ -50,9 +48,7 @@ class TestInvalidConfig:
         "errors, expected_msg",
         [("error", "error"), (["error0", "error1"], "error0\nerror1")],
     )
-    def test___init__(
-        self, errors: Union[str, List[Union[Exception, str]]], expected_msg: str
-    ) -> None:
+    def test___init__(self, errors: str | list[Exception | str], expected_msg: str) -> None:
         """Test __init__."""
         obj = InvalidConfig(errors)
         assert obj.errors == errors
@@ -76,9 +72,7 @@ class TestPersistentGraphLocked:
             (None, "reason", "Persistent graph is locked. reason"),
         ],
     )
-    def test___init__(
-        self, message: Optional[str], reason: Optional[str], expected_msg: str
-    ) -> None:
+    def test___init__(self, message: str | None, reason: str | None, expected_msg: str) -> None:
         """Test __init__."""
         obj = PersistentGraphLocked(message=message, reason=reason)
         assert obj.message == expected_msg
@@ -101,9 +95,7 @@ class TestPersistentGraphUnlocked:
             (None, "reason", "Persistent graph is unlocked. reason"),
         ],
     )
-    def test___init__(
-        self, message: Optional[str], reason: Optional[str], expected_msg: str
-    ) -> None:
+    def test___init__(self, message: str | None, reason: str | None, expected_msg: str) -> None:
         """Test __init__."""
         obj = PersistentGraphUnlocked(message=message, reason=reason)
         assert obj.message == expected_msg
diff --git a/tests/unit/cfngin/test_plan.py b/tests/unit/cfngin/test_plan.py
index 77670f33b..78620fe6d 100644
--- a/tests/unit/cfngin/test_plan.py
+++ b/tests/unit/cfngin/test_plan.py
@@ -1,17 +1,17 @@
 """Tests for runway.cfngin.plan."""

-# pylint: disable=protected-access,unused-argument
-# pyright: basic
+# ruff: noqa: SLF001
 from __future__ import annotations

 import json
-import os
 import shutil
 import tempfile
 import unittest
-from typing import TYPE_CHECKING, Any, Dict, List, Optional
+from pathlib import Path
+from typing import TYPE_CHECKING, Any
+from unittest import mock

-import mock
+import pytest

 from runway.cfngin.dag import walk
 from runway.cfngin.exceptions import (
@@ -50,23 +50,23 @@ def setUp(self) -> None:

     def test_status(self) -> None:
         """Test status."""
-        self.assertFalse(self.step.submitted)
-        self.assertFalse(self.step.completed)
+        assert not self.step.submitted
+        assert not self.step.completed

         self.step.submit()
-        self.assertEqual(self.step.status, SUBMITTED)
-        self.assertTrue(self.step.submitted)
-        self.assertFalse(self.step.completed)
+        assert self.step.status == SUBMITTED
+        assert self.step.submitted
+        assert not self.step.completed

         self.step.complete()
-        self.assertEqual(self.step.status, COMPLETE)
-        self.assertNotEqual(self.step.status, SUBMITTED)
-        self.assertTrue(self.step.submitted)
-        self.assertTrue(self.step.completed)
+        assert self.step.status == COMPLETE
+        assert self.step.status != SUBMITTED
+        assert self.step.submitted
+        assert self.step.completed

-        self.assertNotEqual(self.step.status, True)
-        self.assertNotEqual(self.step.status, False)
-        self.assertNotEqual(self.step.status, "banana")
+        assert self.step.status is not True
+        assert self.step.status is not False
+        assert self.step.status != "banana"

     def test_from_stack_name(self) -> None:
         """Return step from step name."""
@@ -74,21 +74,21 @@ def test_from_stack_name(self) -> None:
         stack_name = "test-stack"
         result = Step.from_stack_name(stack_name, context)

-        self.assertIsInstance(result, Step)
-        self.assertEqual(stack_name, result.stack.name)
+        assert isinstance(result, Step)
+        assert stack_name == result.stack.name

     def test_from_persistent_graph(self) -> None:
         """Return list of steps from graph dict."""
         context = mock_context()
-        graph_dict: Dict[str, Any] = {"stack1": [], "stack2": ["stack1"]}
+        graph_dict: dict[str, Any] = {"stack1": [], "stack2": ["stack1"]}

         result = Step.from_persistent_graph(graph_dict, context)

-        self.assertEqual(2, len(result))
-        self.assertIsInstance(result, list)
+        assert len(result) == 2
+        assert isinstance(result, list)

         for step in result:
-            self.assertIsInstance(step, Step)
-            self.assertIn(step.stack.name, graph_dict.keys())
+            assert isinstance(step, Step)
+            assert step.stack.name in graph_dict


 class TestGraph(unittest.TestCase):
@@ -97,7 +97,7 @@ class TestGraph(unittest.TestCase):
     def setUp(self) -> None:
         """Run before tests."""
         self.context = mock_context()
-        self.graph_dict: Dict[str, Any] = {"stack1": [], "stack2": ["stack1"]}
+        self.graph_dict: dict[str, Any] = {"stack1": [], "stack2": ["stack1"]}
         self.graph_dict_expected = {"stack1": set(), "stack2": {"stack1"}}
         self.steps = Step.from_persistent_graph(self.graph_dict, self.context)

@@ -106,9 +106,9 @@ def test_add_steps(self) -> None:
         graph = Graph()
         graph.add_steps(self.steps)

-        self.assertEqual(self.steps, list(graph.steps.values()))
-        self.assertEqual([step.name for step in self.steps], list(graph.steps.keys()))
-        self.assertEqual(self.graph_dict_expected, graph.to_dict())
+        assert self.steps == list(graph.steps.values())
+        assert [step.name for step in self.steps] == list(graph.steps.keys())
+        assert self.graph_dict_expected == graph.to_dict()

     def test_pop(self) -> None:
         """Test pop."""
@@ -117,31 +117,31 @@ def test_pop(self) -> None:

         stack2 = next(step for step in self.steps if step.name == "stack2")

-        self.assertEqual(stack2, graph.pop(stack2))
-        self.assertEqual({"stack1": set()}, graph.to_dict())
+        assert stack2 == graph.pop(stack2)
+        assert graph.to_dict() == {"stack1": set()}

     def test_dumps(self) -> None:
         """Test dumps."""
         graph = Graph()
         graph.add_steps(self.steps)

-        self.assertEqual(json.dumps(self.graph_dict), graph.dumps())
+        assert json.dumps(self.graph_dict) == graph.dumps()

     def test_from_dict(self) -> None:
         """Test from dict."""
         graph = Graph.from_dict(self.graph_dict, self.context)

-        self.assertIsInstance(graph, Graph)
-        self.assertEqual([step.name for step in self.steps], list(graph.steps.keys()))
-        self.assertEqual(self.graph_dict_expected, graph.to_dict())
+        assert isinstance(graph, Graph)
+        assert [step.name for step in self.steps] == list(graph.steps.keys())
+        assert self.graph_dict_expected == graph.to_dict()

     def test_from_steps(self) -> None:
         """Test from steps."""
         graph = Graph.from_steps(self.steps)

-        self.assertEqual(self.steps, list(graph.steps.values()))
-        self.assertEqual([step.name for step in self.steps], list(graph.steps.keys()))
-        self.assertEqual(self.graph_dict_expected, graph.to_dict())
+        assert self.steps == list(graph.steps.values())
+        assert [step.name for step in self.steps] == list(graph.steps.keys())
+        assert self.graph_dict_expected == graph.to_dict()


 class TestPlan(unittest.TestCase):
@@ -156,9 +156,8 @@ def setUp(self) -> None:
         class FakeLookup(LookupHandler):
             """False Lookup."""

-            # pylint: disable=arguments-differ
             @classmethod
-            def handle(cls, value: str, *__args: Any, **__kwargs: Any) -> str:  # type: ignore
+            def handle(cls, _value: str, *__args: Any, **__kwargs: Any) -> str:  # type: ignore
                 """Perform the lookup."""
                 return "test"
@@ -179,7 +178,7 @@ def test_plan(self) -> None:
         graph = Graph.from_steps([Step(vpc, fn=None), Step(bastion, fn=None)])
         plan = Plan(description="Test", graph=graph)

-        self.assertEqual(plan.graph.to_dict(), {"bastion-1": {"vpc-1"}, "vpc-1": set()})
+        assert plan.graph.to_dict() == {"bastion-1": {"vpc-1"}, "vpc-1": set()}

     def test_plan_reverse(self) -> None:
         """Test plan reverse."""
@@ -193,8 +192,8 @@ def test_plan_reverse(self) -> None:
         # order is different between python2/3 so can't compare dicts
         result_graph_dict = plan.graph.to_dict()
-        self.assertEqual(set(), result_graph_dict.get("bastion-1"))
-        self.assertEqual({"bastion-1"}, result_graph_dict.get("vpc-1"))
+        assert set() == result_graph_dict.get("bastion-1")
+        assert {"bastion-1"} == result_graph_dict.get("vpc-1")

     def test_plan_targeted(self) -> None:
         """Test plan targeted."""
@@ -208,7 +207,7 @@ def test_plan_targeted(self) -> None:
         graph = Graph.from_steps([Step(vpc, fn=None), Step(bastion, fn=None)])
         plan = Plan(description="Test", graph=graph, context=context)

-        self.assertEqual({vpc.name: set()}, plan.graph.to_dict())
+        assert plan.graph.to_dict() == {vpc.name: set()}

     def test_execute_plan(self) -> None:
         """Test execute plan."""
@@ -219,18 +218,16 @@ def test_execute_plan(self) -> None:
             definition=generate_definition("bastion", 1, requires=[vpc.name]),
             context=context,
         )
-        removed = Stack(
-            definition=generate_definition("removed", 1, requires=[]), context=context
-        )
+        removed = Stack(definition=generate_definition("removed", 1, requires=[]), context=context)
         context._persistent_graph = Graph.from_steps([Step(removed)])

-        calls: List[str] = []
+        calls: list[str] = []

-        def _launch_stack(stack: Stack, status: Optional[Status] = None) -> Status:
+        def _launch_stack(stack: Stack, status: Status | None = None) -> Status:  # noqa: ARG001
             calls.append(stack.fqn)
             return COMPLETE

-        def _destroy_stack(stack: Stack, status: Optional[Status] = None) -> Status:
+        def _destroy_stack(stack: Stack, status: Status | None = None) -> Status:  # noqa: ARG001
             calls.append(stack.fqn)
             return COMPLETE

@@ -246,17 +243,17 @@ def _destroy_stack(stack: Stack, status: Optional[Status] = None) -> Status:
         plan.execute(walk)

         # the order these are appended changes between python2/3
-        self.assertIn("namespace-vpc-1", calls)
-        self.assertIn("namespace-bastion-1", calls)
-        self.assertIn("namespace-removed-1", calls)
+        assert "namespace-vpc-1" in calls
+        assert "namespace-bastion-1" in calls
+        assert "namespace-removed-1" in calls
         context.put_persistent_graph.assert_called()

         # order is different between python2/3 so can't compare dicts
         result_graph_dict = context.persistent_graph.to_dict()  # type: ignore
-        self.assertEqual(2, len(result_graph_dict))
-        self.assertEqual(set(), result_graph_dict.get("vpc-1"))
-        self.assertEqual({"vpc-1"}, result_graph_dict.get("bastion-1"))
-        self.assertIsNone(result_graph_dict.get("namespace-removed-1"))
+        assert len(result_graph_dict) == 2
+        assert set() == result_graph_dict.get("vpc-1")
+        assert {"vpc-1"} == result_graph_dict.get("bastion-1")
+        assert result_graph_dict.get("namespace-removed-1") is None

     def test_execute_plan_no_persist(self) -> None:
         """Test execute plan with no persistent graph."""
@@ -268,20 +265,18 @@ def test_execute_plan_no_persist(self) -> None:
             context=context,
         )

-        calls: List[str] = []
+        calls: list[str] = []

-        def _launch_stack(stack: Stack, status: Optional[Status] = None) -> Status:
+        def _launch_stack(stack: Stack, status: Status | None = None) -> Status:  # noqa: ARG001
             calls.append(stack.fqn)
             return COMPLETE

-        graph = Graph.from_steps(
-            [Step(vpc, fn=_launch_stack), Step(bastion, fn=_launch_stack)]
-        )
+        graph = Graph.from_steps([Step(vpc, fn=_launch_stack), Step(bastion, fn=_launch_stack)])
         plan = Plan(description="Test", graph=graph, context=context)

         plan.execute(walk)

-        self.assertEqual(calls, ["namespace-vpc-1", "namespace-bastion-1"])
+        assert calls == ["namespace-vpc-1", "namespace-bastion-1"]
         context.put_persistent_graph.assert_not_called()

     def test_execute_plan_locked(self) -> None:
@@ -293,14 +288,13 @@ def test_execute_plan_locked(self) -> None:
         """
         vpc = Stack(definition=generate_definition("vpc", 1), context=self.context)
         bastion = Stack(
-            definition=generate_definition("bastion", 1, requires=[vpc.name]),
-            locked=True,
+            definition=generate_definition("bastion", 1, locked=True, requires=[vpc.name]),
             context=self.context,
         )

-        calls: List[str] = []
+        calls: list[str] = []

-        def fn(stack: Stack, status: Optional[Status] = None) -> Status:
+        def fn(stack: Stack, status: Status | None = None) -> Status:  # noqa: ARG001
             calls.append(stack.fqn)
             return COMPLETE

@@ -308,7 +302,7 @@ def fn(stack: Stack, status: Optional[Status] = None) -> Status:
         plan = Plan(description="Test", graph=graph)
         plan.execute(walk)

-        self.assertEqual(calls, ["namespace-vpc-1", "namespace-bastion-1"])
+        assert calls == ["namespace-vpc-1", "namespace-bastion-1"]

     def test_execute_plan_filtered(self) -> None:
         """Test execute plan filtered."""
@@ -322,9 +316,9 @@ def test_execute_plan_filtered(self) -> None:
             context=self.context,
         )

-        calls: List[str] = []
+        calls: list[str] = []

-        def fn(stack: Stack, status: Optional[Status] = None) -> Status:
+        def fn(stack: Stack, status: Status | None = None) -> Status:  # noqa: ARG001
             calls.append(stack.fqn)
             return COMPLETE

@@ -335,7 +329,7 @@ def fn(stack: Stack, status: Optional[Status] = None) -> Status:
         plan = Plan(context=context, description="Test", graph=graph)
         plan.execute(walk)

-        self.assertEqual(calls, ["namespace-vpc-1", "namespace-db-1"])
+        assert calls == ["namespace-vpc-1", "namespace-db-1"]

     def test_execute_plan_exception(self) -> None:
         """Test execute plan exception."""
@@ -345,9 +339,9 @@ def test_execute_plan_exception(self) -> None:
             context=self.context,
         )

-        calls: List[str] = []
+        calls: list[str] = []

-        def fn(stack: Stack, status: Optional[Status] = None) -> Status:
+        def fn(stack: Stack, status: Status | None = None) -> Status:  # noqa: ARG001
             calls.append(stack.fqn)
             if stack.name == vpc_step.name:
                 raise ValueError("Boom")
@@ -359,11 +353,11 @@ def fn(stack: Stack, status: Optional[Status] = None) -> Status:
         graph = Graph.from_steps([vpc_step, bastion_step])
         plan = Plan(description="Test", graph=graph)

-        with self.assertRaises(PlanFailed):
+        with pytest.raises(PlanFailed):
             plan.execute(walk)

-        self.assertEqual(calls, ["namespace-vpc-1"])
-        self.assertEqual(vpc_step.status, FAILED)
+        assert calls == ["namespace-vpc-1"]
+        assert vpc_step.status == FAILED

     def test_execute_plan_skipped(self) -> None:
         """Test execute plan skipped."""
@@ -373,9 +367,9 @@ def test_execute_plan_skipped(self) -> None:
             context=self.context,
         )

-        calls: List[str] = []
+        calls: list[str] = []

-        def fn(stack: Stack, status: Optional[Status] = None) -> Status:
+        def fn(stack: Stack, status: Status | None = None) -> Status:  # noqa: ARG001
             calls.append(stack.fqn)
             if stack.fqn == vpc_step.name:
                 return SKIPPED
@@ -388,7 +382,7 @@ def fn(stack: Stack, status: Optional[Status] = None) -> Status:
         plan = Plan(description="Test", graph=graph)
         plan.execute(walk)

-        self.assertEqual(calls, ["namespace-vpc-1", "namespace-bastion-1"])
+        assert calls == ["namespace-vpc-1", "namespace-bastion-1"]

     def test_execute_plan_failed(self) -> None:
         """Test execute plan failed."""
@@ -399,9 +393,9 @@ def test_execute_plan_failed(self) -> None:
         )
         db = Stack(definition=generate_definition("db", 1), context=self.context)

-        calls: List[str] = []
+        calls: list[str] = []

-        def fn(stack: Stack, status: Optional[Status] = None) -> Status:
+        def fn(stack: Stack, status: Status | None = None) -> Status:  # noqa: ARG001
             calls.append(stack.fqn)
             if stack.name == vpc_step.name:
                 return FAILED
@@ -413,12 +407,12 @@ def fn(stack: Stack, status: Optional[Status] = None) -> Status:
         graph = Graph.from_steps([vpc_step, bastion_step, db_step])
         plan = Plan(description="Test", graph=graph)

-        with self.assertRaises(PlanFailed):
+        with pytest.raises(PlanFailed):
             plan.execute(walk)

         calls.sort()

-        self.assertEqual(calls, ["namespace-db-1", "namespace-vpc-1"])
+        assert calls == ["namespace-db-1", "namespace-vpc-1"]

     def test_execute_plan_cancelled(self) -> None:
         """Test execute plan cancelled."""
@@ -428,9 +422,9 @@ def test_execute_plan_cancelled(self) -> None:
             context=self.context,
         )

-        calls: List[str] = []
+        calls: list[str] = []

-        def fn(stack: Stack, status: Optional[Status] = None) -> Status:
+        def fn(stack: Stack, status: Status | None = None) -> Status:  # noqa: ARG001
             calls.append(stack.fqn)
             if stack.fqn == vpc_step.name:
                 raise CancelExecution
@@ -443,7 +437,7 @@ def fn(stack: Stack, status: Optional[Status] = None) -> Status:
         plan = Plan(description="Test", graph=graph)
         plan.execute(walk)

-        self.assertEqual(calls, ["namespace-vpc-1", "namespace-bastion-1"])
+        assert calls == ["namespace-vpc-1", "namespace-bastion-1"]

     def test_execute_plan_graph_locked(self) -> None:
         """Test execute plan with locked persistent graph."""
@@ -451,7 +445,7 @@ def test_execute_plan_graph_locked(self) -> None:
         context._persistent_graph = Graph.from_dict({"stack1": []}, context)
         context._persistent_graph_lock_code = "1111"
         plan = Plan(description="Test", graph=Graph(), context=context)
-        with self.assertRaises(PersistentGraphLocked):
+        with pytest.raises(PersistentGraphLocked):
             plan.execute()

     def test_build_graph_missing_dependency(self) -> None:
@@ -461,14 +455,12 @@ def test_build_graph_missing_dependency(self) -> None:
             context=self.context,
         )

-        with self.assertRaises(GraphError) as expected:
+        with pytest.raises(GraphError) as expected:
             Graph.from_steps([Step(bastion)])
-        message_starts = (
-            "Error detected when adding 'vpc-1' as a dependency of 'bastion-1':"
-        )
+        message_starts = "Error detected when adding 'vpc-1' as a dependency of 'bastion-1':"
         message_contains = "dependent node vpc-1 does not exist"
-        self.assertTrue(str(expected.exception).startswith(message_starts))
-        self.assertTrue(message_contains in str(expected.exception))
+        assert str(expected.value).startswith(message_starts)
+        assert message_contains in str(expected.value)

     def test_build_graph_cyclic_dependencies(self) -> None:
         """Test build graph cyclic dependencies."""
@@ -482,19 +474,19 @@ def test_build_graph_cyclic_dependencies(self) -> None:
             context=self.context,
         )

-        with self.assertRaises(GraphError) as expected:
+        with pytest.raises(GraphError) as expected:
             Graph.from_steps([Step(vpc), Step(db), Step(app)])
         message = (
             "Error detected when adding 'db-1' "
             "as a dependency of 'app-1': graph is "
             "not acyclic"
         )
-        self.assertEqual(str(expected.exception), message)
+        assert str(expected.value) == message

     def test_dump(self) -> None:
         """Test dump."""
-        requires: List[str] = []
-        steps: List[Step] = []
+        requires: list[str] = []
+        steps: list[Step] = []

         for i in range(5):
             overrides = {
@@ -523,9 +515,6 @@ def test_dump(self) -> None:
         plan.dump(directory=tmp_dir,
context=self.context) for step in plan.steps: - template_path = os.path.join( - tmp_dir, stack_template_key_name(step.stack.blueprint) # type: ignore - ) - self.assertTrue(os.path.isfile(template_path)) + assert (Path(tmp_dir) / stack_template_key_name(step.stack.blueprint)).is_file() finally: shutil.rmtree(tmp_dir) diff --git a/tests/unit/cfngin/test_stack.py b/tests/unit/cfngin/test_stack.py index 5102c2bc0..a1b3644cf 100644 --- a/tests/unit/cfngin/test_stack.py +++ b/tests/unit/cfngin/test_stack.py @@ -1,124 +1,193 @@ """Tests for runway.cfngin.stack.""" -# pyright: basic -import unittest -from typing import Any +from __future__ import annotations -from mock import MagicMock +from typing import TYPE_CHECKING, Any, ClassVar +from unittest.mock import Mock + +import pytest from runway.cfngin.lookups.registry import ( register_lookup_handler, unregister_lookup_handler, ) from runway.cfngin.stack import Stack -from runway.config import CfnginConfig -from runway.context import CfnginContext +from runway.config import CfnginStackDefinitionModel from runway.lookups.handlers.base import LookupHandler -from .factories import generate_definition +if TYPE_CHECKING: + from collections.abc import Iterator + from pathlib import Path + from pytest_mock import MockerFixture -class TestStack(unittest.TestCase): - """Tests for runway.cfngin.stack.Stack.""" + from ..factories import MockCfnginContext - def setUp(self) -> None: - """Run before tests.""" - self.sd = {"name": "test"} # pylint: disable=invalid-name - self.config = CfnginConfig.parse_obj({"namespace": "namespace"}) - self.context = CfnginContext(config=self.config) - self.stack = Stack( - definition=generate_definition("vpc", 1), context=self.context - ) +MODULE = "runway.cfngin.stack" - class FakeLookup(LookupHandler): - """False Lookup.""" - - # pylint: disable=arguments-differ,unused-argument - @classmethod - def handle(cls, value: str, *__args: Any, **__kwargs: Any) -> str: # type: ignore - """Perform the lookup.""" - return "test" - - register_lookup_handler("noop", FakeLookup) - - def tearDown(self) -> None: - """Run after tests.""" - unregister_lookup_handler("noop") - return super().tearDown() - - def test_stack_requires(self) -> None: - """Test stack requires.""" - definition = generate_definition( - base_name="vpc", - stack_id=1, - variables={ - "Var1": "${noop fakeStack3::FakeOutput}", - "Var2": ( - "some.template.value:${output fakeStack2.FakeOutput}:" - "${output fakeStack.FakeOutput}" - ), - "Var3": "${output fakeStack.FakeOutput}," - "${output fakeStack2.FakeOutput}", - }, - requires=["fakeStack"], + +@pytest.fixture(autouse=True, scope="module") +def fake_lookup() -> Iterator[None]: + """Register a fake lookup handler for testing.""" + + class FakeLookup(LookupHandler): + """Fake lookup.""" + + TYPE_NAME: ClassVar[str] = "fake" + + @classmethod + def handle(cls, value: str, *__args: Any, **__kwargs: Any) -> str: # type: ignore # noqa: ARG003 + """Perform the lookup.""" + return "test" + + register_lookup_handler(FakeLookup.TYPE_NAME, FakeLookup) + yield + unregister_lookup_handler(FakeLookup.TYPE_NAME) + + +def generate_stack_definition( base_name: str, stack_id: Any = None, **overrides: Any ) -> CfnginStackDefinitionModel: + """Generate stack definition.""" + definition: dict[str, Any] = { + "name": f"{base_name}-{stack_id}" if stack_id else base_name, + "class_path": f"tests.unit.cfngin.fixtures.mock_blueprints.{base_name.upper()}", + "requires": [], + } + definition.update(overrides) + return 
CfnginStackDefinitionModel(**definition) + + +class TestStack: + """Test Stack.""" + + def test_required_by(self, cfngin_context: MockCfnginContext) -> None: + """Test required_by.""" + stack = Stack( + definition=generate_stack_definition( + base_name="vpc", + required_by=["fakeStack0"], + variables={"Param1": "${output fakeStack.FakeOutput}"}, + ), + context=cfngin_context, ) - stack = Stack(definition=definition, context=self.context) - self.assertEqual(len(stack.requires), 2) - self.assertIn("fakeStack", stack.requires) - self.assertIn("fakeStack2", stack.requires) - - def test_stack_requires_circular_ref(self) -> None: - """Test stack requires circular ref.""" - definition = generate_definition( - base_name="vpc", - stack_id=1, - variables={"Var1": "${output vpc-1.FakeOutput}"}, + assert stack.required_by == {"fakeStack0"} + + def test_requires(self, cfngin_context: MockCfnginContext) -> None: + """Test requires.""" + stack = Stack( + definition=generate_stack_definition( + base_name="vpc", + variables={ + "Var1": "${fake fakeStack2::FakeOutput}", + "Var2": ( + "some.template.value:${output fakeStack1.FakeOutput}:" + "${output fakeStack0.FakeOutput}" + ), + "Var3": "${output fakeStack0.FakeOutput},${output fakeStack1.FakeOutput}", + }, + requires=["fakeStack0"], + ), + context=cfngin_context, ) - stack = Stack(definition=definition, context=self.context) - with self.assertRaises(ValueError): - stack.requires # pylint: disable=pointless-statement - - def test_stack_cfn_parameters(self) -> None: - """Test stack cfn parameters.""" - definition = generate_definition( - base_name="vpc", - stack_id=1, - variables={"Param1": "${output fakeStack.FakeOutput}"}, + assert len(stack.requires) == 2 + assert "fakeStack0" in stack.requires + assert "fakeStack1" in stack.requires + + def test_requires_cyclic_dependency(self, cfngin_context: MockCfnginContext) -> None: + """Test requires cyclic dependency.""" + stack = Stack( + definition=generate_stack_definition( + base_name="vpc", + variables={"Var1": "${output vpc.FakeOutput}"}, + ), + context=cfngin_context, ) - stack = Stack(definition=definition, context=self.context) - # pylint: disable=protected-access - stack._blueprint = MagicMock() - stack._blueprint.parameter_values = { - "Param2": "Some Resolved Value", - } - param = stack.parameter_values["Param2"] - self.assertEqual(param, "Some Resolved Value") - - def test_stack_tags_default(self) -> None: - """Test stack tags default.""" - self.config.tags = {"environment": "prod"} - definition = generate_definition(base_name="vpc", stack_id=1) - stack = Stack(definition=definition, context=self.context) - self.assertEqual(stack.tags, {"environment": "prod"}) - - def test_stack_tags_override(self) -> None: - """Test stack tags override.""" - self.config.tags = {"environment": "prod"} - definition = generate_definition( - base_name="vpc", stack_id=1, tags={"environment": "stage"} + with pytest.raises(ValueError, match="has a circular reference"): + assert stack.requires + + def test_resolve(self, cfngin_context: MockCfnginContext, mocker: MockerFixture) -> None: + """Test resolve.""" + mock_resolve_variables = mocker.patch(f"{MODULE}.resolve_variables") + mock_provider = Mock() + stack = Stack( + definition=generate_stack_definition(base_name="vpc"), + context=cfngin_context, ) - stack = Stack(definition=definition, context=self.context) - self.assertEqual(stack.tags, {"environment": "stage"}) - - def test_stack_tags_extra(self) -> None: - """Test stack tags extra.""" - self.config.tags = 
{"environment": "prod"} - definition = generate_definition( - base_name="vpc", stack_id=1, tags={"app": "graph"} + stack._blueprint = Mock() + assert not stack.resolve(cfngin_context, mock_provider) + mock_resolve_variables.assert_called_once_with( + stack.variables, cfngin_context, mock_provider ) - stack = Stack(definition=definition, context=self.context) - self.assertEqual(stack.tags, {"environment": "prod", "app": "graph"}) + stack._blueprint.resolve_variables.assert_called_once_with(stack.variables) + def test_set_outputs(self, cfngin_context: MockCfnginContext) -> None: + """Test set_outputs.""" + stack = Stack( + definition=generate_stack_definition(base_name="vpc"), + context=cfngin_context, + ) + assert not stack.outputs + outputs = {"foo": "bar"} + assert not stack.set_outputs(outputs) + assert stack.outputs == outputs + + def test_stack_policy(self, cfngin_context: MockCfnginContext, tmp_path: Path) -> None: + """Test stack_policy.""" + stack_policy_path = tmp_path / "stack_policy.json" + stack_policy_path.write_text("success") + assert ( + Stack( + definition=generate_stack_definition( + base_name="vpc", stack_policy_path=stack_policy_path + ), + context=cfngin_context, + ).stack_policy + == "success" + ) -if __name__ == "__main__": - unittest.main() + def test_stack_policy_not_provided(self, cfngin_context: MockCfnginContext) -> None: + """Test stack_policy.""" + assert not Stack( + definition=generate_stack_definition(base_name="vpc"), + context=cfngin_context, + ).stack_policy + + def test_tags(self, cfngin_context: MockCfnginContext) -> None: + """Test tags.""" + cfngin_context.config.tags = {"environment": "prod"} + assert Stack( + definition=generate_stack_definition( + base_name="vpc", tags={"app": "graph", "environment": "stage"} + ), + context=cfngin_context, + ).tags == {"app": "graph", "environment": "stage"} + + def test_tags_default(self, cfngin_context: MockCfnginContext) -> None: + """Test tags.""" + cfngin_context.config.tags = {"environment": "prod"} + assert Stack( + definition=generate_stack_definition(base_name="vpc"), + context=cfngin_context, + ).tags == {"environment": "prod"} + + @pytest.mark.parametrize( + "termination_protection, expected", + [(False, False), (True, True)], + ) + def test_termination_protection( + self, + cfngin_context: MockCfnginContext, + expected: str, + termination_protection: bool | str, + ) -> None: + """Test termination_protection.""" + assert ( + Stack( + definition=generate_stack_definition( + base_name="vpc", termination_protection=termination_protection + ), + context=cfngin_context, + ).termination_protection + is expected + ) diff --git a/tests/unit/cfngin/test_tokenize_userdata.py b/tests/unit/cfngin/test_tokenize_userdata.py index 125bc83d9..24696074f 100644 --- a/tests/unit/cfngin/test_tokenize_userdata.py +++ b/tests/unit/cfngin/test_tokenize_userdata.py @@ -1,6 +1,5 @@ """Tests for runway.cfngin.tokenize_userdata.""" -# pyright: basic import unittest import yaml @@ -16,8 +15,8 @@ def test_tokenize(self) -> None: user_data = ["field0", 'Ref("SshKey")', "field1", 'Fn::GetAtt("Blah", "Woot")'] user_data_dump = yaml.dump(user_data) parts = cf_tokenize(user_data_dump) - self.assertIsInstance(parts[1], dict) - self.assertIsInstance(parts[3], dict) - self.assertEqual(parts[1]["Ref"], "SshKey") # type: ignore - self.assertEqual(parts[3]["Fn::GetAtt"], ["Blah", "Woot"]) # type: ignore - self.assertEqual(len(parts), 5) + assert isinstance(parts[1], dict) + assert isinstance(parts[3], dict) + assert parts[1]["Ref"] == "SshKey" 
# type: ignore + assert parts[3]["Fn::GetAtt"] == ["Blah", "Woot"] # type: ignore + assert len(parts) == 5 diff --git a/tests/unit/cfngin/test_utils.py b/tests/unit/cfngin/test_utils.py index ac35b8ff7..89ed3c1ed 100644 --- a/tests/unit/cfngin/test_utils.py +++ b/tests/unit/cfngin/test_utils.py @@ -1,7 +1,5 @@ """Tests for runway.cfngin.utils.""" -# pylint: disable=unused-argument,invalid-name, broad-exception-raised -# pyright: basic from __future__ import annotations import logging @@ -11,10 +9,10 @@ import tempfile import unittest from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, cast +from typing import TYPE_CHECKING, Any, cast +from unittest import mock import boto3 -import mock import pytest from botocore.exceptions import ClientError from botocore.stub import Stubber @@ -41,7 +39,6 @@ from runway.config.models.cfngin import GitCfnginPackageSourceDefinitionModel if TYPE_CHECKING: - from pytest import LogCaptureFixture from pytest_mock import MockerFixture AWS_REGIONS = [ @@ -60,7 +57,7 @@ MODULE = "runway.cfngin.utils" -def mock_create_cache_directories(self: Any, **kwargs: Any) -> int: +def mock_create_cache_directories(self: Any, **kwargs: Any) -> int: # noqa: ARG001 """Mock create cache directories. Don't actually need the directories created in testing @@ -79,7 +76,7 @@ def test_ensure_s3_bucket() -> None: stubber.assert_no_pending_responses() -def test_ensure_s3_bucket_forbidden(caplog: LogCaptureFixture) -> None: +def test_ensure_s3_bucket_forbidden(caplog: pytest.LogCaptureFixture) -> None: """Test ensure_s3_bucket.""" caplog.set_level(logging.ERROR, logger=MODULE) s3_client = boto3.client("s3") @@ -144,22 +141,20 @@ def test_ensure_s3_bucket_not_found_persist_graph() -> None: stubber.assert_no_pending_responses() -def test_ensure_s3_bucket_persist_graph(caplog: LogCaptureFixture) -> None: +def test_ensure_s3_bucket_persist_graph(caplog: pytest.LogCaptureFixture) -> None: """Test ensure_s3_bucket.""" caplog.set_level(logging.WARNING, logger=MODULE) s3_client = boto3.client("s3") stubber = Stubber(s3_client) stubber.add_response("head_bucket", {}, {"Bucket": "test-bucket"}) - stubber.add_response( - "get_bucket_versioning", {"Status": "Enabled"}, {"Bucket": "test-bucket"} - ) + stubber.add_response("get_bucket_versioning", {"Status": "Enabled"}, {"Bucket": "test-bucket"}) with stubber: assert not ensure_s3_bucket(s3_client, "test-bucket", persist_graph=True) stubber.assert_no_pending_responses() assert not caplog.messages -def test_ensure_s3_bucket_persist_graph_mfa_delete(caplog: LogCaptureFixture) -> None: +def test_ensure_s3_bucket_persist_graph_mfa_delete(caplog: pytest.LogCaptureFixture) -> None: """Test ensure_s3_bucket.""" caplog.set_level(logging.WARNING, logger=MODULE) s3_client = boto3.client("s3") @@ -175,8 +170,7 @@ def test_ensure_s3_bucket_persist_graph_mfa_delete(caplog: LogCaptureFixture) -> stubber.assert_no_pending_responses() assert ( 'MFADelete must be disabled on bucket "test-bucket" when using persistent ' - "graphs to allow for proper management of the graphs" - in "\n".join(caplog.messages) + "graphs to allow for proper management of the graphs" in "\n".join(caplog.messages) ) @@ -184,26 +178,23 @@ def test_ensure_s3_bucket_persist_graph_mfa_delete(caplog: LogCaptureFixture) -> "versioning_response", [{"Status": "Disabled"}, {"Status": "Suspended"}, {}] ) def test_ensure_s3_bucket_persist_graph_versioning_not_enabled( - caplog: LogCaptureFixture, versioning_response: Dict[str, Any] + caplog: pytest.LogCaptureFixture, 
versioning_response: dict[str, Any] ) -> None: """Test ensure_s3_bucket.""" caplog.set_level(logging.WARNING, logger=MODULE) s3_client = boto3.client("s3") stubber = Stubber(s3_client) stubber.add_response("head_bucket", {}, {"Bucket": "test-bucket"}) - stubber.add_response( - "get_bucket_versioning", versioning_response, {"Bucket": "test-bucket"} - ) + stubber.add_response("get_bucket_versioning", versioning_response, {"Bucket": "test-bucket"}) with stubber: assert not ensure_s3_bucket(s3_client, "test-bucket", persist_graph=True) stubber.assert_no_pending_responses() - assert ( - "it is recommended to enable versioning when using persistent graphs" - in "\n".join(caplog.messages) + assert "it is recommended to enable versioning when using persistent graphs" in "\n".join( + caplog.messages ) -def test_ensure_s3_bucket_raise_client_error(caplog: LogCaptureFixture) -> None: +def test_ensure_s3_bucket_raise_client_error(caplog: pytest.LogCaptureFixture) -> None: """Test ensure_s3_bucket.""" caplog.set_level(logging.ERROR, logger=MODULE) s3_client = boto3.client("s3") @@ -224,13 +215,13 @@ def test_read_value_from_path_abs(tmp_path: Path) -> None: def test_read_value_from_path_dir(tmp_path: Path) -> None: """Test read_value_from_path directory.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 read_value_from_path(f"file://{tmp_path.absolute()}") def test_read_value_from_path_not_exist(tmp_path: Path) -> None: """Test read_value_from_path does not exist.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 read_value_from_path(f"file://{(tmp_path / 'something.txt').absolute()}") @@ -245,10 +236,7 @@ def test_read_value_from_path_root_path_dir(tmp_path: Path) -> None: """Test read_value_from_path root_path is dir.""" test_file = tmp_path / "test.txt" test_file.write_text("success") - assert ( - read_value_from_path(f"file://./{test_file.name}", root_path=tmp_path) - == "success" - ) + assert read_value_from_path(f"file://./{test_file.name}", root_path=tmp_path) == "success" def test_read_value_from_path_root_path_file(tmp_path: Path) -> None: @@ -256,9 +244,7 @@ def test_read_value_from_path_root_path_file(tmp_path: Path) -> None: test_file = tmp_path / "test.txt" test_file.write_text("success") assert ( - read_value_from_path( - f"file://./{test_file.name}", root_path=tmp_path / "something.json" - ) + read_value_from_path(f"file://./{test_file.name}", root_path=tmp_path / "something.json") == "success" ) @@ -283,7 +269,7 @@ def setUp(self) -> None: # Create a tar file using the temporary directory with tarfile.open(self.tmp_path / self.tar_file, "w") as tar: - tar.add(self.tmp_path, arcname=os.path.basename(self.tmp_path)) + tar.add(self.tmp_path, arcname=os.path.basename(self.tmp_path)) # noqa: PTH119 def tearDown(self) -> None: """Tear down test case.""" @@ -293,7 +279,7 @@ def test_cf_safe_name(self) -> None: """Test cf safe name.""" tests = (("abc-def", "AbcDef"), ("GhI", "GhI"), ("jKlm.noP", "JKlmNoP")) for test in tests: - self.assertEqual(cf_safe_name(test[0]), test[1]) + assert cf_safe_name(test[0]) == test[1] def test_camel_to_snake(self) -> None: """Test camel to snake.""" @@ -304,7 +290,7 @@ def test_camel_to_snake(self) -> None: ("testtemplate", "testtemplate"), ) for test in tests: - self.assertEqual(camel_to_snake(test[0]), test[1]) + assert camel_to_snake(test[0]) == test[1] def test_yaml_to_ordered_dict(self) -> None: """Test yaml to ordered dict.""" @@ -316,27 +302,27 @@ def test_yaml_to_ordered_dict(self) -> 
None: path: foo1.bar1 """ config = yaml_to_ordered_dict(raw_config) - self.assertEqual(list(config["pre_deploy"].keys())[0], "hook2") - self.assertEqual(config["pre_deploy"]["hook2"]["path"], "foo.bar") + assert next(iter(config["pre_deploy"].keys())) == "hook2" + assert config["pre_deploy"]["hook2"]["path"] == "foo.bar" def test_get_client_region(self) -> None: """Test get client region.""" regions = ["us-east-1", "us-west-1", "eu-west-1", "sa-east-1"] for region in regions: client = boto3.client("s3", region_name=region) - self.assertEqual(get_client_region(client), region) + assert get_client_region(client) == region def test_get_s3_endpoint(self) -> None: """Test get s3 endpoint.""" endpoint_url = "https://example.com" client = boto3.client("s3", region_name="us-east-1", endpoint_url=endpoint_url) - self.assertEqual(get_s3_endpoint(client), endpoint_url) + assert get_s3_endpoint(client) == endpoint_url def test_s3_bucket_location_constraint(self) -> None: """Test s3 bucket location constraint.""" tests = (("us-east-1", ""), ("us-west-1", "us-west-1")) for region, result in tests: - self.assertEqual(s3_bucket_location_constraint(region), result) + assert s3_bucket_location_constraint(region) == result def test_parse_cloudformation_template(self) -> None: """Test parse cloudformation template.""" @@ -374,54 +360,52 @@ def test_parse_cloudformation_template(self) -> None: } }, } - self.assertEqual(parse_cloudformation_template(template), parsed_template) + assert parse_cloudformation_template(template) == parsed_template - def test_is_within_directory(self): + def test_is_within_directory(self) -> None: """Test is within directory.""" directory = Path("my_directory") # Assert if the target is within the directory. target = "my_directory/sub_directory/file.txt" - self.assertTrue(is_within_directory(directory, target)) + assert is_within_directory(directory, target) # Assert if the target is NOT within the directory. target = "other_directory/file.txt" - self.assertFalse(is_within_directory(directory, target)) + assert not is_within_directory(directory, target) # Assert if the target is the directory. 
target = "my_directory" - self.assertTrue(is_within_directory(directory, target)) + assert is_within_directory(directory, target) - def test_safe_tar_extract_all_within(self): + def test_safe_tar_extract_all_within(self) -> None: """Test when all tar file contents are within the specified directory.""" path = self.tmp_path / "my_directory" with tarfile.open(self.tmp_path / self.tar_file, "r") as tar: - self.assertIsNone(safe_tar_extract(tar, path)) + assert safe_tar_extract(tar, path) is None - def test_safe_tar_extract_path_traversal(self): + def test_safe_tar_extract_path_traversal(self) -> None: """Test when a tar file tries to go outside the specified area.""" with tarfile.open(self.tmp_path / self.tar_file, "r") as tar: for member in tar.getmembers(): member.name = f"../{member.name}" path = self.tmp_path / "my_directory" - with self.assertRaises(Exception) as context: + with pytest.raises(Exception) as excinfo: # noqa: PT011 safe_tar_extract(tar, path) - self.assertEqual( - str(context.exception), "Attempted Path Traversal in Tar File" - ) + assert str(excinfo.value) == "Attempted Path Traversal in Tar File" # type: ignore - def test_extractors(self): + def test_extractors(self) -> None: """Test extractors.""" - self.assertEqual(Extractor(Path("test.zip")).archive, Path("test.zip")) - self.assertEqual(TarExtractor().extension, ".tar") - self.assertEqual(TarGzipExtractor().extension, ".tar.gz") - self.assertEqual(ZipExtractor().extension, ".zip") + assert Extractor(Path("test.zip")).archive == Path("test.zip") + assert TarExtractor().extension == ".tar" + assert TarGzipExtractor().extension == ".tar.gz" + assert ZipExtractor().extension == ".zip" for i in [TarExtractor(), ZipExtractor(), ZipExtractor()]: i.set_archive(Path("/tmp/foo")) - self.assertEqual(i.archive.name.endswith(i.extension), True) # type: ignore + assert i.archive.name.endswith(i.extension) is True # type: ignore - def test_SourceProcessor_helpers(self): # noqa: N802 + def test_SourceProcessor_helpers(self) -> None: # noqa: N802 """Test SourceProcessor helpers.""" with mock.patch.object( SourceProcessor, @@ -430,37 +414,34 @@ def test_SourceProcessor_helpers(self): # noqa: N802 ): sp = SourceProcessor(cache_dir=self.tmp_path, sources={}) # type: ignore - self.assertEqual( - sp.sanitize_git_path("git@github.com:foo/bar.git"), - "git_github.com_foo_bar", + assert sp.sanitize_git_path("git@github.com:foo/bar.git") == "git_github.com_foo_bar" + assert ( + sp.sanitize_uri_path("http://example.com/foo/bar.gz@1") + == "http___example.com_foo_bar.gz_1" ) - self.assertEqual( - sp.sanitize_uri_path("http://example.com/foo/bar.gz@1"), - "http___example.com_foo_bar.gz_1", + assert ( + sp.sanitize_git_path("git@github.com:foo/bar.git", "v1") + == "git_github.com_foo_bar-v1" ) - self.assertEqual( - sp.sanitize_git_path("git@github.com:foo/bar.git", "v1"), - "git_github.com_foo_bar-v1", - ) - self.assertEqual( + assert ( sp.determine_git_ls_remote_ref( GitCfnginPackageSourceDefinitionModel(branch="foo", uri="test") - ), - "refs/heads/foo", + ) + == "refs/heads/foo" ) - for i in [cast(Dict[str, Any], {}), {"tag": "foo"}, {"commit": "1234"}]: - self.assertEqual( + for i in [cast(dict[str, Any], {}), {"tag": "foo"}, {"commit": "1234"}]: + assert ( sp.determine_git_ls_remote_ref( GitCfnginPackageSourceDefinitionModel(uri="git@foo", **i) - ), - "HEAD", + ) + == "HEAD" ) - self.assertEqual( + assert ( sp.git_ls_remote( "https://github.com/remind101/stacker.git", "refs/heads/release-1.0" - ), - 
"857b4834980e582874d70feef77bb064b60762d1", + ) + == "857b4834980e582874d70feef77bb064b60762d1" ) bad_configs = [ @@ -471,33 +452,28 @@ def test_SourceProcessor_helpers(self): # noqa: N802 {"uri": "x", "commit": "1234", "branch": "x"}, ] for i in bad_configs: - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): sp.determine_git_ref(GitCfnginPackageSourceDefinitionModel(**i)) - self.assertEqual( + assert ( sp.determine_git_ref( GitCfnginPackageSourceDefinitionModel( - uri="https://github.com/remind101/stacker.git", - branch="release-1.0", + uri="https://github.com/remind101/stacker.git", branch="release-1.0" ) - ), - "857b4834980e582874d70feef77bb064b60762d1", + ) + == "857b4834980e582874d70feef77bb064b60762d1" ) - self.assertEqual( + assert ( sp.determine_git_ref( - GitCfnginPackageSourceDefinitionModel( - **{"uri": "git@foo", "commit": "1234"} - ) - ), - "1234", + GitCfnginPackageSourceDefinitionModel(uri="git@foo", commit="1234") + ) + == "1234" ) - self.assertEqual( + assert ( sp.determine_git_ref( - GitCfnginPackageSourceDefinitionModel( - **{"uri": "git@foo", "tag": "v1.0.0"} - ) - ), - "v1.0.0", + GitCfnginPackageSourceDefinitionModel(uri="git@foo", tag="v1.0.0") + ) + == "v1.0.0" ) @@ -516,25 +492,19 @@ def setUp(self) -> None: """Run before tests.""" self.counter = 0 - def _works_immediately( - self, a: Any, b: Any, x: Any = None, y: Any = None - ) -> List[Any]: + def _works_immediately(self, a: Any, b: Any, x: Any = None, y: Any = None) -> list[Any]: """Works immediately.""" self.counter += 1 return [a, b, x, y] - def _works_second_attempt( - self, a: Any, b: Any, x: Any = None, y: Any = None - ) -> List[Any]: + def _works_second_attempt(self, a: Any, b: Any, x: Any = None, y: Any = None) -> list[Any]: """Works second_attempt.""" self.counter += 1 if self.counter == 2: return [a, b, x, y] raise Exception("Broke.") - def _second_raises_exception2( - self, a: Any, b: Any, x: Any = None, y: Any = None - ) -> List[Any]: + def _second_raises_exception2(self, a: Any, b: Any, x: Any = None, y: Any = None) -> list[Any]: """Second raises exception2.""" self.counter += 1 if self.counter == 2: @@ -542,8 +512,8 @@ def _second_raises_exception2( raise MockException("Broke.") def _throws_exception2( - self, a: Any, b: Any, x: Any = None, y: Any = None - ) -> List[Any]: + self, a: Any, b: Any, x: Any = None, y: Any = None # noqa: ARG002 + ) -> list[Any]: """Throws exception2.""" self.counter += 1 raise MockException("Broke.") diff --git a/tests/unit/config/components/runway/test_deployment_def.py b/tests/unit/config/components/runway/test__deployment_def.py similarity index 68% rename from tests/unit/config/components/runway/test_deployment_def.py rename to tests/unit/config/components/runway/test__deployment_def.py index da7163d3d..c1b6b5a42 100644 --- a/tests/unit/config/components/runway/test_deployment_def.py +++ b/tests/unit/config/components/runway/test__deployment_def.py @@ -1,8 +1,6 @@ """Test runway.config.components.runway._deployment_dev.""" -# pylint: disable=protected-access -# pyright: basic -from typing import Any, Dict, List +from typing import Any import pytest @@ -72,7 +70,7 @@ class TestRunwayDeploymentDefinition: ), ], ) - def test_menu_entry(self, data: Dict[str, Any], expected: str) -> None: + def test_menu_entry(self, data: dict[str, Any], expected: str) -> None: """Test menu_entry.""" assert RunwayDeploymentDefinition.parse_obj(data).menu_entry == expected @@ -94,8 +92,8 @@ def test_modules_setter(self) -> None: 
RunwayModuleDefinition.parse_obj({"name": "test-02", "path": "./"}), ] obj.modules = new_modules - assert obj._data.modules[0] == new_modules[0].data - assert obj._data.modules[1] == new_modules[1].data + assert obj._data.modules[0].model_dump() == new_modules[0].data + assert obj._data.modules[1].model_dump() == new_modules[1].data def test_modules_setter_not_list(self) -> None: """Test modules.setter not a list.""" @@ -107,57 +105,47 @@ def test_modules_setter_not_list(self) -> None: with pytest.raises(TypeError): obj.modules = None # type: ignore with pytest.raises(TypeError): - obj.modules = [ # type: ignore - RunwayDeploymentDefinitionModel( - modules=[], name="test-01", regions=["us-east-1"] - ) + obj.modules = [ + RunwayDeploymentDefinitionModel(modules=[], name="test-01", regions=["us-east-1"]) # type: ignore ] def test_models_setter_invalid_list_item(self) -> None: """Test modules.setter when list item is not supported.""" + obj = RunwayDeploymentDefinition.parse_obj({"regions": ["us-east-1"]}) with pytest.raises(TypeError): - obj = RunwayDeploymentDefinition.parse_obj({"regions": ["us-east-1"]}) obj.modules = [RunwayModuleDefinitionModel(path="./"), "invalid"] # type: ignore def test_parse_obj(self) -> None: """Test parse_obj.""" - data: Dict[str, Any] = {"name": "test", "modules": [], "regions": ["us-east-1"]} + data: dict[str, Any] = {"name": "test", "modules": [], "regions": ["us-east-1"]} obj = RunwayDeploymentDefinition.parse_obj(data) - assert obj._data.dict(exclude_unset=True) == data + assert obj._data.model_dump(exclude_unset=True) == data def test_parse_obj_list(self) -> None: """Test parse_obj list.""" - data: List[Dict[str, Any]] = [ - {"name": "test", "modules": [], "regions": ["us-east-1"]} - ] + data: list[dict[str, Any]] = [{"name": "test", "modules": [], "regions": ["us-east-1"]}] result = RunwayDeploymentDefinition.parse_obj(data) assert isinstance(result, list) assert len(result) == 1 - # for some reason, the current version of pylint does not see this as list - # pylint: disable=unsubscriptable-object - assert result[0]._data.dict(exclude_unset=True) == data[0] + assert result[0]._data.model_dump(exclude_unset=True) == data[0] def test_register_variable(self) -> None: """Test _register_variable.""" - obj = RunwayDeploymentDefinition.parse_obj( - {"name": "test", "regions": ["us-east-1"]} - ) + obj = RunwayDeploymentDefinition.parse_obj({"name": "test", "regions": ["us-east-1"]}) assert obj._vars["regions"].name == "test.regions" def test_reverse(self) -> None: """Test reverse.""" - data: RunwayDeploymentDefinitionModel = ( - RunwayDeploymentDefinitionModel.parse_obj( - { - "name": "test", - "modules": [ - {"name": "test-01", "path": "./"}, - {"name": "test-02", "path": "./"}, - ], - "regions": ["us-east-1", "us-west-2"], - } - ) + data: RunwayDeploymentDefinitionModel = RunwayDeploymentDefinitionModel.model_validate( + { + "name": "test", + "modules": [ + {"name": "test-01", "path": "./"}, + {"name": "test-02", "path": "./"}, + ], + "regions": ["us-east-1", "us-west-2"], + } ) obj = RunwayDeploymentDefinition(data) assert not obj.reverse() @@ -168,40 +156,36 @@ def test_reverse(self) -> None: def test_reverse_parallel_modules(self) -> None: """Test reverse parallel modules.""" - data: RunwayDeploymentDefinitionModel = ( - RunwayDeploymentDefinitionModel.parse_obj( - { - "name": "test", - "modules": [ - { - "parallel": [ - {"name": "test-01", "path": "./"}, - {"name": "test-02", "path": "./"}, - ] - }, - ], - "regions": ["us-east-1", "us-west-2"], - } - ) + 
data: RunwayDeploymentDefinitionModel = RunwayDeploymentDefinitionModel.model_validate( + { + "name": "test", + "modules": [ + { + "parallel": [ + {"name": "test-01", "path": "./"}, + {"name": "test-02", "path": "./"}, + ] + }, + ], + "regions": ["us-east-1", "us-west-2"], + } ) obj = RunwayDeploymentDefinition(data) assert not obj.reverse() assert obj._data.modules != data.modules - invert_data: RunwayDeploymentDefinitionModel = data.copy(deep=True) + invert_data: RunwayDeploymentDefinitionModel = data.model_copy(deep=True) for mod in invert_data.modules: mod.parallel.reverse() assert obj._data.modules == invert_data.modules def test_reverse_parallel_regions(self) -> None: """Test reverse parallel regions.""" - data: RunwayDeploymentDefinitionModel = ( - RunwayDeploymentDefinitionModel.parse_obj( - { - "name": "test", - "modules": [{"name": "test-01", "path": "./"}], - "parallel_regions": ["us-east-1", "us-west-2"], - } - ) + data: RunwayDeploymentDefinitionModel = RunwayDeploymentDefinitionModel.model_validate( + { + "name": "test", + "modules": [{"name": "test-01", "path": "./"}], + "parallel_regions": ["us-east-1", "us-west-2"], + } ) obj = RunwayDeploymentDefinition(data) assert not obj.reverse() diff --git a/tests/unit/config/components/runway/test_module_def.py b/tests/unit/config/components/runway/test__module_def.py similarity index 90% rename from tests/unit/config/components/runway/test_module_def.py rename to tests/unit/config/components/runway/test__module_def.py index 4c2656b2a..4f47fadc8 100644 --- a/tests/unit/config/components/runway/test_module_def.py +++ b/tests/unit/config/components/runway/test__module_def.py @@ -1,9 +1,7 @@ """Test runway.config.components.runway._module_def.""" -# pylint: disable=protected-access -# pyright: basic from pathlib import Path -from typing import Any, Dict +from typing import Any import pytest @@ -33,7 +31,7 @@ def test_child_modules_setter(self) -> None: ] obj.child_modules = new_modules assert obj._data.parallel[0] == new_modules[0] - assert obj._data.parallel[1] == new_modules[1].data # type: ignore + assert obj._data.parallel[1].model_dump() == new_modules[1].data # type: ignore def test_child_modules_setter_not_list(self) -> None: """Test child_modules.setter not a list.""" @@ -47,11 +45,11 @@ def test_child_modules_setter_not_list(self) -> None: def test_child_modules_setter_invalid_list_item(self) -> None: """Test child_modules.setter when list item is not supported.""" + obj = RunwayModuleDefinition.parse_obj({"path": "./"}) with pytest.raises(TypeError): - obj = RunwayModuleDefinition.parse_obj({"path": "./"}) - obj.child_modules = [ # type: ignore + obj.child_modules = [ RunwayModuleDefinitionModel(path="./"), - "invalid", + "invalid", # type: ignore ] @pytest.mark.parametrize( @@ -81,7 +79,7 @@ def test_child_modules_setter_invalid_list_item(self) -> None: ), ], ) - def test_is_parent(self, data: Dict[str, Any], expected: bool) -> None: + def test_is_parent(self, data: dict[str, Any], expected: bool) -> None: """Test is_parent.""" assert RunwayModuleDefinition.parse_obj(data).is_parent is expected @@ -115,7 +113,7 @@ def test_is_parent(self, data: Dict[str, Any], expected: bool) -> None: ), ], ) - def test_menu_entry(self, data: Dict[str, Any], expected: str) -> None: + def test_menu_entry(self, data: dict[str, Any], expected: str) -> None: """Test menu entry.""" assert RunwayModuleDefinition.parse_obj(data).menu_entry == expected @@ -123,7 +121,7 @@ def test_parse_obj(self) -> None: """Test parse_obj.""" data = {"name": 
Path.cwd().name, "path": "./"} obj = RunwayModuleDefinition.parse_obj(data) - assert obj._data.dict(exclude_unset=True) == data + assert obj._data.model_dump(exclude_unset=True) == data def test_register_variable(self) -> None: """Test _register_variable.""" @@ -132,7 +130,7 @@ def test_register_variable(self) -> None: def test_reverse(self) -> None: """Test reverse.""" - data: RunwayModuleDefinitionModel = RunwayModuleDefinitionModel.parse_obj( + data: RunwayModuleDefinitionModel = RunwayModuleDefinitionModel.model_validate( { "name": "parallel_parent", "parallel": [ diff --git a/tests/unit/config/components/runway/test__test_def.py b/tests/unit/config/components/runway/test__test_def.py new file mode 100644 index 000000000..41eb172ee --- /dev/null +++ b/tests/unit/config/components/runway/test__test_def.py @@ -0,0 +1,27 @@ +"""Test runway.config.components.runway._test_def.""" + +import pytest +from pydantic import ValidationError + +from runway.config.components.runway import RunwayTestDefinition + + +class TestRunwayTestDefinition: + """Test runway.config.components.runway._test_def.RunwayTestDefinition.""" + + def test_new_invalid(self) -> None: + """Test new invalid type.""" + with pytest.raises(ValidationError, match="Input should be a valid dictionary or instance"): + RunwayTestDefinition.parse_obj([]) + + def test_parse_obj_invalid(self) -> None: + """Test parse_obj invalid object.""" + with pytest.raises(ValidationError): + RunwayTestDefinition.parse_obj({"type": "invalid"}) + + def test_register_variable(self) -> None: + """Test _register_variable.""" + obj = RunwayTestDefinition.parse_obj( + {"type": "script", "name": "test_register_variable", "required": True} + ) + assert obj._vars["required"].name == "test_register_variable.required" diff --git a/tests/unit/config/components/runway/test_variables_def.py b/tests/unit/config/components/runway/test__variables_def.py similarity index 81% rename from tests/unit/config/components/runway/test_variables_def.py rename to tests/unit/config/components/runway/test__variables_def.py index be6701f2f..b51b7d9d8 100644 --- a/tests/unit/config/components/runway/test_variables_def.py +++ b/tests/unit/config/components/runway/test__variables_def.py @@ -1,6 +1,5 @@ """Test runway.config.components.runway._variables_def.""" -# pyright: basic from pathlib import Path import pytest @@ -17,18 +16,13 @@ def test_init_no_file(self, cd_tmp_path: Path) -> None: """Test init with no file.""" assert not RunwayVariablesDefinition.parse_obj({"sys_path": cd_tmp_path}) - @pytest.mark.parametrize( - "filename", ("runway.variables.yml", "runway.variables.yaml") - ) + @pytest.mark.parametrize("filename", ["runway.variables.yml", "runway.variables.yaml"]) def test_init_autofind_file(self, cd_tmp_path: Path, filename: str) -> None: """Test init autofind file.""" data = {"key": "val"} (cd_tmp_path / filename).write_text(yaml.dump(data)) (cd_tmp_path / "runway.yml").touch() - assert ( - RunwayVariablesDefinition.parse_obj({"sys_path": cd_tmp_path})["key"] - == "val" - ) + assert RunwayVariablesDefinition.parse_obj({"sys_path": cd_tmp_path})["key"] == "val" def test_init_defined_file_path(self, cd_tmp_path: Path) -> None: """Test init with file_path.""" @@ -36,10 +30,7 @@ def test_init_defined_file_path(self, cd_tmp_path: Path) -> None: file_path = cd_tmp_path / "anything.yml" file_path.write_text(yaml.dump(data)) (cd_tmp_path / "runway.yml").touch() - assert ( - RunwayVariablesDefinition.parse_obj({"file_path": file_path})["key"] - == "val" - ) + assert 
RunwayVariablesDefinition.parse_obj({"file_path": file_path})["key"] == "val" def test_init_defined_file_path_no_found(self, cd_tmp_path: Path) -> None: """Test init with file_path not found.""" diff --git a/tests/unit/config/components/runway/test_base.py b/tests/unit/config/components/runway/test_base.py index 92054cf22..c06e580f7 100644 --- a/tests/unit/config/components/runway/test_base.py +++ b/tests/unit/config/components/runway/test_base.py @@ -1,14 +1,12 @@ """Test runway.config.components.runway.base.""" -# pylint: disable=protected-access -# pyright: basic from __future__ import annotations from typing import TYPE_CHECKING, Any +from unittest.mock import MagicMock, call import pytest -from mock import MagicMock, call -from pydantic import Extra +from pydantic import ConfigDict from runway.config.components.runway import RunwayVariablesDefinition from runway.config.components.runway.base import ConfigComponentDefinition @@ -16,7 +14,6 @@ from runway.exceptions import UnresolvedVariable if TYPE_CHECKING: - from pytest import MonkeyPatch from ....factories import MockRunwayContext @@ -24,17 +21,12 @@ class SampleConfigProperty(ConfigProperty): """Data class for SampleConfigComponentDefinition.""" + model_config = ConfigDict(extra="allow", validate_assignment=True, validate_default=True) + name: str = "test" var_attr: Any = None var_attr_pre: Any = None - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.allow - validate_all = False - validate_assignment = False - class SampleConfigComponentDefinition(ConfigComponentDefinition): """Subclass used to test a parent class.""" @@ -74,15 +66,13 @@ def parse_obj(cls, obj: Any) -> SampleConfigComponentDefinition: obj: The object to parse. """ - return cls(SampleConfigProperty.parse_obj(obj)) + return cls(SampleConfigProperty.model_validate(obj)) class TestConfigComponentDefinition: """Test runway.config.components.runway.base.ConfigComponentDefinition.""" - VARIABLES = RunwayVariablesDefinition.parse_obj( - {"key": "val", "test": {"key": "test-val"}} - ) + VARIABLES = RunwayVariablesDefinition.parse_obj({"key": "val", "test": {"key": "test-val"}}) def test_contains(self) -> None: """Test __contains__.""" @@ -97,8 +87,9 @@ def test_default(self) -> None: data = SampleConfigProperty() obj = SampleConfigComponentDefinition(data) assert obj._data == data - assert obj.data == data.dict() - assert not obj._vars and isinstance(obj._vars, dict) + assert obj.data == data.model_dump() + assert not obj._vars + assert isinstance(obj._vars, dict) def test_get(self) -> None: """Test get.""" @@ -113,9 +104,7 @@ def test_getattr(self, runway_context: MockRunwayContext) -> None: var_attr="${var ${env DEPLOY_ENVIRONMENT}.key}", var_attr_pre="${var key}" ) obj = SampleConfigComponentDefinition(data) - assert not obj.resolve( - runway_context, pre_process=True, variables=self.VARIABLES - ) + assert not obj.resolve(runway_context, pre_process=True, variables=self.VARIABLES) assert obj.var_attr_pre == self.VARIABLES["key"] with pytest.raises(UnresolvedVariable): @@ -123,12 +112,10 @@ def test_getattr(self, runway_context: MockRunwayContext) -> None: with pytest.raises(AttributeError): assert not obj.missing - def test_getitem(self, monkeypatch: MonkeyPatch) -> None: + def test_getitem(self, monkeypatch: pytest.MonkeyPatch) -> None: """Test __getitem__.""" mock_getattr = MagicMock(side_effect=["val", AttributeError]) - monkeypatch.setattr( - SampleConfigComponentDefinition, "__getattr__", mock_getattr - ) + 
monkeypatch.setattr(SampleConfigComponentDefinition, "__getattr__", mock_getattr) obj = SampleConfigComponentDefinition.parse_obj({}) assert obj["key"] == "val" @@ -175,9 +162,7 @@ def test_resolve_pre_process(self, runway_context: MockRunwayContext) -> None: var_attr="${var ${env DEPLOY_ENVIRONMENT}.key}", var_attr_pre="${var key}" ) obj = SampleConfigComponentDefinition(data) - assert not obj.resolve( - runway_context, pre_process=True, variables=self.VARIABLES - ) + assert not obj.resolve(runway_context, pre_process=True, variables=self.VARIABLES) assert not obj._vars["var_attr"].resolved with pytest.raises(UnresolvedVariable): @@ -191,7 +176,7 @@ def test_setattr(self) -> None: """Test __setattr__.""" obj = SampleConfigComponentDefinition.parse_obj({}) assert not obj._data.get("key") - obj.key = "val" # pylint: disable=attribute-defined-outside-init + obj.key = "val" assert obj._data["key"] == "val" assert obj.key == "val" @@ -207,7 +192,7 @@ def test_setattr_property(self) -> None: def test_setattr_underscore(self) -> None: """Test __setattr__ underscore.""" obj = SampleConfigComponentDefinition.parse_obj({}) - obj._key = "_val" # pylint: disable=attribute-defined-outside-init + obj._key = "_val" assert "_key" not in obj._data assert obj._key == "_val" diff --git a/tests/unit/config/components/runway/test_test_def.py b/tests/unit/config/components/runway/test_test_def.py deleted file mode 100644 index 3cad04376..000000000 --- a/tests/unit/config/components/runway/test_test_def.py +++ /dev/null @@ -1,112 +0,0 @@ -"""Test runway.config.components.runway._test_def.""" - -# pylint: disable=protected-access -# pyright: basic -import pytest -from pydantic import ValidationError - -from runway.config.components.runway import ( - CfnLintRunwayTestDefinition, - RunwayTestDefinition, - ScriptRunwayTestDefinition, - YamlLintRunwayTestDefinition, -) -from runway.config.models.runway import ( - CfnLintRunwayTestDefinitionModel, - ScriptRunwayTestDefinitionModel, - YamlLintRunwayTestDefinitionModel, -) - - -class TestCfnLintRunwayTestDefinition: - """Test runway.config.components.runway._test_def.CfnLintRunwayTestDefinition.""" - - def test_parse_obj(self) -> None: - """Test parse_obj.""" - assert isinstance( - CfnLintRunwayTestDefinition.parse_obj({}), CfnLintRunwayTestDefinition - ) - - -class TestRunwayTestDefinition: - """Test runway.config.components.runway._test_def.RunwayTestDefinition.""" - - def test_new_cfn_lint(self) -> None: - """Test creation CfnLintRunwayTestDefinition.""" - assert isinstance( - RunwayTestDefinition(CfnLintRunwayTestDefinitionModel()), - CfnLintRunwayTestDefinition, - ) - - def test_new_invalid(self) -> None: - """Test new invalid type.""" - with pytest.raises(TypeError) as excinfo: - RunwayTestDefinition({}) # type: ignore - assert str(excinfo.value).startswith("expected data of type") - - def test_new_script(self) -> None: - """Test creation ScriptRunwayTestDefinition.""" - assert isinstance( - RunwayTestDefinition(ScriptRunwayTestDefinitionModel()), - ScriptRunwayTestDefinition, - ) - - def test_new_yamllint(self) -> None: - """Test creation ScriptRunwayTestDefinition.""" - assert isinstance( - RunwayTestDefinition(YamlLintRunwayTestDefinitionModel()), - YamlLintRunwayTestDefinition, - ) - - def test_parse_obj_cfn_lint(self) -> None: - """Test parse_obj CfnLintRunwayTestDefinition.""" - assert isinstance( - RunwayTestDefinition.parse_obj({"type": "cfn-lint"}), - CfnLintRunwayTestDefinition, - ) - - def test_parse_obj_invalid(self) -> None: - """Test parse_obj 
invalid object.""" - with pytest.raises(ValidationError): - RunwayTestDefinition.parse_obj({"type": "invalid"}) - - def test_parse_obj_script(self) -> None: - """Test parse_obj ScriptRunwayTestDefinition.""" - assert isinstance( - RunwayTestDefinition.parse_obj({"type": "script"}), - ScriptRunwayTestDefinition, - ) - - def test_parse_obj_yamllint(self) -> None: - """Test parse_obj YamlLintRunwayTestDefinition.""" - assert isinstance( - RunwayTestDefinition.parse_obj({"type": "yamllint"}), - YamlLintRunwayTestDefinition, - ) - - def test_register_variable(self) -> None: - """Test _register_variable.""" - obj = RunwayTestDefinition.parse_obj( - {"type": "script", "name": "test_register_variable", "required": True} - ) - assert obj._vars["required"].name == "test_register_variable.required" - - -class TestScriptRunwayTestDefinition: - """Test runway.config.components.runway._test_def.ScriptRunwayTestDefinition.""" - - def test_parse_obj(self) -> None: - """Test parse_obj.""" - assert isinstance( - ScriptRunwayTestDefinition.parse_obj({}), ScriptRunwayTestDefinition - ) - - -class TestYamlLintRunwayTestDefinition: - """Test runway.config.components.runway._test_def.YamlLintRunwayTestDefinition.""" - - def test_parse_obj(self) -> None: - """Test parse_obj.""" - assert isinstance( - YamlLintRunwayTestDefinition.parse_obj({}), YamlLintRunwayTestDefinition - ) diff --git a/tests/unit/config/models/cfngin/test_cfngin.py b/tests/unit/config/models/cfngin/test___init__.py similarity index 68% rename from tests/unit/config/models/cfngin/test_cfngin.py rename to tests/unit/config/models/cfngin/test___init__.py index 82e8248ff..d951fc9ad 100644 --- a/tests/unit/config/models/cfngin/test_cfngin.py +++ b/tests/unit/config/models/cfngin/test___init__.py @@ -1,6 +1,5 @@ """Test runway.config.models.cfngin.__init__.""" -# pyright: basic import platform from pathlib import Path @@ -19,23 +18,21 @@ class TestCfnginConfigDefinitionModel: """Test runway.config.models.cfngin.CfnginConfigDefinitionModel.""" - @pytest.mark.parametrize( - "field", ["post_deploy", "post_destroy", "pre_deploy", "pre_destroy"] - ) + @pytest.mark.parametrize("field", ["post_deploy", "post_destroy", "pre_deploy", "pre_destroy"]) def test_convert_hook_definitions(self, field: str) -> None: """Test _convert_hook_definitions.""" dict_hook = {"name": {"path": "something"}} list_hook = [{"path": "something"}] assert ( - CfnginConfigDefinitionModel.parse_obj( + CfnginConfigDefinitionModel.model_validate( {"namespace": "test", field: dict_hook} - ).dict(exclude_unset=True)[field] + ).model_dump(exclude_unset=True)[field] == list_hook ) assert ( - CfnginConfigDefinitionModel.parse_obj( + CfnginConfigDefinitionModel.model_validate( {"namespace": "test", field: list_hook} - ).dict(exclude_unset=True)[field] + ).model_dump(exclude_unset=True)[field] == list_hook ) @@ -47,14 +44,14 @@ def test_convert_stack_definitions(self) -> None: CfnginConfigDefinitionModel( namespace="test", stacks=dict_stack, # type: ignore - ).dict(exclude_unset=True)["stacks"] + ).model_dump(exclude_unset=True)["stacks"] == list_stack ) assert ( CfnginConfigDefinitionModel( namespace="test", stacks=list_stack, # type: ignore - ).dict(exclude_unset=True)["stacks"] + ).model_dump(exclude_unset=True)["stacks"] == list_stack ) @@ -110,17 +107,15 @@ def test_resolve_path_fields(self) -> None: cfngin_cache_dir="./cache", # type: ignore sys_path="./something", # type: ignore ) - assert obj.cfngin_cache_dir and obj.cfngin_cache_dir.is_absolute() - assert obj.sys_path and 
obj.sys_path.is_absolute() + assert obj.cfngin_cache_dir + assert obj.cfngin_cache_dir.is_absolute() + assert obj.sys_path + assert obj.sys_path.is_absolute() def test_required_fields(self) -> None: """Test required fields.""" - with pytest.raises(ValidationError) as excinfo: - CfnginConfigDefinitionModel.parse_obj({}) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("namespace",) - assert errors[0]["msg"] == "field required" + with pytest.raises(ValidationError, match="namespace\n Field required"): + CfnginConfigDefinitionModel.model_validate({}) def test_validate_unique_stack_names(self) -> None: """Test _validate_unique_stack_names.""" @@ -131,23 +126,22 @@ def test_validate_unique_stack_names(self) -> None: {"name": "stack1", "class_path": "stack1"}, ], } - assert CfnginConfigDefinitionModel.parse_obj(data) + assert CfnginConfigDefinitionModel.model_validate(data) def test_validate_unique_stack_names_invalid(self) -> None: """Test _validate_unique_stack_names.""" - with pytest.raises(ValidationError) as excinfo: - data = { - "namespace": "test", - "stacks": [ - {"name": "stack0", "class_path": "stack0"}, - {"name": "stack0", "class_path": "stack0"}, - ], - } - CfnginConfigDefinitionModel.parse_obj(data) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("stacks",) - assert errors[0]["msg"] == "Duplicate stack stack0 found at index 0" + with pytest.raises( + ValidationError, match="stacks\n Value error, Duplicate stack stack0 found at index 0" + ): + CfnginConfigDefinitionModel.model_validate( + { + "namespace": "test", + "stacks": [ + {"name": "stack0", "class_path": "stack0"}, + {"name": "stack0", "class_path": "stack0"}, + ], + } + ) class TestCfnginHookDefinitionModel: @@ -155,15 +149,11 @@ class TestCfnginHookDefinitionModel: def test_extra(self) -> None: """Test extra fields.""" - with pytest.raises(ValidationError) as excinfo: + with pytest.raises(ValidationError, match="invalid\n Extra inputs are not permitted"): CfnginHookDefinitionModel( invalid="something", # type: ignore path="something", ) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("invalid",) - assert errors[0]["msg"] == "extra fields not permitted" def test_field_defaults(self) -> None: """Test field default values.""" @@ -176,12 +166,8 @@ def test_field_defaults(self) -> None: def test_required_fields(self) -> None: """Test required fields.""" - with pytest.raises(ValidationError) as excinfo: - CfnginHookDefinitionModel.parse_obj({}) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("path",) - assert errors[0]["msg"] == "field required" + with pytest.raises(ValidationError, match="path\n Field required"): + CfnginHookDefinitionModel.model_validate({}) class TestCfnginStackDefinitionModel: @@ -189,16 +175,12 @@ class TestCfnginStackDefinitionModel: def test_extra(self) -> None: """Test extra fields.""" - with pytest.raises(ValidationError) as excinfo: + with pytest.raises(ValidationError, match="invalid\n Extra inputs are not permitted"): CfnginStackDefinitionModel( class_path="something", invalid="something", # type: ignore name="stack-name", ) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("invalid",) - assert errors[0]["msg"] == "extra fields not permitted" def test_field_defaults(self) -> None: """Test field default values.""" @@ -222,12 +204,10 @@ def test_field_defaults(self) -> None: def 
test_required_fields(self) -> None: """Test required fields.""" - with pytest.raises(ValidationError) as excinfo: - CfnginStackDefinitionModel.parse_obj({}) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("__root__",) - assert errors[0]["msg"] == "either class_path or template_path must be defined" + with pytest.raises( + ValidationError, match="Value error, either class_path or template_path must be defined" + ): + CfnginStackDefinitionModel.model_validate({}) @pytest.mark.skipif( platform.system() == "Windows", @@ -246,31 +226,22 @@ def test_resolve_path_fields(self) -> None: def test_required_fields_w_class_path(self) -> None: """Test required fields.""" - with pytest.raises(ValidationError) as excinfo: - CfnginStackDefinitionModel.parse_obj({"class_path": "something"}) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("name",) - assert errors[0]["msg"] == "field required" + with pytest.raises(ValidationError, match="name\n Field required"): + CfnginStackDefinitionModel.model_validate({"class_path": "something"}) def test_validate_class_and_template(self) -> None: """Test _validate_class_and_template.""" - with pytest.raises(ValidationError) as excinfo: + with pytest.raises( + ValidationError, + match="Value error, only one of class_path or template_path can be defined", + ): CfnginStackDefinitionModel( class_path="something", name="stack-name", template_path="./something.yml", # type: ignore ) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("__root__",) - assert ( - errors[0]["msg"] == "only one of class_path or template_path can be defined" - ) - @pytest.mark.parametrize( - "enabled, locked", [(True, True), (False, True), (False, False)] - ) + @pytest.mark.parametrize("enabled, locked", [(True, True), (False, True), (False, False)]) def test_validate_class_or_template(self, enabled: bool, locked: bool) -> None: """Test _validate_class_or_template.""" assert CfnginStackDefinitionModel( @@ -285,9 +256,7 @@ def test_validate_class_or_template(self, enabled: bool, locked: bool) -> None: def test_validate_class_or_template_invalid(self) -> None: """Test _validate_class_or_template invalid.""" - with pytest.raises(ValidationError) as excinfo: + with pytest.raises( + ValidationError, match="Value error, either class_path or template_path must be defined" + ): CfnginStackDefinitionModel(enabled=True, locked=False, name="stack-name") - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("__root__",) - assert errors[0]["msg"] == "either class_path or template_path must be defined" diff --git a/tests/unit/config/models/cfngin/test_package_sources.py b/tests/unit/config/models/cfngin/test__package_sources.py similarity index 64% rename from tests/unit/config/models/cfngin/test_package_sources.py rename to tests/unit/config/models/cfngin/test__package_sources.py index 21fc778de..cd191d1b0 100644 --- a/tests/unit/config/models/cfngin/test_package_sources.py +++ b/tests/unit/config/models/cfngin/test__package_sources.py @@ -1,8 +1,5 @@ """Test runway.config.models.cfngin._package_sources.""" -# pyright: basic -from typing import Dict, List - import pytest from pydantic import ValidationError @@ -19,12 +16,8 @@ class TestCfnginPackageSourcesDefinitionModel: def test_extra(self) -> None: """Test extra fields.""" - with pytest.raises(ValidationError) as excinfo: - CfnginPackageSourcesDefinitionModel.parse_obj({"invalid": "val"}) - errors = 
excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("invalid",) - assert errors[0]["msg"] == "extra fields not permitted" + with pytest.raises(ValidationError, match="invalid\n Extra inputs are not permitted"): + CfnginPackageSourcesDefinitionModel.model_validate({"invalid": "val"}) def test_field_defaults(self) -> None: """Test field default values.""" @@ -41,7 +34,7 @@ def test_fields(self) -> None: "s3": [{"bucket": "bucket", "key": "something"}], } obj: CfnginPackageSourcesDefinitionModel = ( - CfnginPackageSourcesDefinitionModel.parse_obj(data) + CfnginPackageSourcesDefinitionModel.model_validate(data) ) assert isinstance(obj.git[0], GitCfnginPackageSourceDefinitionModel) assert isinstance(obj.local[0], LocalCfnginPackageSourceDefinitionModel) @@ -53,15 +46,11 @@ class TestGitCfnginPackageSourceDefinitionModel: def test_extra(self) -> None: """Test extra fields.""" - with pytest.raises(ValidationError) as excinfo: + with pytest.raises(ValidationError, match="invalid\n Extra inputs are not permitted"): GitCfnginPackageSourceDefinitionModel( invalid="something", # type: ignore uri="something", ) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("invalid",) - assert errors[0]["msg"] == "extra fields not permitted" def test_field_defaults(self) -> None: """Test field default values.""" @@ -75,12 +64,8 @@ def test_field_defaults(self) -> None: def test_required_fields(self) -> None: """Test required fields.""" - with pytest.raises(ValidationError) as excinfo: - GitCfnginPackageSourceDefinitionModel() # type: ignore - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("uri",) - assert errors[0]["msg"] == "field required" + with pytest.raises(ValidationError, match="uri\n Field required"): + GitCfnginPackageSourceDefinitionModel.model_validate({}) @pytest.mark.parametrize( "ref", @@ -90,12 +75,11 @@ def test_required_fields(self) -> None: {"field": "tag", "value": "v1.0.0"}, ], ) - def test_validate_one_ref(self, ref: Dict[str, str]) -> None: + def test_validate_one_ref(self, ref: dict[str, str]) -> None: """Test _validate_one_ref.""" data = {"uri": "something", ref["field"]: ref["value"]} assert ( - GitCfnginPackageSourceDefinitionModel.parse_obj(data)[ref["field"]] - == ref["value"] + GitCfnginPackageSourceDefinitionModel.model_validate(data)[ref["field"]] == ref["value"] ) @pytest.mark.parametrize( @@ -120,31 +104,26 @@ def test_validate_one_ref(self, ref: Dict[str, str]) -> None: ], ], ) - def test_validate_one_ref_invalid(self, refs: List[Dict[str, str]]) -> None: + def test_validate_one_ref_invalid(self, refs: list[dict[str, str]]) -> None: """Test _validate_one_ref invalid values.""" data = {"uri": "something", **{ref["field"]: ref["value"] for ref in refs}} - with pytest.raises(ValidationError) as excinfo: - GitCfnginPackageSourceDefinitionModel.parse_obj(data) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("__root__",) - assert errors[0]["msg"].startswith("only one of") + with pytest.raises( + ValidationError, + match=r"1 validation error.*\n Value error, only one of \['branch', 'commit', 'tag'\] can be defined", + ): + GitCfnginPackageSourceDefinitionModel.model_validate(data) class TestLocalCfnginPackageSourceDefinitionModel: - """Test runway.config.models.cfngin._package_sources.LocalCfnginPackageSourceDefinitionModel.""" # noqa + """Test runway.config.models.cfngin._package_sources.LocalCfnginPackageSourceDefinitionModel.""" def 
test_extra(self) -> None: """Test extra fields.""" - with pytest.raises(ValidationError) as excinfo: + with pytest.raises(ValidationError, match="invalid\n Extra inputs are not permitted"): LocalCfnginPackageSourceDefinitionModel( invalid="something", # type: ignore source="something", ) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("invalid",) - assert errors[0]["msg"] == "extra fields not permitted" def test_field_defaults(self) -> None: """Test field default values.""" @@ -155,12 +134,8 @@ def test_field_defaults(self) -> None: def test_required_fields(self) -> None: """Test required fields.""" - with pytest.raises(ValidationError) as excinfo: - LocalCfnginPackageSourceDefinitionModel() # type: ignore - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("source",) - assert errors[0]["msg"] == "field required" + with pytest.raises(ValidationError, match="source\n Field required"): + LocalCfnginPackageSourceDefinitionModel.model_validate({}) class TestS3CfnginPackageSourceDefinitionModel: @@ -168,27 +143,20 @@ class TestS3CfnginPackageSourceDefinitionModel: def test_extra(self) -> None: """Test extra fields.""" - with pytest.raises(ValidationError) as excinfo: + with pytest.raises(ValidationError, match="invalid\n Extra inputs are not permitted"): S3CfnginPackageSourceDefinitionModel( bucket="something", key="something", invalid="something", # type: ignore ) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("invalid",) - assert errors[0]["msg"] == "extra fields not permitted" def test_required_fields(self) -> None: """Test required fields.""" - with pytest.raises(ValidationError) as excinfo: - S3CfnginPackageSourceDefinitionModel() # type: ignore - errors = excinfo.value.errors() - assert len(errors) == 2 - assert errors[0]["loc"] == ("bucket",) - assert errors[0]["msg"] == "field required" - assert errors[1]["loc"] == ("key",) - assert errors[1]["msg"] == "field required" + with pytest.raises( + ValidationError, + match="2 validation errors.*\nbucket\n Field required.*\n.*\nkey\n Field required", + ): + S3CfnginPackageSourceDefinitionModel.model_validate({}) def test_field_defaults(self) -> None: """Test field default values.""" diff --git a/tests/unit/config/models/runway/options/test_cdk.py b/tests/unit/config/models/runway/options/test__cdk.py similarity index 64% rename from tests/unit/config/models/runway/options/test_cdk.py rename to tests/unit/config/models/runway/options/test__cdk.py index a2d9eba81..56efb0c11 100644 --- a/tests/unit/config/models/runway/options/test_cdk.py +++ b/tests/unit/config/models/runway/options/test__cdk.py @@ -1,6 +1,5 @@ """Test runway.config.models.runway.options.cdk.""" -# pyright: basic from runway.config.models.runway.options.cdk import RunwayCdkModuleOptionsDataModel @@ -10,18 +9,17 @@ class TestRunwayCdkModuleOptionsDataModel: def test_init_default(self) -> None: """Test init default.""" obj = RunwayCdkModuleOptionsDataModel() - assert not obj.build_steps and isinstance(obj.build_steps, list) + assert not obj.build_steps + assert isinstance(obj.build_steps, list) assert not obj.skip_npm_ci def test_init_extra(self) -> None: """Test init extra.""" - obj = RunwayCdkModuleOptionsDataModel.parse_obj({"invalid": "val"}) - assert "invalid" not in obj.dict() + obj = RunwayCdkModuleOptionsDataModel.model_validate({"invalid": "val"}) + assert "invalid" not in obj.model_dump() def test_init(self) -> None: """Test init.""" - obj = 
RunwayCdkModuleOptionsDataModel( - build_steps=["test0", "test1"], skip_npm_ci=True - ) + obj = RunwayCdkModuleOptionsDataModel(build_steps=["test0", "test1"], skip_npm_ci=True) assert obj.build_steps == ["test0", "test1"] assert obj.skip_npm_ci diff --git a/tests/unit/config/models/runway/options/test_k8s.py b/tests/unit/config/models/runway/options/test__k8s.py similarity index 75% rename from tests/unit/config/models/runway/options/test_k8s.py rename to tests/unit/config/models/runway/options/test__k8s.py index 349d025b1..8365621c8 100644 --- a/tests/unit/config/models/runway/options/test_k8s.py +++ b/tests/unit/config/models/runway/options/test__k8s.py @@ -1,6 +1,5 @@ """Test runway.config.models.runway.options.k8s.""" -# pyright: basic from __future__ import annotations from typing import TYPE_CHECKING @@ -22,13 +21,11 @@ def test_init_default(self) -> None: def test_init_extra(self) -> None: """Test init extra.""" - obj = RunwayK8sModuleOptionsDataModel.parse_obj({"invalid": "val"}) - assert "invalid" not in obj.dict() + obj = RunwayK8sModuleOptionsDataModel.model_validate({"invalid": "val"}) + assert "invalid" not in obj.model_dump() def test_init(self, tmp_path: Path) -> None: """Test init.""" - obj = RunwayK8sModuleOptionsDataModel( - kubectl_version="0.13.0", overlay_path=tmp_path - ) + obj = RunwayK8sModuleOptionsDataModel(kubectl_version="0.13.0", overlay_path=tmp_path) assert obj.kubectl_version == "0.13.0" assert obj.overlay_path == tmp_path diff --git a/tests/unit/config/models/runway/options/test_serverless.py b/tests/unit/config/models/runway/options/test__serverless.py similarity index 85% rename from tests/unit/config/models/runway/options/test_serverless.py rename to tests/unit/config/models/runway/options/test__serverless.py index 049b3b180..3b2939571 100644 --- a/tests/unit/config/models/runway/options/test_serverless.py +++ b/tests/unit/config/models/runway/options/test__serverless.py @@ -1,6 +1,5 @@ """Test runway.config.models.runway.options.serverless.""" -# pyright: basic import pytest from pydantic import ValidationError @@ -16,10 +15,10 @@ class TestRunwayServerlessModuleOptionsDataModel: def test_init_default(self) -> None: """Test init default values.""" obj = RunwayServerlessModuleOptionsDataModel() - assert not obj.args and isinstance(obj.args, list) - assert not obj.extend_serverless_yml and isinstance( - obj.extend_serverless_yml, dict - ) + assert not obj.args + assert isinstance(obj.args, list) + assert not obj.extend_serverless_yml + assert isinstance(obj.extend_serverless_yml, dict) assert obj.promotezip == RunwayServerlessPromotezipOptionDataModel() assert obj.skip_npm_ci is False @@ -31,7 +30,7 @@ def test_init(self) -> None: "promotezip": {"bucketname": "test"}, "skip_npm_ci": True, } - obj = RunwayServerlessModuleOptionsDataModel(**data) + obj = RunwayServerlessModuleOptionsDataModel.model_validate(data) assert obj.args == data["args"] assert obj.extend_serverless_yml == data["extend_serverless_yml"] assert obj.promotezip == RunwayServerlessPromotezipOptionDataModel( @@ -56,7 +55,7 @@ def test_init_default(self) -> None: def test_init_extra(self) -> None: """Test init with extra values.""" with pytest.raises(ValidationError): - RunwayServerlessPromotezipOptionDataModel.parse_obj({"invalid": "val"}) + RunwayServerlessPromotezipOptionDataModel.model_validate({"invalid": "val"}) def test_init(self) -> None: """Test init.""" diff --git a/tests/unit/config/models/runway/options/test_terraform.py 
b/tests/unit/config/models/runway/options/test__terraform.py similarity index 75% rename from tests/unit/config/models/runway/options/test_terraform.py rename to tests/unit/config/models/runway/options/test__terraform.py index 5c392d524..e670fcdb6 100644 --- a/tests/unit/config/models/runway/options/test_terraform.py +++ b/tests/unit/config/models/runway/options/test__terraform.py @@ -1,6 +1,5 @@ """Test runway.config.models.runway.options.terraform.""" -# pyright: basic import pytest from pydantic import ValidationError @@ -17,20 +16,21 @@ class TestRunwayTerraformArgsDataModel: def test_init_default(self) -> None: """Test init default.""" obj = RunwayTerraformArgsDataModel() - assert not obj.apply and isinstance(obj.apply, list) - assert not obj.init and isinstance(obj.init, list) - assert not obj.plan and isinstance(obj.plan, list) + assert not obj.apply + assert isinstance(obj.apply, list) + assert not obj.init + assert isinstance(obj.init, list) + assert not obj.plan + assert isinstance(obj.plan, list) def test_init_extra(self) -> None: """Test init extra.""" with pytest.raises(ValidationError): - RunwayTerraformArgsDataModel.parse_obj({"invalid": "val"}) + RunwayTerraformArgsDataModel.model_validate({"invalid": "val"}) def test_init(self) -> None: """Test init.""" - obj = RunwayTerraformArgsDataModel( - apply=["-apply"], init=["-init"], plan=["-plan"] - ) + obj = RunwayTerraformArgsDataModel(apply=["-apply"], init=["-init"], plan=["-plan"]) assert obj.apply == ["-apply"] assert obj.init == ["-init"] assert obj.plan == ["-plan"] @@ -43,9 +43,7 @@ def test_bool(self) -> None: """Test __bool__.""" assert RunwayTerraformBackendConfigDataModel(bucket="test") assert RunwayTerraformBackendConfigDataModel(dynamodb_table="test") - assert RunwayTerraformBackendConfigDataModel( - bucket="test", dynamodb_table="test" - ) + assert RunwayTerraformBackendConfigDataModel(bucket="test", dynamodb_table="test") assert RunwayTerraformBackendConfigDataModel( bucket="test", dynamodb_table="test", workspace_key_prefix="state" ) @@ -62,7 +60,7 @@ def test_init_default(self) -> None: def test_init_extra(self) -> None: """Test init extra.""" with pytest.raises(ValidationError): - RunwayTerraformBackendConfigDataModel.parse_obj({"invalid": "val"}) + RunwayTerraformBackendConfigDataModel.model_validate({"invalid": "val"}) def test_init(self) -> None: """Test init.""" @@ -72,7 +70,7 @@ def test_init(self) -> None: "region": "us-east-1", "workspace_key_prefix": "workspace_prefix", } - obj = RunwayTerraformBackendConfigDataModel.parse_obj(data) + obj = RunwayTerraformBackendConfigDataModel.model_validate(data) assert obj.bucket == data["bucket"] assert obj.dynamodb_table == data["dynamodb_table"] assert obj.region == data["region"] @@ -84,10 +82,12 @@ class TestRunwayTerraformModuleOptionsDataModel: def test_convert_args(self) -> None: """Test _convert_args.""" - obj = RunwayTerraformModuleOptionsDataModel.parse_obj({"args": ["test"]}) + obj = RunwayTerraformModuleOptionsDataModel.model_validate({"args": ["test"]}) assert obj.args.apply == ["test"] - assert not obj.args.init and isinstance(obj.args.init, list) - assert not obj.args.plan and isinstance(obj.args.plan, list) + assert not obj.args.init + assert isinstance(obj.args.init, list) + assert not obj.args.plan + assert isinstance(obj.args.plan, list) def test_init_default(self) -> None: """Test init default.""" @@ -102,7 +102,7 @@ def test_init_default(self) -> None: def test_init_extra(self) -> None: """Test init extra.""" - assert 
RunwayTerraformModuleOptionsDataModel.parse_obj({"invalid": "val"}) + assert RunwayTerraformModuleOptionsDataModel.model_validate({"invalid": "val"}) def test_init(self) -> None: """Test init.""" @@ -113,11 +113,10 @@ def test_init(self) -> None: "terraform_workspace": "default", "terraform_write_auto_tfvars": True, } - obj = RunwayTerraformModuleOptionsDataModel.parse_obj(data) + obj = RunwayTerraformModuleOptionsDataModel.model_validate(data) assert obj.args.init == data["args"]["init"] # type: ignore assert ( - obj.backend_config.bucket - == data["terraform_backend_config"]["bucket"] # type: ignore + obj.backend_config.bucket == data["terraform_backend_config"]["bucket"] # type: ignore ) assert obj.version == data["terraform_version"] assert obj.workspace == data["terraform_workspace"] diff --git a/tests/unit/config/models/runway/test___init__.py b/tests/unit/config/models/runway/test___init__.py new file mode 100644 index 000000000..3d5874016 --- /dev/null +++ b/tests/unit/config/models/runway/test___init__.py @@ -0,0 +1,191 @@ +"""Test runway.config.models.runway.__init__.""" + +from pathlib import Path +from typing import Any + +import pytest +import yaml +from packaging.specifiers import SpecifierSet +from pydantic import ValidationError + +from runway.config.models.runway import ( + RunwayAssumeRoleDefinitionModel, + RunwayConfigDefinitionModel, + RunwayDeploymentDefinitionModel, + RunwayFutureDefinitionModel, + RunwayVariablesDefinitionModel, +) + + +class TestRunwayConfigDefinitionModel: + """Test runway.config.models.runway.RunwayConfigDefinitionModel.""" + + def test_add_deployment_names(self) -> None: + """Test _add_deployment_names.""" + data = { + "deployments": [ + {"modules": ["sampleapp.cfn"], "regions": ["us-east-1"]}, + { + "name": "test-name", + "modules": ["sampleapp.cfn"], + "regions": ["us-west-2"], + }, + ] + } + obj = RunwayConfigDefinitionModel.model_validate(data) + # this also adds coverage for __getitem__ + assert obj["deployments"][0]["name"] == "deployment_1" + assert obj["deployments"][1]["name"] == "test-name" + + def test_convert_runway_version(self) -> None: + """Test _convert_runway_version.""" + assert RunwayConfigDefinitionModel( # handle string + runway_version=">1.11.0" # type: ignore + ).runway_version == SpecifierSet(">1.11.0", prereleases=True) + assert RunwayConfigDefinitionModel( # handle exact version + runway_version="1.11.0" # type: ignore + ).runway_version == SpecifierSet("==1.11.0", prereleases=True) + assert RunwayConfigDefinitionModel( # handle SpecifierSet + runway_version=SpecifierSet(">1.11.0") # type: ignore + ).runway_version == SpecifierSet(">1.11.0", prereleases=True) + assert RunwayConfigDefinitionModel( # handle SpecifierSet + runway_version=SpecifierSet(">1.11.0", prereleases=True) # type: ignore + ).runway_version == SpecifierSet(">1.11.0", prereleases=True) + + def test_convert_runway_version_invalid(self) -> None: + """Test _convert_runway_version invalid specifier set.""" + with pytest.raises( + ValidationError, match="Value error, =latest is not a valid version specifier set" + ): + RunwayConfigDefinitionModel(runway_version="=latest") # type: ignore + + def test_extra(self) -> None: + """Test extra fields.""" + with pytest.raises(ValidationError, match="invalid\n Extra inputs are not permitted"): + RunwayConfigDefinitionModel.model_validate({"invalid": "val"}) + + def test_field_defaults(self) -> None: + """Test field default values.""" + obj = RunwayConfigDefinitionModel() + assert obj.deployments == [] + assert 
isinstance(obj.future, RunwayFutureDefinitionModel) + assert not obj.ignore_git_branch + assert obj.runway_version is None + assert obj.tests == [] + assert isinstance(obj.variables, RunwayVariablesDefinitionModel) + + def test_parse_file(self, tmp_path: Path) -> None: + """Test parse_file.""" + data = { + "deployments": [ + { + "name": "test-name", + "modules": ["sampleapp.cfn"], + "regions": ["us-east-1"], + }, + ] + } + runway_yml = tmp_path / "runway.yml" + runway_yml.write_text(yaml.dump(data)) + + obj = RunwayConfigDefinitionModel.parse_file(runway_yml) + assert obj.deployments[0].modules[0].name == "sampleapp.cfn" + + +class TestRunwayDeploymentDefinitionModel: + """Test runway.config.models.runway.RunwayDeploymentDefinitionModel.""" + + def test_convert_simple_module(self) -> None: + """Test _convert_simple_module.""" + obj = RunwayDeploymentDefinitionModel( + modules=["sampleapp.cfn", {"path": "./"}], # type: ignore + regions=["us-east-1"], + ) + assert obj.modules[0].path == "sampleapp.cfn" + assert obj.modules[1].path == "./" + + def test_extra(self) -> None: + """Test extra fields.""" + with pytest.raises(ValidationError, match="invalid\n Extra inputs are not permitted"): + RunwayDeploymentDefinitionModel.model_validate( + {"invalid": "val", "regions": ["us-east-1"]} + ) + + def test_field_defaults(self) -> None: + """Test field default values.""" + obj = RunwayDeploymentDefinitionModel(modules=[], regions=["us-east-1"]) + assert obj.account_alias is None + assert obj.account_id is None + assert isinstance(obj.assume_role, RunwayAssumeRoleDefinitionModel) + assert obj.env_vars == {} + assert obj.environments == {} + assert obj.modules == [] + assert obj.module_options == {} + assert obj.name == "unnamed_deployment" + assert obj.parallel_regions == [] + assert obj.parameters == {} + assert obj.regions == ["us-east-1"] + + @pytest.mark.parametrize( + "field", + [ + "env_vars", + "environments", + "module_options", + "parallel_regions", + "parameters", + "regions", + ], + ) + def test_fields_string_lookup_only(self, field: str) -> None: + """Test fields that support strings only for lookups.""" + data: dict[str, Any] = {} + if field not in ["parallel_regions", "regions"]: + data["regions"] = ["us-east-1"] + data[field] = "something" + with pytest.raises( + ValidationError, + match=f"{field}\n Value error, field can only be a string if it's a lookup", + ): + RunwayDeploymentDefinitionModel.model_validate(data) + + data[field] = "${var something}" + obj = RunwayDeploymentDefinitionModel.model_validate(data) + assert obj[field] == data[field] + + def test_validate_regions(self) -> None: + """Test _validate_regions.""" + with pytest.raises(ValidationError): + RunwayDeploymentDefinitionModel(modules=[]) + with pytest.raises(ValidationError): + RunwayDeploymentDefinitionModel( + modules=[], parallel_regions=["us-east-1"], regions=["us-east-1"] + ) + with pytest.raises(ValidationError): + RunwayDeploymentDefinitionModel( + modules=[], + parallel_regions=["us-east-1"], + regions={"parallel": ["us-east-1"]}, # type: ignore + ) + with pytest.raises( + ValidationError, + match="Value error, unable to validate parallel_regions/regions - both are defined as strings", + ): + RunwayDeploymentDefinitionModel( + modules=[], parallel_regions="something", regions="something" + ) + + obj0 = RunwayDeploymentDefinitionModel(modules=[], regions=["us-east-1"]) + assert obj0.regions == ["us-east-1"] + assert obj0.parallel_regions == [] + + obj1 = RunwayDeploymentDefinitionModel(modules=[], 
parallel_regions=["us-east-1"]) + assert obj1.regions == [] + assert obj1.parallel_regions == ["us-east-1"] + + obj2 = RunwayDeploymentDefinitionModel( + modules=[], + regions={"parallel": ["us-east-1"]}, # type: ignore + ) + assert obj2.regions == [] + assert obj2.parallel_regions == ["us-east-1"] diff --git a/tests/unit/config/models/runway/test__assume_role.py b/tests/unit/config/models/runway/test__assume_role.py new file mode 100644 index 000000000..6e2d6feba --- /dev/null +++ b/tests/unit/config/models/runway/test__assume_role.py @@ -0,0 +1,69 @@ +"""Test runway.config.models.runway._assume_role.""" + +from __future__ import annotations + +import pytest +from pydantic import ValidationError + +from runway.config.models.runway._assume_role import RunwayAssumeRoleDefinitionModel + + +class TestRunwayAssumeRoleDefinitionModel: + """Test RunwayAssumeRoleDefinitionModel.""" + + @pytest.mark.parametrize("arn", ["null", "none", "None", "undefined"]) + def test_convert_arn_null_value(self, arn: str) -> None: + """Test _convert_arn_null_value.""" + assert not RunwayAssumeRoleDefinitionModel(arn=arn).arn + + def test_extra(self) -> None: + """Test extra fields.""" + with pytest.raises(ValidationError, match="Extra inputs are not permitted"): + RunwayAssumeRoleDefinitionModel.model_validate({"invalid": "val"}) + + def test_field_defaults(self) -> None: + """Test field values.""" + obj = RunwayAssumeRoleDefinitionModel() + assert not obj.arn + assert obj.duration == 3600 + assert not obj.post_deploy_env_revert + assert obj.session_name == "runway" + + def test_fields(self) -> None: + """Test fields.""" + data = { + "arn": "test-arn", + "duration": 900, + "post_deploy_env_revert": True, + "session_name": "test-session", + } + obj = RunwayAssumeRoleDefinitionModel.model_validate(data) + assert obj.arn == data["arn"] + assert obj.duration == data["duration"] + assert obj.post_deploy_env_revert == data["post_deploy_env_revert"] + assert obj.session_name == data["session_name"] + + def test_string_duration(self) -> None: + """Test duration defined as a string.""" + with pytest.raises( + ValidationError, + match="duration\n Value error, field can only be a string if it's a lookup", + ): + RunwayAssumeRoleDefinitionModel(duration="something") + + def test_string_duration_lookup(self) -> None: + """Test duration defined as a lookup string.""" + value = "${var something}" + obj = RunwayAssumeRoleDefinitionModel(duration=value) + assert obj.duration == value + + @pytest.mark.parametrize("duration", [900, 3600, 43_200]) + def test_validate_duration(self, duration: int) -> None: + """Test _validate_duration.""" + assert RunwayAssumeRoleDefinitionModel(duration=duration).duration == duration + + @pytest.mark.parametrize("duration", [899, 43_201]) + def test_validate_duration_invalid(self, duration: int) -> None: + """Test _validate_duration.""" + with pytest.raises(ValidationError, match="duration"): + RunwayAssumeRoleDefinitionModel(duration=duration) diff --git a/tests/unit/config/models/runway/test__builtin_tests.py b/tests/unit/config/models/runway/test__builtin_tests.py new file mode 100644 index 000000000..4290f0368 --- /dev/null +++ b/tests/unit/config/models/runway/test__builtin_tests.py @@ -0,0 +1,56 @@ +"""Test runway.config.models.runway._builtin_tests.""" + +from __future__ import annotations + +import pytest +from pydantic import ValidationError + +from runway.config.models.runway import RunwayTestDefinitionModel + + +class TestRunwayTestDefinitionModel: + """Test 
runway.config.models.runway._builtin_tests.RunwayTestDefinitionModel.""" + + def test_invalid_type(self) -> None: + """Test invalid type.""" + with pytest.raises(ValidationError) as excinfo: + RunwayTestDefinitionModel.model_validate({"type": "invalid"}) + assert excinfo.value.errors()[0]["loc"] == ("type",) + + @pytest.mark.parametrize("required", [None, True, False]) + def test_required(self, required: bool | None) -> None: + """Test required.""" + if isinstance(required, bool): + obj = RunwayTestDefinitionModel(type="script", required=required) + assert obj.required is required + else: + obj = RunwayTestDefinitionModel(type="script") + assert obj.required is False + + def test_string_args(self) -> None: + """Test args defined as a string.""" + with pytest.raises( + ValidationError, + match="args\n Value error, field can only be a string if it's a lookup", + ): + RunwayTestDefinitionModel.model_validate({"args": "something", "type": "yamllint"}) + + def test_string_args_lookup(self) -> None: + """Test args defined as a lookup string.""" + data = {"args": "${var something}", "type": "yamllint"} + obj = RunwayTestDefinitionModel.model_validate(data) + assert obj.args == data["args"] + + def test_string_required(self) -> None: + """Test required defined as a string.""" + with pytest.raises( + ValidationError, + match="required\n Value error, field can only be a string if it's a lookup", + ): + RunwayTestDefinitionModel.model_validate({"required": "something", "type": "yamllint"}) + + def test_string_required_lookup(self) -> None: + """Test required defined as a lookup string.""" + data = {"required": "${var something}", "type": "yamllint"} + obj = RunwayTestDefinitionModel.model_validate(data) + assert obj.required == data["required"] diff --git a/tests/unit/config/models/runway/test__future.py b/tests/unit/config/models/runway/test__future.py new file mode 100644 index 000000000..646f305d2 --- /dev/null +++ b/tests/unit/config/models/runway/test__future.py @@ -0,0 +1,17 @@ +"""Test runway.config.models.runway._future.""" + +from __future__ import annotations + +import pytest +from pydantic import ValidationError + +from runway.config.models.runway._future import RunwayFutureDefinitionModel + + +class TestRunwayFutureDefinitionModel: + """Test RunwayFutureDefinitionModel.""" + + def test_extra(self) -> None: + """Test extra fields.""" + with pytest.raises(ValidationError, match="Extra inputs are not permitted"): + RunwayFutureDefinitionModel.model_validate({"invalid": "val"}) diff --git a/tests/unit/config/models/runway/test__module.py b/tests/unit/config/models/runway/test__module.py new file mode 100644 index 000000000..75c45e524 --- /dev/null +++ b/tests/unit/config/models/runway/test__module.py @@ -0,0 +1,90 @@ +"""Test runway.config.models.runway._module.""" + +from __future__ import annotations + +from pathlib import Path + +import pytest +from pydantic import ValidationError + +from runway.config.models.runway._module import RunwayModuleDefinitionModel + + +class TestRunwayModuleDefinitionModel: + """Test RunwayModuleDefinitionModel.""" + + def test__validate_name(self) -> None: + """Test _validate_name.""" + assert RunwayModuleDefinitionModel().name == "runway" + assert RunwayModuleDefinitionModel(name="test-name").name == "test-name" + assert ( + RunwayModuleDefinitionModel(parallel=[{"path": "./"}]).name # type: ignore + == "parallel_parent" + ) + assert ( + RunwayModuleDefinitionModel( + name="something", + parallel=[{"path": "./"}], # type: ignore + ).name + == "something" + ) 
+ assert RunwayModuleDefinitionModel(path="./").name == Path.cwd().resolve().name + + def test__validate_parallel(self) -> None: + """Test _validate_parallel.""" + with pytest.raises( + ValidationError, + match="parallel\n Value error, only one of parallel or path can be defined", + ): + RunwayModuleDefinitionModel( + path=Path.cwd(), + parallel=["./"], # type: ignore + ) + + assert RunwayModuleDefinitionModel().parallel == [] + assert RunwayModuleDefinitionModel(parallel=["./"]).parallel == [ # type: ignore + RunwayModuleDefinitionModel(path="./") + ] + assert RunwayModuleDefinitionModel( + parallel=[{"name": "test", "path": "./"}] # type: ignore + ).parallel == [RunwayModuleDefinitionModel(name="test", path="./")] + + def test__validate_path(self) -> None: + """Test _validate_path.""" + assert RunwayModuleDefinitionModel().path == Path.cwd() + assert not RunwayModuleDefinitionModel(parallel=[{"path": "./"}]).path # type: ignore + defined_path = Path("./sampleapp.cfn") + assert RunwayModuleDefinitionModel(path=defined_path).path == defined_path + + @pytest.mark.parametrize("field", ["env_vars", "environments", "options", "parameters"]) + def test__validate_string_is_lookup(self, field: str) -> None: + """Test fields that support strings only for lookups.""" + data = {field: "something"} + with pytest.raises( + ValidationError, + match=f"{field}\n Value error, field can only be a string if it's a lookup", + ): + RunwayModuleDefinitionModel.model_validate(data) + + data[field] = "${var something}" + obj = RunwayModuleDefinitionModel.model_validate(data) + assert obj[field] == data[field] + + def test_extra(self) -> None: + """Test extra fields.""" + with pytest.raises(ValidationError, match="Extra inputs are not permitted"): + RunwayModuleDefinitionModel.model_validate({"invalid": "val"}) + + def test_field_defaults(self) -> None: + """Test field defaults.""" + obj = RunwayModuleDefinitionModel() + assert not obj.class_path + assert obj.environments == {} + assert obj.env_vars == {} + assert obj.name == "runway" + assert obj.options == {} + assert obj.parameters == {} + assert obj.path == Path.cwd() + assert obj.tags == [] + assert obj.type is None + assert obj.parallel == [] diff --git a/tests/unit/config/models/runway/test__region.py b/tests/unit/config/models/runway/test__region.py new file mode 100644 index 000000000..3cec3ab34 --- /dev/null +++ b/tests/unit/config/models/runway/test__region.py @@ -0,0 +1,37 @@ +"""Test runway.config.models.runway._region.""" + +from __future__ import annotations + +import pytest +from pydantic import ValidationError + +from runway.config.models.runway._region import RunwayDeploymentRegionDefinitionModel + + +class TestRunwayDeploymentRegionDefinitionModel: + """Test RunwayDeploymentRegionDefinitionModel.""" + + def test_extra(self) -> None: + """Test extra fields.""" + with pytest.raises(ValidationError, match="Extra inputs are not permitted"): + RunwayDeploymentRegionDefinitionModel.model_validate({"invalid": "val", "parallel": []}) + + def test_fields(self) -> None: + """Test fields.""" + assert not RunwayDeploymentRegionDefinitionModel(parallel=[]).parallel + value = ["us-east-1", "us-west-2"] + assert RunwayDeploymentRegionDefinitionModel(parallel=value).parallel == value + + def test_string_parallel(self) -> None: + """Test parallel defined as a string.""" + with pytest.raises( + ValidationError, + match="parallel\n Value error, field can only be a string if it's a lookup", + ): + 
RunwayDeploymentRegionDefinitionModel(parallel="something") + + def test_string_parallel_lookup(self) -> None: + """Test parallel defined as a lookup string.""" + value = "${var something}" + obj = RunwayDeploymentRegionDefinitionModel(parallel=value) + assert obj.parallel == value diff --git a/tests/unit/config/models/runway/test__variables.py b/tests/unit/config/models/runway/test__variables.py new file mode 100644 index 000000000..9bcabd731 --- /dev/null +++ b/tests/unit/config/models/runway/test__variables.py @@ -0,0 +1 @@ +"""Test runway.config.models.runway._variables.""" diff --git a/tests/unit/config/models/runway/test_builtin_tests.py b/tests/unit/config/models/runway/test_builtin_tests.py deleted file mode 100644 index 60d7798fa..000000000 --- a/tests/unit/config/models/runway/test_builtin_tests.py +++ /dev/null @@ -1,172 +0,0 @@ -"""Test runway.config.models.runway._builtin_tests.""" - -# pyright: basic -from typing import Optional - -import pytest -from pydantic import ValidationError - -from runway.config.models.runway import ( - CfnLintRunwayTestArgs, - CfnLintRunwayTestDefinitionModel, - RunwayTestDefinitionModel, - ScriptRunwayTestArgs, - ScriptRunwayTestDefinitionModel, - YamlLintRunwayTestDefinitionModel, -) - - -class TestRunwayTestDefinitionModel: - """Test runway.config.models.runway._builtin_tests.RunwayTestDefinitionModel.""" - - def test_init_cfnlint(self) -> None: - """Test init cfn-lint subclass.""" - data = {"type": "cfn-lint"} - obj = RunwayTestDefinitionModel.parse_obj(data) - - assert isinstance(obj, CfnLintRunwayTestDefinitionModel) - assert obj.args.dict() == {"cli_args": []} - assert obj.name == "cfn-lint" - assert obj.type == "cfn-lint" - - def test_init_script(self) -> None: - """Test init script subclass.""" - data = {"type": "script"} - obj = RunwayTestDefinitionModel.parse_obj(data) - - assert isinstance(obj, ScriptRunwayTestDefinitionModel) - assert obj.args.dict() == {"commands": []} - assert obj.name == "script" - assert obj.type == "script" - - def test_init_yamllint(self) -> None: - """Test init yamllint subclass.""" - data = {"type": "yamllint"} - obj = RunwayTestDefinitionModel.parse_obj(data) - - assert isinstance(obj, YamlLintRunwayTestDefinitionModel) - assert obj.args == {} - assert obj.name == "yamllint" - assert obj.type == "yamllint" - - def test_invalid_type(self) -> None: - """Test invalid type.""" - with pytest.raises(ValidationError) as excinfo: - RunwayTestDefinitionModel.parse_obj({"type": "invalid"}) - assert excinfo.value.errors()[0]["loc"] == ("type",) - - @pytest.mark.parametrize("required", [None, True, False]) - def test_required(self, required: Optional[bool]) -> None: - """Test required.""" - if isinstance(required, bool): - obj = RunwayTestDefinitionModel(type="script", required=required) - assert obj.required is required - else: - obj = RunwayTestDefinitionModel(type="script") - assert obj.required is False - - def test_string_args(self) -> None: - """Test args defined as a string.""" - with pytest.raises(ValidationError) as excinfo: - RunwayTestDefinitionModel.parse_obj( - {"args": "something", "type": "yamllint"} - ) - error = excinfo.value.errors()[0] - assert error["loc"] == ("args",) - assert error["msg"] == "field can only be a string if it's a lookup" - - def test_string_args_lookup(self) -> None: - """Test args defined as a lookup string.""" - data = {"args": "${var something}", "type": "yamllint"} - obj = RunwayTestDefinitionModel.parse_obj(data) - assert obj.args == data["args"] - - def test_string_required(self) 
-> None: - """Test required defined as a string.""" - with pytest.raises(ValidationError) as excinfo: - RunwayTestDefinitionModel.parse_obj( - {"required": "something", "type": "yamllint"} - ) - error = excinfo.value.errors()[0] - assert error["loc"] == ("required",) - assert error["msg"] == "field can only be a string if it's a lookup" - - def test_string_required_lookup(self) -> None: - """Test required defined as a lookup string.""" - data = {"required": "${var something}", "type": "yamllint"} - obj = RunwayTestDefinitionModel.parse_obj(data) - assert obj.required == data["required"] - - -class TestCfnLintRunwayTestArgs: - """Test runway.config.models.runway._builtin_tests.CfnLintRunwayTestArgs.""" - - def test_cli_args_string(self) -> None: - """Test cli_args defined as a string.""" - with pytest.raises(ValidationError) as excinfo: - CfnLintRunwayTestArgs(cli_args="something") - error = excinfo.value.errors()[0] - assert error["loc"] == ("cli_args",) - assert error["msg"] == "field can only be a string if it's a lookup" - - def test_cli_args_string_lookup(self) -> None: - """Test args defined as a lookup string.""" - data = {"cli_args": "${var something}"} - assert CfnLintRunwayTestArgs.parse_obj(data).cli_args == data["cli_args"] - - def test_extra(self) -> None: - """Test extra fields.""" - with pytest.raises(ValidationError) as excinfo: - CfnLintRunwayTestArgs.parse_obj({"invalid": "val"}) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("invalid",) - assert errors[0]["msg"] == "extra fields not permitted" - - -class TestCfnLintRunwayTestDefinitionModel: - """Test runway.config.models.runway._builtin_tests.CfnLintRunwayTestDefinitionModel.""" - - def test_args(self) -> None: - """Test args.""" - data = {"args": {"cli_args": ["example"]}, "type": "cfn-lint"} - obj = CfnLintRunwayTestDefinitionModel.parse_obj(data) - assert isinstance(obj.args, CfnLintRunwayTestArgs) - assert obj.args.cli_args == data["args"]["cli_args"] # type: ignore - - -class TestScriptRunwayTestArgs: - """Test runway.config.models.runway._builtin_tests.ScriptRunwayTestArgs.""" - - def test_commands_string(self) -> None: - """Test commands defined as a string.""" - with pytest.raises(ValidationError) as excinfo: - ScriptRunwayTestArgs(commands="something") - error = excinfo.value.errors()[0] - assert error["loc"] == ("commands",) - assert error["msg"] == "field can only be a string if it's a lookup" - - def test_commands_string_lookup(self) -> None: - """Test args defined as a lookup string.""" - data = {"commands": "${var something}"} - assert ScriptRunwayTestArgs.parse_obj(data).commands == data["commands"] - - def test_extra(self) -> None: - """Test extra fields.""" - with pytest.raises(ValidationError) as excinfo: - ScriptRunwayTestArgs.parse_obj({"invalid": "val"}) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("invalid",) - assert errors[0]["msg"] == "extra fields not permitted" - - -class TestScriptRunwayTestDefinitionModel: - """Test runway.config.models.runway._builtin_tests.ScriptRunwayTestDefinitionModel.""" - - def test_args(self) -> None: - """Test args.""" - data = {"args": {"commands": ["example"]}} - obj = ScriptRunwayTestDefinitionModel.parse_obj(data) - assert isinstance(obj.args, ScriptRunwayTestArgs) - assert obj.args.commands == data["args"]["commands"] diff --git a/tests/unit/config/models/runway/test_runway.py b/tests/unit/config/models/runway/test_runway.py deleted file mode 100644 index 2c17f80ac..000000000 
--- a/tests/unit/config/models/runway/test_runway.py +++ /dev/null @@ -1,407 +0,0 @@ -"""Test runway.config.models.runway.__init__.""" - -# pyright: basic -from pathlib import Path -from typing import Any, Dict - -import pytest -import yaml -from packaging.specifiers import SpecifierSet -from pydantic import ValidationError - -from runway.config.models.runway import ( - RunwayAssumeRoleDefinitionModel, - RunwayConfigDefinitionModel, - RunwayDeploymentDefinitionModel, - RunwayDeploymentRegionDefinitionModel, - RunwayFutureDefinitionModel, - RunwayModuleDefinitionModel, - RunwayVariablesDefinitionModel, -) - - -class TestRunwayAssumeRoleDefinitionModel: - """Test runway.config.models.runway.RunwayAssumeRoleDefinitionModel.""" - - @pytest.mark.parametrize("arn", ["null", "none", "None", "undefined"]) - def test_convert_arn_null_value(self, arn: str) -> None: - """Test _convert_arn_null_value.""" - assert not RunwayAssumeRoleDefinitionModel(arn=arn).arn - - def test_extra(self) -> None: - """Test extra fields.""" - with pytest.raises(ValidationError) as excinfo: - RunwayAssumeRoleDefinitionModel.parse_obj({"invalid": "val"}) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("invalid",) - assert errors[0]["msg"] == "extra fields not permitted" - - def test_field_defaults(self) -> None: - """Test field values.""" - obj = RunwayAssumeRoleDefinitionModel() - assert not obj.arn - assert obj.duration == 3600 - assert not obj.post_deploy_env_revert - assert obj.session_name == "runway" - - def test_fields(self) -> None: - """Test fields.""" - data = { - "arn": "test-arn", - "duration": 900, - "post_deploy_env_revert": True, - "session_name": "test-session", - } - obj = RunwayAssumeRoleDefinitionModel.parse_obj(data) - assert obj.arn == data["arn"] - assert obj.duration == data["duration"] - assert obj.post_deploy_env_revert == data["post_deploy_env_revert"] - assert obj.session_name == data["session_name"] - - def test_string_duration(self) -> None: - """Test duration defined as a string.""" - with pytest.raises(ValidationError) as excinfo: - RunwayAssumeRoleDefinitionModel(duration="something") - error = excinfo.value.errors()[0] - assert error["loc"] == ("duration",) - assert error["msg"] == "field can only be a string if it's a lookup" - - def test_string_duration_lookup(self) -> None: - """Test duration defined as a lookup string.""" - value = "${var something}" - obj = RunwayAssumeRoleDefinitionModel(duration=value) - assert obj.duration == value - - @pytest.mark.parametrize("duration", [900, 3600, 43_200]) - def test_validate_duration(self, duration: int) -> None: - """Test _validate_duration.""" - assert RunwayAssumeRoleDefinitionModel(duration=duration).duration == duration - - @pytest.mark.parametrize("duration", [899, 43_201]) - def test_validate_duration_invalid(self, duration: int) -> None: - """Test _validate_duration.""" - with pytest.raises(ValidationError) as excinfo: - RunwayAssumeRoleDefinitionModel(duration=duration) - error = excinfo.value.errors()[0] - assert error["loc"] == ("duration",) - - -class TestRunwayConfigDefinitionModel: - """Test runway.config.models.runway.RunwayConfigDefinitionModel.""" - - def test_add_deployment_names(self) -> None: - """Test _add_deployment_names.""" - data = { - "deployments": [ - {"modules": ["sampleapp.cfn"], "regions": ["us-east-1"]}, - { - "name": "test-name", - "modules": ["sampleapp.cfn"], - "regions": ["us-west-2"], - }, - ] - } - obj = RunwayConfigDefinitionModel.parse_obj(data) - # this also adds 
coverage for __getitem__ - assert obj["deployments"][0]["name"] == "deployment_1" - assert obj["deployments"][1]["name"] == "test-name" - - def test_convert_runway_version(self) -> None: - """Test _convert_runway_version.""" - assert RunwayConfigDefinitionModel( # handle string - runway_version=">1.11.0" # type: ignore - ).runway_version == SpecifierSet(">1.11.0", prereleases=True) - assert RunwayConfigDefinitionModel( # handle exact version - runway_version="1.11.0" # type: ignore - ).runway_version == SpecifierSet("==1.11.0", prereleases=True) - assert RunwayConfigDefinitionModel( # handle SpecifierSet - runway_version=SpecifierSet(">1.11.0") # type: ignore - ).runway_version == SpecifierSet(">1.11.0", prereleases=True) - assert RunwayConfigDefinitionModel( # handle SpecifierSet - runway_version=SpecifierSet(">1.11.0", prereleases=True) # type: ignore - ).runway_version == SpecifierSet(">1.11.0", prereleases=True) - - def test_convert_runway_version_invalid(self) -> None: - """Test _convert_runway_version invalid specifier set.""" - with pytest.raises(ValidationError) as excinfo: - RunwayConfigDefinitionModel(runway_version="=latest") # type: ignore - assert ( - excinfo.value.errors()[0]["msg"] - == "=latest is not a valid version specifier set" - ) - - def test_extra(self) -> None: - """Test extra fields.""" - with pytest.raises(ValidationError) as excinfo: - RunwayConfigDefinitionModel.parse_obj({"invalid": "val"}) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("invalid",) - assert errors[0]["msg"] == "extra fields not permitted" - - def test_field_defaults(self) -> None: - """Test filed default values.""" - obj = RunwayConfigDefinitionModel() - assert obj.deployments == [] - assert isinstance(obj.future, RunwayFutureDefinitionModel) - assert not obj.ignore_git_branch - assert obj.runway_version == SpecifierSet(">1.10", prereleases=True) - assert obj.tests == [] - assert isinstance(obj.variables, RunwayVariablesDefinitionModel) - - def test_parse_file(self, tmp_path: Path) -> None: - """Test parse_file.""" - data = { - "deployments": [ - { - "name": "test-name", - "modules": ["sampleapp.cfn"], - "regions": ["us-east-1"], - }, - ] - } - runway_yml = tmp_path / "runway.yml" - runway_yml.write_text(yaml.dump(data)) - - obj = RunwayConfigDefinitionModel.parse_file(runway_yml) - assert obj.deployments[0].modules[0].name == "sampleapp.cfn" - - -class TestRunwayDeploymentDefinitionModel: - """Test runway.config.models.runway.RunwayDeploymentDefinitionModel.""" - - def test_convert_simple_module(self) -> None: - """Test _convert_simple_module.""" - obj = RunwayDeploymentDefinitionModel( - modules=["sampleapp.cfn", {"path": "./"}], # type: ignore - regions=["us-east-1"], - ) - assert obj.modules[0].path == "sampleapp.cfn" - assert obj.modules[1].path == "./" - - def test_extra(self) -> None: - """Test extra fields.""" - with pytest.raises(ValidationError) as excinfo: - RunwayDeploymentDefinitionModel.parse_obj( - {"invalid": "val", "regions": ["us-east-1"]} - ) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("invalid",) - assert errors[0]["msg"] == "extra fields not permitted" - - def test_field_defaults(self) -> None: - """Test field default values.""" - obj = RunwayDeploymentDefinitionModel(modules=[], regions=["us-east-1"]) - assert obj.account_alias is None - assert obj.account_id is None - assert isinstance(obj.assume_role, RunwayAssumeRoleDefinitionModel) - assert obj.env_vars == {} - assert 
obj.environments == {} - assert obj.modules == [] - assert obj.module_options == {} - assert obj.name == "unnamed_deployment" - assert obj.parallel_regions == [] - assert obj.parameters == {} - assert obj.regions == ["us-east-1"] - - @pytest.mark.parametrize( - "field", - [ - "env_vars", - "environments", - "module_options", - "parallel_regions", - "parameters", - "regions", - ], - ) - def test_fields_string_lookup_only(self, field: str) -> None: - """Test fields that support strings only for lookups.""" - data: Dict[str, Any] = {} - if field not in ["parallel_regions", "regions"]: - data["regions"] = ["us-east-1"] - data[field] = "something" - with pytest.raises(ValidationError) as excinfo: - RunwayDeploymentDefinitionModel.parse_obj(data) - error = excinfo.value.errors()[0] - assert error["loc"] == (field,) - assert error["msg"] == "field can only be a string if it's a lookup" - - data[field] = "${var something}" - obj = RunwayDeploymentDefinitionModel.parse_obj(data) - assert obj[field] == data[field] - - def test_validate_regions(self) -> None: - """Test _validate_regions.""" - with pytest.raises(ValidationError): - RunwayDeploymentDefinitionModel(modules=[]) - with pytest.raises(ValidationError): - RunwayDeploymentDefinitionModel( - modules=[], parallel_regions=["us-east-1"], regions=["us-east-1"] - ) - with pytest.raises(ValidationError): - RunwayDeploymentDefinitionModel( - modules=[], - parallel_regions=["us-east-1"], - regions={"parallel": ["us-east-1"]}, # type: ignore - ) - with pytest.raises(ValidationError) as excinfo: - RunwayDeploymentDefinitionModel( - modules=[], parallel_regions="something", regions="something" - ) - error = excinfo.value.errors()[0] - assert error["loc"] == ("__root__",) - assert error["msg"].startswith("unable to validate parallel_regions/regions") - - obj0 = RunwayDeploymentDefinitionModel(modules=[], regions=["us-east-1"]) - assert obj0.regions == ["us-east-1"] - assert obj0.parallel_regions == [] - - obj1 = RunwayDeploymentDefinitionModel( - modules=[], parallel_regions=["us-east-1"] - ) - assert obj1.regions == [] - assert obj1.parallel_regions == ["us-east-1"] - - obj2 = RunwayDeploymentDefinitionModel( - modules=[], - regions={"parallel": ["us-east-1"]}, # type: ignore - ) - assert obj2.regions == [] - assert obj2.parallel_regions == ["us-east-1"] - - -class TestRunwayDeploymentRegionDefinitionModel: - """Test runway.config.models.runway.RunwayDeploymentRegionDefinitionModel.""" - - def test_extra(self) -> None: - """Test extra fields.""" - with pytest.raises(ValidationError) as excinfo: - RunwayDeploymentRegionDefinitionModel.parse_obj( - {"invalid": "val", "parallel": []} - ) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("invalid",) - assert errors[0]["msg"] == "extra fields not permitted" - - def test_fields(self) -> None: - """Test fields.""" - assert not RunwayDeploymentRegionDefinitionModel(parallel=[]).parallel - value = ["us-east-1", "us-west-2"] - assert RunwayDeploymentRegionDefinitionModel(parallel=value).parallel == value - - def test_string_parallel(self) -> None: - """Test parallel defined as a string.""" - with pytest.raises(ValidationError) as excinfo: - RunwayDeploymentRegionDefinitionModel(parallel="something") - error = excinfo.value.errors()[0] - assert error["loc"] == ("parallel",) - assert error["msg"] == "field can only be a string if it's a lookup" - - def test_string_parallel_lookup(self) -> None: - """Test parallel defined as a lookup string.""" - value = "${var something}" - 
obj = RunwayDeploymentRegionDefinitionModel(parallel=value) - assert obj.parallel == value - - -class TestRunwayFutureDefinitionModel: - """Test runway.config.models.runway.RunwayFutureDefinitionModel.""" - - def test_extra(self) -> None: - """Test extra fields.""" - with pytest.raises(ValidationError) as excinfo: - RunwayFutureDefinitionModel.parse_obj({"invalid": "val"}) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("invalid",) - assert errors[0]["msg"] == "extra fields not permitted" - - -class TestRunwayModuleDefinitionModel: - """Test runway.config.models.runway.RunwayModuleDefinitionModel.""" - - def test_extra(self) -> None: - """Test extra fields.""" - with pytest.raises(ValidationError) as excinfo: - RunwayModuleDefinitionModel.parse_obj({"invalid": "val"}) - errors = excinfo.value.errors() - assert len(errors) == 1 - assert errors[0]["loc"] == ("invalid",) - assert errors[0]["msg"] == "extra fields not permitted" - - def test_field_defaults(self) -> None: - """Test field defaults.""" - obj = RunwayModuleDefinitionModel() - assert not obj.class_path - assert obj.environments == {} - assert obj.env_vars == {} - assert obj.name == "undefined" - assert obj.options == {} - assert obj.parameters == {} - assert obj.path == Path.cwd() - assert obj.tags == [] - assert obj.type is None - assert obj.parallel == [] - - def test_validate_name(self) -> None: - """Test _validate_name.""" - assert RunwayModuleDefinitionModel().name == "undefined" - assert RunwayModuleDefinitionModel(name="test-name").name == "test-name" - assert ( - RunwayModuleDefinitionModel(parallel=[{"path": "./"}]).name # type: ignore - == "parallel_parent" - ) - assert ( - RunwayModuleDefinitionModel( - name="something", - parallel=[{"path": "./"}], # type: ignore - ).name - == "something" - ) - assert RunwayModuleDefinitionModel(path="./").name == Path.cwd().resolve().name - - def test_validate_path(self) -> None: - """Test _validate_path.""" - assert RunwayModuleDefinitionModel().path == Path.cwd() - assert not RunwayModuleDefinitionModel(parallel=[{"path": "./"}]).path # type: ignore - defined_path = Path("./sampleapp.cfn") - assert RunwayModuleDefinitionModel(path=defined_path).path == defined_path - - def test_validate_parallel(self) -> None: - """Test _validate_parallel.""" - with pytest.raises(ValidationError) as excinfo: - RunwayModuleDefinitionModel( - path=Path.cwd(), - parallel=["./"], # type: ignore - ) - error = excinfo.value.errors()[0] - assert error["loc"] == ("parallel",) - assert error["msg"] == "only one of parallel or path can be defined" - - assert RunwayModuleDefinitionModel().parallel == [] - assert RunwayModuleDefinitionModel(parallel=["./"]).parallel == [ # type: ignore - RunwayModuleDefinitionModel(path="./") - ] - assert RunwayModuleDefinitionModel( - parallel=[{"name": "test", "path": "./"}] # type: ignore - ).parallel == [RunwayModuleDefinitionModel(name="test", path="./")] - - @pytest.mark.parametrize( - "field", ["env_vars", "environments", "options", "parameters"] - ) - def test_fields_string_lookup_only(self, field: str) -> None: - """Test fields that support strings only for lookups.""" - data = {field: "something"} - with pytest.raises(ValidationError) as excinfo: - RunwayModuleDefinitionModel.parse_obj(data) - error = excinfo.value.errors()[0] - assert error["loc"] == (field,) - assert error["msg"] == "field can only be a string if it's a lookup" - - data[field] = "${var something}" - obj = RunwayModuleDefinitionModel.parse_obj(data) - assert 
obj[field] == data[field] diff --git a/tests/unit/config/models/test_base.py b/tests/unit/config/models/test_base.py index 8a35a00cb..0cf1e5960 100644 --- a/tests/unit/config/models/test_base.py +++ b/tests/unit/config/models/test_base.py @@ -1,10 +1,11 @@ """Test runway.config.models.base.""" -# pyright: basic -from typing import Any, Dict, Optional +from __future__ import annotations + +from typing import Any import pytest -from pydantic import Extra, ValidationError +from pydantic import ValidationError from runway.config.models.base import ConfigProperty @@ -18,11 +19,6 @@ class BadObject(ConfigProperty): name: str = ("invalid",) # type: ignore - class Config(ConfigProperty.Config): - """Model configuration.""" - - extra = Extra.forbid - class GoodObject(ConfigProperty): """Subclass used to test a parent class. @@ -33,13 +29,11 @@ class GoodObject(ConfigProperty): name: str bool_field: bool = True - dict_field: Dict[str, Any] = {} - optional_str_field: Optional[str] = None + dict_field: dict[str, Any] = {} + optional_str_field: str | None = None - class Config(ConfigProperty.Config): - """Model configuration.""" - extra = Extra.forbid +GoodObject.model_config["extra"] = "forbid" class TestConfigProperty: @@ -76,14 +70,13 @@ def test_validate_all(self) -> None: errors = excinfo.value.errors() assert len(errors) == 1 assert errors[0]["loc"] == ("name",) - assert errors[0]["msg"] == "str type expected" + assert errors[0]["msg"] == "Input should be a valid string" def test_validate_assignment(self) -> None: """Test Config.validate_assignment.""" with pytest.raises(ValidationError) as excinfo: - obj = GoodObject(name="test") - obj.name = ("invalid",) # type: ignore + GoodObject(name="test").name = ("invalid",) # type: ignore errors = excinfo.value.errors() assert len(errors) == 1 assert errors[0]["loc"] == ("name",) - assert errors[0]["msg"] == "str type expected" + assert errors[0]["msg"] == "Input should be a valid string" diff --git a/tests/unit/config/models/test_utils.py b/tests/unit/config/models/test_utils.py index ec2ea856c..b6f0bd353 100644 --- a/tests/unit/config/models/test_utils.py +++ b/tests/unit/config/models/test_utils.py @@ -1,8 +1,9 @@ """Test runway.config.models.utils.""" -# pyright: basic +from __future__ import annotations + from pathlib import Path -from typing import Any, Optional +from typing import Any import pytest @@ -36,7 +37,7 @@ def test_convert_null_values(provided: Any, expected: Any) -> None: @pytest.mark.parametrize("provided", [None, Path("./")]) -def test_resolve_path_field(provided: Optional[Path]) -> None: +def test_resolve_path_field(provided: Path | None) -> None: """Test resolve_path_field.""" if provided is None: assert not resolve_path_field(provided) @@ -66,6 +67,6 @@ def test_validate_string_is_lookup(provided: Any) -> None: ) def test_validate_string_is_lookup_raises(provided: str) -> None: """Test validate_string_is_lookup.""" - with pytest.raises(ValueError) as excinfo: + with pytest.raises(ValueError) as excinfo: # noqa: PT011 validate_string_is_lookup(provided) assert excinfo.value == RUNWAY_LOOKUP_STRING_ERROR diff --git a/tests/unit/config/test_config.py b/tests/unit/config/test___init__.py similarity index 83% rename from tests/unit/config/test_config.py rename to tests/unit/config/test___init__.py index 9f11e306e..ed47e10e5 100644 --- a/tests/unit/config/test_config.py +++ b/tests/unit/config/test___init__.py @@ -1,14 +1,12 @@ -"""Test runway.config.""" +"""Test runway.config.__init__.""" -# pyright: basic from __future__ import 
annotations -from pathlib import Path from typing import TYPE_CHECKING +from unittest.mock import MagicMock, patch import pytest import yaml -from mock import MagicMock, patch from pydantic import BaseModel from runway.cfngin.exceptions import MissingEnvironment @@ -20,7 +18,8 @@ from runway.exceptions import ConfigNotFound if TYPE_CHECKING: - from pytest import MonkeyPatch + from pathlib import Path + MODULE = "runway.config" @@ -34,10 +33,10 @@ class ExampleModel(BaseModel): class TestBaseConfig: """Test runway.config.BaseConfig.""" - def test_dump(self, monkeypatch: MonkeyPatch) -> None: + def test_dump(self, monkeypatch: pytest.MonkeyPatch) -> None: """Test dump.""" mock_dict = MagicMock(return_value={"name": "test"}) - monkeypatch.setattr(ExampleModel, "dict", mock_dict) + monkeypatch.setattr(ExampleModel, "model_dump", mock_dict) obj = BaseConfig(ExampleModel()) assert obj.dump() == "name: test\n" mock_dict.assert_called_once_with( @@ -122,7 +121,7 @@ def test_parse_file_file_path_missing(self, tmp_path: Path) -> None: assert excinfo.value.path == config_yml def test_parse_file_find_config_file( - self, monkeypatch: MonkeyPatch, tmp_path: Path + self, monkeypatch: pytest.MonkeyPatch, tmp_path: Path ) -> None: """Test parse_file with path.""" file_path = tmp_path / "test.yml" @@ -138,41 +137,35 @@ def test_parse_file_find_config_file( ) def test_parse_file_find_config_file_value_error( - self, monkeypatch: MonkeyPatch, tmp_path: Path + self, monkeypatch: pytest.MonkeyPatch, tmp_path: Path ) -> None: """Test parse_file with path raise ValueError.""" - mock_find_config_file = MagicMock( - return_value=[tmp_path / "01.yml", tmp_path / "02.yml"] - ) + mock_find_config_file = MagicMock(return_value=[tmp_path / "01.yml", tmp_path / "02.yml"]) monkeypatch.setattr(CfnginConfig, "find_config_file", mock_find_config_file) - with pytest.raises(ValueError) as excinfo: + with pytest.raises(ValueError, match="more than one"): CfnginConfig.parse_file(path=tmp_path) - assert str(excinfo.value).startswith("more than one") - def test_parse_file_value_error(self): + def test_parse_file_value_error(self) -> None: """Test parse_file raise ValueError.""" - with pytest.raises(ValueError) as excinfo: + with pytest.raises(ValueError, match="must provide path or file_path"): CfnginConfig.parse_file() - assert str(excinfo.value) == "must provide path or file_path" - def test_parse_obj(self, monkeypatch: MonkeyPatch) -> None: - """Test parse_obj.""" + def test_parse_obj(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Test model_validate.""" monkeypatch.setattr( - MODULE + ".CfnginConfigDefinitionModel.parse_obj", - lambda x: CfnginConfigDefinitionModel(namespace="success"), # type: ignore + MODULE + ".CfnginConfigDefinitionModel.model_validate", + lambda x: CfnginConfigDefinitionModel(namespace="success"), # type: ignore # noqa: ARG005 ) assert CfnginConfig.parse_obj({}).namespace == "success" - def test_parse_raw(self, monkeypatch: MonkeyPatch, tmp_path: Path) -> None: + def test_parse_raw(self, monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> None: """Test parse_raw.""" mock_resolve_raw_data = MagicMock() mock_parse_obj = MagicMock() mock_process_package_sources = MagicMock() monkeypatch.setattr(CfnginConfig, "resolve_raw_data", mock_resolve_raw_data) monkeypatch.setattr(CfnginConfig, "parse_obj", mock_parse_obj) - monkeypatch.setattr( - CfnginConfig, "process_package_sources", mock_process_package_sources - ) + monkeypatch.setattr(CfnginConfig, "process_package_sources", mock_process_package_sources) 
data = {"namespace": "test"} data_str = yaml.dump(data) @@ -181,20 +174,14 @@ def test_parse_raw(self, monkeypatch: MonkeyPatch, tmp_path: Path) -> None: mock_process_package_sources.return_value = data_str assert ( - CfnginConfig.parse_raw( - data_str, skip_package_sources=True, work_dir=tmp_path - ) - == data + CfnginConfig.parse_raw(data_str, skip_package_sources=True, work_dir=tmp_path) == data ) mock_resolve_raw_data.assert_called_once_with(yaml.dump(data), parameters={}) mock_parse_obj.assert_called_once_with(data) mock_process_package_sources.assert_not_called() assert ( - CfnginConfig.parse_raw( - data_str, parameters={"key": "val"}, work_dir=tmp_path - ) - == data + CfnginConfig.parse_raw(data_str, parameters={"key": "val"}, work_dir=tmp_path) == data ) mock_resolve_raw_data.assert_called_with( yaml.dump(data), @@ -207,7 +194,7 @@ def test_parse_raw(self, monkeypatch: MonkeyPatch, tmp_path: Path) -> None: @patch(MODULE + ".SourceProcessor") def test_process_package_sources( - self, mock_source_processor: MagicMock, monkeypatch: MonkeyPatch, tmp_path: Path + self, mock_source_processor: MagicMock, monkeypatch: pytest.MonkeyPatch, tmp_path: Path ) -> None: """Test process_package_sources.""" mock_resolve_raw_data = MagicMock(return_value="rendered") @@ -234,7 +221,7 @@ def test_process_package_sources( data = {"namespace": "test", "package_sources": {"git": [{"uri": "something"}]}} raw_data = yaml.dump(data) - mock_source_processor.configs_to_merge = [str(other_config.resolve())] + mock_source_processor.configs_to_merge = [other_config.resolve()] assert ( CfnginConfig.process_package_sources( raw_data, parameters={"key": "val"}, work_dir=tmp_path @@ -242,7 +229,7 @@ def test_process_package_sources( == "rendered" ) mock_source_processor.assert_called_with( - sources=CfnginPackageSourcesDefinitionModel.parse_obj( + sources=CfnginPackageSourcesDefinitionModel.model_validate( {"git": [{"uri": "something"}]} ), cache_dir=tmp_path / "cache", @@ -258,10 +245,7 @@ def test_resolve_raw_data(self) -> None: """Test resolve_raw_data.""" raw_data = "namespace: ${namespace}" expected = "namespace: test" - assert ( - CfnginConfig.resolve_raw_data(raw_data, parameters={"namespace": "test"}) - == expected - ) + assert CfnginConfig.resolve_raw_data(raw_data, parameters={"namespace": "test"}) == expected def test_resolve_raw_data_missing_value(self) -> None: """Test resolve_raw_data missing value.""" @@ -278,13 +262,13 @@ def test_resolve_raw_data_ignore_lookup(self) -> None: class TestRunwayConfig: """Test runway.config.RunwayConfig.""" - def test_find_config_file_yaml(self, tmp_path: Path): + def test_find_config_file_yaml(self, tmp_path: Path) -> None: """Test file_config_file runway.yaml.""" runway_yaml = tmp_path / "runway.yaml" runway_yaml.touch() assert RunwayConfig.find_config_file(tmp_path) == runway_yaml - def test_find_config_file_yml(self, tmp_path: Path): + def test_find_config_file_yml(self, tmp_path: Path) -> None: """Test file_config_file runway.yml.""" runway_yml = tmp_path / "runway.yml" runway_yml.touch() @@ -308,9 +292,8 @@ def test_find_config_file_value_error(self, tmp_path: Path) -> None: """Test file_config_file raise ValueError.""" (tmp_path / "runway.yaml").touch() (tmp_path / "runway.yml").touch() - with pytest.raises(ValueError) as excinfo: + with pytest.raises(ValueError, match="more than one"): RunwayConfig.find_config_file(tmp_path) - assert str(excinfo.value).startswith("more than one") def test_parse_obj(self) -> None: """Test parse_obj.""" diff --git 
diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py
index e13e32989..e9eb954c8 100644
--- a/tests/unit/conftest.py
+++ b/tests/unit/conftest.py
@@ -1,22 +1,21 @@
 """Pytest fixtures and plugins."""

-# pylint: disable=redefined-outer-name
 from __future__ import annotations

 import logging
 import os
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional, cast
+from typing import TYPE_CHECKING, Any, cast
+from unittest.mock import MagicMock, Mock

 import pytest
 import yaml
-from mock import MagicMock

 from runway.config import RunwayConfig
 from runway.core.components import DeployEnvironment

 from .factories import (
-    MockCFNginContext,
+    MockCfnginContext,
     MockRunwayConfig,
     MockRunwayContext,
     YamlLoader,
@@ -25,18 +24,18 @@
 from .mock_docker.fake_api_client import make_fake_client

 if TYPE_CHECKING:
+    from collections.abc import Iterator
+
     from _pytest.config import Config
     from _pytest.python import Module
     from docker import DockerClient
-    from pytest import FixtureRequest, MonkeyPatch
     from pytest_mock import MockerFixture

-LOG = logging.getLogger(__name__)
-TEST_ROOT = Path(os.path.dirname(os.path.realpath(__file__)))
+LOGGER = logging.getLogger(__name__)
+TEST_ROOT = Path(__file__).parent

-# pylint: disable=unused-argument
-def pytest_ignore_collect(path: Any, config: Config) -> bool:
+def pytest_ignore_collect(path: Any, config: Config) -> bool:  # noqa: ARG001
     """Determine if this directory should have its tests collected."""
     if config.option.functional:
         return True
@@ -56,16 +55,16 @@ def aws_credentials() -> Iterator[None]:
         "AWS_SECRET_ACCESS_KEY": "testing",
         "AWS_DEFAULT_REGION": "us-east-1",
     }
-    saved_env: Dict[str, Optional[str]] = {}
+    saved_env: dict[str, str | None] = {}
     for key, value in overrides.items():
-        LOG.info("Overriding env var: %s=%s", key, value)
+        LOGGER.info("Overriding env var: %s=%s", key, value)
         saved_env[key] = os.environ.get(key, None)
         os.environ[key] = value

     yield

     for key, value in saved_env.items():
-        LOG.info("Restoring saved env var: %s=%s", key, value)
+        LOGGER.info("Restoring saved env var: %s=%s", key, value)
         if value is None:
             os.environ.pop(key, None)  # handle key missing
         else:
@@ -75,9 +74,9 @@ def aws_credentials() -> Iterator[None]:


 @pytest.fixture(scope="package")
-def fixture_dir() -> str:
+def fixture_dir() -> Path:
     """Path to the fixture directory."""
-    return os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures")
+    return Path(__file__).parent / "fixtures"


 @pytest.fixture(scope="module")
@@ -90,19 +89,19 @@ def fx_config() -> YamlLoader:
     )


-@pytest.fixture(scope="function")
+@pytest.fixture()
 def fx_deployments() -> YamlLoaderDeployment:
     """Return YAML loader for deployment fixtures."""
     return YamlLoaderDeployment(TEST_ROOT / "fixtures" / "deployments")


-@pytest.fixture(scope="function")
+@pytest.fixture()
 def mock_docker_client() -> DockerClient:
     """Create a docker client with mock API backend."""
     return make_fake_client()


-@pytest.fixture(scope="function")
+@pytest.fixture()
 def tempfile_temporary_directory(mocker: MockerFixture, tmp_path: Path) -> MagicMock:
     """Mock tempfile.TemporaryDirectory."""
     return mocker.patch(
@@ -112,63 +111,61 @@ def tempfile_temporary_directory(mocker: MockerFixture, tmp_path: Path) -> Magic


 @pytest.fixture(scope="module")
-def yaml_fixtures(request: FixtureRequest, fixture_dir: str) -> Dict[str, Any]:
+def yaml_fixtures(request: pytest.FixtureRequest, fixture_dir: Path) -> dict[str, Any]:
     """Load test fixture yaml files.
     Uses a list of file paths within the fixture directory loaded from the
     `YAML_FIXTURES` variable of the module.

     """
-    file_paths: List[str] = getattr(
+    file_paths: list[str] = getattr(
         cast("Module", request.module), "YAML_FIXTURES", []  # type: ignore
     )
-    result: Dict[str, Any] = {}
+    result: dict[str, Any] = {}
     for file_path in file_paths:
-        with open(os.path.join(fixture_dir, file_path), encoding="utf-8") as _file:
-            data = _file.read()
-            result[file_path] = yaml.safe_load(data)
+        result[file_path] = yaml.safe_load((fixture_dir / file_path).read_bytes())
     return result


-@pytest.fixture(scope="function")
+@pytest.fixture()
 def deploy_environment(tmp_path: Path) -> DeployEnvironment:
     """Create a deploy environment that can be used for testing."""
     return DeployEnvironment(explicit_name="test", root_dir=tmp_path)


-@pytest.fixture(scope="function")
-def cfngin_context(runway_context: MockRunwayContext) -> MockCFNginContext:
+@pytest.fixture()
+def cfngin_context(runway_context: MockRunwayContext) -> MockCfnginContext:
     """Create a mock CFNgin context object."""
-    return MockCFNginContext(deploy_environment=runway_context.env, parameters={})
+    return MockCfnginContext(deploy_environment=runway_context.env, parameters={})


-@pytest.fixture
-def patch_time(monkeypatch: MonkeyPatch) -> None:
-    """Patch built-in time object."""
-    monkeypatch.setattr("time.sleep", lambda s: None)  # type: ignore
+@pytest.fixture()
+def mock_sleep(mocker: MockerFixture) -> Mock:
+    """Patch built-in ``time.sleep``."""
+    return mocker.patch("time.sleep", return_value=None)


-@pytest.fixture
+@pytest.fixture()
 def platform_darwin(mocker: MockerFixture) -> None:
     """Patch platform.system to always return "Darwin"."""
     mocker.patch("platform.system", return_value="Darwin")


-@pytest.fixture
+@pytest.fixture()
 def platform_linux(mocker: MockerFixture) -> None:
     """Patch platform.system to always return "Linux"."""
     mocker.patch("platform.system", return_value="Linux")


-@pytest.fixture
+@pytest.fixture()
 def platform_windows(mocker: MockerFixture) -> None:
     """Patch platform.system to always return "Windows"."""
     mocker.patch("platform.system", return_value="Windows")


-@pytest.fixture(scope="function")
+@pytest.fixture()
 def patch_runway_config(
-    request: FixtureRequest, monkeypatch: MonkeyPatch, runway_config: MockRunwayConfig
+    request: pytest.FixtureRequest, monkeypatch: pytest.MonkeyPatch, runway_config: MockRunwayConfig
 ) -> MockRunwayConfig:
     """Patch Runway config and return a mock config object."""
     patch_path = getattr(cast("Module", request.module), "PATCH_RUNWAY_CONFIG", None)
@@ -177,25 +174,19 @@ def patch_runway_config(
     return runway_config


-@pytest.fixture(scope="function")
+@pytest.fixture()
 def runway_config() -> MockRunwayConfig:
     """Create a mock runway config object."""
     return MockRunwayConfig()


-@pytest.fixture(scope="function")
-def runway_context(request: FixtureRequest, tmp_path: Path) -> MockRunwayContext:
+@pytest.fixture()
+def runway_context(request: pytest.FixtureRequest, tmp_path: Path) -> MockRunwayContext:
     """Create a mock Runway context object."""
     env_vars = {
-        "AWS_REGION": getattr(
-            cast("Module", request.module), "AWS_REGION", "us-east-1"
-        ),
-        "DEFAULT_AWS_REGION": getattr(
-            cast("Module", request.module), "AWS_REGION", "us-east-1"
-        ),
-        "DEPLOY_ENVIRONMENT": getattr(
-            cast("Module", request.module), "DEPLOY_ENVIRONMENT", "test"
-        ),
+        "AWS_REGION": getattr(cast("Module", request.module), "AWS_REGION", "us-east-1"),
"us-east-1"), + "DEPLOY_ENVIRONMENT": getattr(cast("Module", request.module), "DEPLOY_ENVIRONMENT", "test"), } creds = { "AWS_ACCESS_KEY_ID": "test_access_key", diff --git a/tests/unit/context/test_base.py b/tests/unit/context/test_base.py index 263e36436..a0912102c 100644 --- a/tests/unit/context/test_base.py +++ b/tests/unit/context/test_base.py @@ -1,14 +1,12 @@ """Test runway.context._base.""" -# pylint: disable=redefined-outer-name -# pyright: basic from __future__ import annotations from typing import TYPE_CHECKING, cast +from unittest.mock import MagicMock import boto3 import pytest -from mock import MagicMock from runway.context._base import BaseContext from runway.context.sys_info import SystemInfo @@ -31,7 +29,7 @@ } -@pytest.fixture(scope="function") +@pytest.fixture() def mock_boto3_session(mocker: MockerFixture) -> MagicMock: """Mock boto3.Session.""" mock_session = MagicMock(autospec=boto3.Session) @@ -39,7 +37,7 @@ def mock_boto3_session(mocker: MockerFixture) -> MagicMock: return mock_session -@pytest.fixture(scope="function") +@pytest.fixture() def mock_sso_botocore_session(mocker: MockerFixture) -> MagicMock: """Mock runway.aws_sso_botocore.session.Session.""" return mocker.patch(f"{MODULE}.Session") @@ -62,10 +60,7 @@ class TestBaseContext: def test_boto3_credentials(self, mocker: MockerFixture) -> None: """Test boto3_credentials.""" mocker.patch.object(self.env, "vars", TEST_ENV_CREDS) - assert ( - BaseContext(deploy_environment=self.env).boto3_credentials - == TEST_BOTO3_CREDS - ) + assert BaseContext(deploy_environment=self.env).boto3_credentials == TEST_BOTO3_CREDS def test_boto3_credentials_empty(self, mocker: MockerFixture) -> None: """Test boto3_credentials empty.""" @@ -75,9 +70,7 @@ def test_boto3_credentials_empty(self, mocker: MockerFixture) -> None: def test_current_aws_creds(self, mocker: MockerFixture) -> None: """Test current_aws_creds.""" mocker.patch.object(self.env, "vars", TEST_ENV_CREDS) - assert ( - BaseContext(deploy_environment=self.env).current_aws_creds == TEST_ENV_CREDS - ) + assert BaseContext(deploy_environment=self.env).current_aws_creds == TEST_ENV_CREDS def test_current_aws_creds_empty(self, mocker: MockerFixture) -> None: """Test current_aws_creds empty.""" diff --git a/tests/unit/context/test_cfngin.py b/tests/unit/context/test_cfngin.py index 93f669927..d23a23cfc 100644 --- a/tests/unit/context/test_cfngin.py +++ b/tests/unit/context/test_cfngin.py @@ -1,17 +1,16 @@ """Test runway.context._cfngin.""" -# pyright: basic from __future__ import annotations import io import json from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Union, cast +from typing import TYPE_CHECKING, Any, cast +from unittest.mock import MagicMock import pytest from botocore.response import StreamingBody from botocore.stub import Stubber -from mock import MagicMock from runway.cfngin.exceptions import ( PersistentGraphCannotLock, @@ -41,12 +40,12 @@ } -def gen_tagset(tags: Dict[str, str]) -> TagSetTypeDef: +def gen_tagset(tags: dict[str, str]) -> TagSetTypeDef: """Create TagSet value from a dict.""" return [{"Key": key, "Value": value} for key, value in tags.items()] -def gen_s3_object_content(content: Union[Dict[str, Any], str]) -> StreamingBody: +def gen_s3_object_content(content: dict[str, Any] | str) -> StreamingBody: """Convert a string or dict to S3 object body. 
     Args:
@@ -68,12 +67,12 @@ def gen_s3_object_content(content: Union[Dict[str, Any], str]) -> StreamingBody:
         ("-", None, "test"),
     ],
 )
-def test_get_fqn(delim: str, expected: str, name: Optional[str]) -> None:
+def test_get_fqn(delim: str, expected: str, name: str | None) -> None:
     """Test runway.context._cfngin.get_fqn."""
     assert get_fqn("test", delim, name) == expected


-class TestCFNginContext:  # pylint: disable=too-many-public-methods
+class TestCFNginContext:
     """Test runway.context._cfngin.CFNginContext."""

     config = CfnginConfig.parse_obj(
@@ -97,7 +96,7 @@ class TestCFNginContext:  # pylint: disable=too-many-public-methods
             {"name": "stack2", "template_path": ".", "requires": ["stack1"]},
         ],
     }
-    persist_graph_raw: Dict[str, Set[str]] = {"stack1": set(), "stack2": {"stack1"}}
+    persist_graph_raw: dict[str, set[str]] = {"stack1": set(), "stack2": {"stack1"}}
     persist_graph_config = CfnginConfig.parse_obj(persist_graph_raw_config)

     @pytest.mark.parametrize(
@@ -119,9 +118,7 @@ def test_bucket_name_config(self, mocker: MockerFixture) -> None:
         mocker.patch.object(CfnginContext, "upload_to_s3", True)
         assert (
             CfnginContext(
-                config=CfnginConfig.parse_obj(
-                    {"namespace": "test", "cfngin_bucket": "test-bucket"}
-                )
+                config=CfnginConfig.parse_obj({"namespace": "test", "cfngin_bucket": "test-bucket"})
             ).bucket_name
             == "test-bucket"
         )
@@ -149,9 +146,7 @@ def test_get_fqn(self, mocker: MockerFixture) -> None:
         mock_get_fqn = mocker.patch(f"{MODULE}.get_fqn", return_value="success")
         obj = CfnginContext(config=self.config)
         assert obj.get_fqn("name") == "success"
-        mock_get_fqn.assert_called_once_with(
-            obj.base_fqn, self.config.namespace_delimiter, "name"
-        )
+        mock_get_fqn.assert_called_once_with(obj.base_fqn, self.config.namespace_delimiter, "name")

     def test_get_stack(self) -> None:
         """Test get_stack."""
@@ -184,7 +179,8 @@ def test_init(self, tmp_path: Path) -> None:
         assert obj.config_path == tmp_path
         assert obj.env == self.env
         assert obj.force_stacks == ["stack-01"]
-        assert not obj.hook_data and isinstance(obj.hook_data, dict)
+        assert not obj.hook_data
+        assert isinstance(obj.hook_data, dict)
         assert obj.logger
         assert obj.parameters == {"key": "val"}
         assert obj.stack_names == ["stack-02"]
@@ -198,10 +194,13 @@ def test_init_defaults(self) -> None:
         assert obj.config_path == Path.cwd()
         assert isinstance(obj.env, DeployEnvironment)
         assert obj.force_stacks == []
-        assert not obj.hook_data and isinstance(obj.hook_data, dict)
+        assert not obj.hook_data
+        assert isinstance(obj.hook_data, dict)
         assert obj.logger
-        assert not obj.parameters and isinstance(obj.parameters, dict)
-        assert not obj.stack_names and isinstance(obj.stack_names, list)
+        assert not obj.parameters
+        assert isinstance(obj.parameters, dict)
+        assert not obj.stack_names
+        assert isinstance(obj.stack_names, list)

     def test_lock_persistent_graph_locked(self, mocker: MockerFixture) -> None:
         """Test lock_persistent_graph no graph."""
@@ -245,10 +244,7 @@ def test_lock_persistent_graph(self, mocker: MockerFixture) -> None:
             "put_object_tagging",
             {},
             {
-                "Tagging": {
-                    # pylint: disable=protected-access
-                    "TagSet": [{"Key": obj._persistent_graph_lock_tag, "Value": "123"}]
-                },
+                "Tagging": {"TagSet": [{"Key": obj._persistent_graph_lock_tag, "Value": "123"}]},
                 **obj.persistent_graph_location,
             },
         )
@@ -269,13 +265,8 @@ def test_namespace(self) -> None:

     def test_namespace_delimiter(self) -> None:
         """Test namespace_delimiter."""
-        config = CfnginConfig.parse_obj(
-            {"namespace": "test", "namespace_delimiter": "."}
-        )
-        assert (
-            CfnginContext(config=config).namespace_delimiter
-            == config.namespace_delimiter
-        )
+        config = CfnginConfig.parse_obj({"namespace": "test", "namespace_delimiter": "."})
+        assert CfnginContext(config=config).namespace_delimiter == config.namespace_delimiter

     def test_persistent_graph_no_location(self, mocker: MockerFixture) -> None:
         """Test persistent_graph no persistent_graph_location."""
@@ -313,7 +304,7 @@ def test_persistent_graph_no_such_key(self, mocker: MockerFixture) -> None:
             "put_object",
             {},
             {
-                "Body": "{}".encode(),
+                "Body": b"{}",
                 "ServerSideEncryption": "AES256",
                 "ACL": "bucket-owner-full-control",
                 "ContentType": "application/json",
@@ -372,7 +363,7 @@ def test_persistent_graph_location_add_json(self) -> None:
             {"cfngin_bucket": "", "persistent_graph_key": "something"},
         ],
     )
-    def test_persistent_graph_location_empty(self, config_ext: Dict[str, str]) -> None:
+    def test_persistent_graph_location_empty(self, config_ext: dict[str, str]) -> None:
         """Test persistent_graph_location."""
         config = CfnginConfig.parse_obj({"namespace": "test", **config_ext})
         assert not CfnginContext(config=config).persistent_graph_location
@@ -449,9 +440,7 @@ def test_persistent_graph_tags(self, mocker: MockerFixture) -> None:

         obj = CfnginContext()
         stubber = Stubber(obj.s3_client)
-        stubber.add_response(
-            "get_object_tagging", {"TagSet": []}, obj.persistent_graph_location
-        )
+        stubber.add_response("get_object_tagging", {"TagSet": []}, obj.persistent_graph_location)
         stubber.add_response(
             "get_object_tagging",
             {"TagSet": [{"Key": "key", "Value": "val"}]},
@@ -476,9 +465,7 @@ def test_put_persistent_graph_empty(self, mocker: MockerFixture) -> None:
         with stubber:
             assert not obj.put_persistent_graph("123")

-    def test_put_persistent_graph_lock_code_mismatch(
-        self, mocker: MockerFixture
-    ) -> None:
+    def test_put_persistent_graph_lock_code_mismatch(self, mocker: MockerFixture) -> None:
         """Test put_persistent_graph lock code mismatch."""
         mocker.patch.object(
             CfnginContext,
@@ -526,9 +513,7 @@ def test_put_persistent_graph(self, mocker: MockerFixture) -> None:
             "put_object",
             {},
             {
-                "Body": json.dumps(
-                    self.persist_graph_raw, default=json_serial, indent=4
-                ).encode(),
+                "Body": json.dumps(self.persist_graph_raw, default=json_serial, indent=4).encode(),
                 "ServerSideEncryption": "AES256",
                 "ACL": "bucket-owner-full-control",
                 "ContentType": "application/json",
@@ -582,7 +567,7 @@ def test_set_hook_data_key_error(self) -> None:
     def test_set_hook_data_type_error(self, data: Any) -> None:
         """Test set_hook_data TypeError."""
         with pytest.raises(TypeError):
-            CfnginContext().set_hook_data("test", data)  # type: ignore
+            CfnginContext().set_hook_data("test", data)

     def test_set_hook_data(self) -> None:
         """Test set_hook_data."""
@@ -610,16 +595,12 @@ def test_stacks(self) -> None:

     def test_tags_empty(self) -> None:
         """Test tags empty."""
-        obj = CfnginContext(
-            config=CfnginConfig.parse_obj({"namespace": "test", "tags": {}})
-        )
+        obj = CfnginContext(config=CfnginConfig.parse_obj({"namespace": "test", "tags": {}}))
         assert obj.tags == {}

     def test_tags_none(self) -> None:
         """Test tags None."""
-        obj = CfnginContext(
-            config=CfnginConfig.parse_obj({"namespace": "test", "tags": None})
-        )
+        obj = CfnginContext(config=CfnginConfig.parse_obj({"namespace": "test", "tags": None}))
         assert obj.tags == {"cfngin_namespace": obj.config.namespace}

     def test_tags(self) -> None:
@@ -631,10 +612,7 @@ def test_tags(self) -> None:

     def test_template_indent(self) -> None:
         """Test template_indent."""
-        assert (
-            CfnginContext(config=self.config).template_indent
-            == self.config.template_indent
-        )
+        assert CfnginContext(config=self.config).template_indent == self.config.template_indent

     @pytest.mark.parametrize(
         "config, expected",
         [
@@ -646,16 +624,11 @@ def test_template_indent(self) -> None:
             ({"namespace": "test", "cfngin_bucket": "something"}, True),
         ],
     )
-    def test_upload_to_s3(self, config: Dict[str, Any], expected: bool) -> None:
+    def test_upload_to_s3(self, config: dict[str, Any], expected: bool) -> None:
         """Test upload_to_s3."""
-        assert (
-            CfnginContext(config=CfnginConfig.parse_obj(config)).upload_to_s3
-            is expected
-        )
+        assert CfnginContext(config=CfnginConfig.parse_obj(config)).upload_to_s3 is expected

-    def test_unlock_persistent_graph_empty_no_such_key(
-        self, mocker: MockerFixture
-    ) -> None:
+    def test_unlock_persistent_graph_empty_no_such_key(self, mocker: MockerFixture) -> None:
         """Test unlock_persistent_graph empty graph NoSuchKey."""
         mocker.patch.object(
             CfnginContext,
@@ -669,9 +642,7 @@ def test_unlock_persistent_graph_empty_no_such_key(
         with stubber:
             assert obj.unlock_persistent_graph("123")

-    def test_unlock_persistent_graph_lock_code_mismatch(
-        self, mocker: MockerFixture
-    ) -> None:
+    def test_unlock_persistent_graph_lock_code_mismatch(self, mocker: MockerFixture) -> None:
         """Test unlock_persistent_graph lock code mismatch."""
         mocker.patch.object(
             CfnginContext,
@@ -718,7 +689,7 @@ def test_unlock_persistent_graph_no_such_key(self, mocker: MockerFixture) -> Non
         stubber = Stubber(obj.s3_client)
         stubber.add_response(
             "get_object",
-            {"Body": "{}".encode()},
+            {"Body": b"{}"},
             {
                 "ResponseContentType": "application/json",
                 **obj.persistent_graph_location,
@@ -728,11 +699,9 @@ def test_unlock_persistent_graph_no_such_key(self, mocker: MockerFixture) -> Non
         with stubber:
             assert obj.unlock_persistent_graph("123")

-    @pytest.mark.parametrize(
-        "graph_dict", cast(List[Dict[str, List[str]]], [{"stack0": []}, {}])
-    )
+    @pytest.mark.parametrize("graph_dict", cast(list[dict[str, list[str]]], [{"stack0": []}, {}]))
     def test_unlock_persistent_graph(
-        self, graph_dict: Dict[str, List[str]], mocker: MockerFixture
+        self, graph_dict: dict[str, list[str]], mocker: MockerFixture
     ) -> None:
         """Test unlock_persistent_graph."""
         mocker.patch.object(
@@ -748,7 +717,7 @@ def test_unlock_persistent_graph(
         if not graph_dict:
             stubber.add_response(
                 "get_object",
-                {"Body": "{}".encode()},
+                {"Body": b"{}"},
                 {
                     "ResponseContentType": "application/json",
                     **obj.persistent_graph_location,
diff --git a/tests/unit/context/test_runway.py b/tests/unit/context/test_runway.py
index cbef68e81..76e2ba133 100644
--- a/tests/unit/context/test_runway.py
+++ b/tests/unit/context/test_runway.py
@@ -1,12 +1,11 @@
 """Test runway.context._runway."""

-# pyright: basic
 from __future__ import annotations

 from typing import TYPE_CHECKING, Any
+from unittest.mock import MagicMock

 import pytest
-from mock import MagicMock

 from runway.context._runway import RunwayContext
 from runway.context.sys_info import OsInfo
diff --git a/tests/unit/context/test_sys_info.py b/tests/unit/context/test_sys_info.py
index 0e831550c..e6d03143f 100644
--- a/tests/unit/context/test_sys_info.py
+++ b/tests/unit/context/test_sys_info.py
@@ -1,7 +1,5 @@
 """Test runway.context.sys_info."""

-# pylint: disable=invalid-name,unused-argument
-# pyright: basic
 from __future__ import annotations

 from typing import TYPE_CHECKING
@@ -16,43 +14,43 @@
 MODULE = "runway.context.sys_info"


-@pytest.fixture(scope="function")
-def clear_OsInfo() -> None:  # noqa
+@pytest.fixture()
+def clear_os_info() -> None:
     """Clear OsInfo singleton."""
     OsInfo.clear_singleton()


-@pytest.fixture(scope="function")
-def clear_SystemInfo() -> None:  # noqa
+@pytest.fixture()
+def clear_system_info() -> None:
     """Clear SystemInfo singleton."""
     SystemInfo.clear_singleton()


-@pytest.mark.usefixtures("clear_OsInfo")
+@pytest.mark.usefixtures("clear_os_info")
 class TestOsInfo:
     """Test OsInfo."""

-    def test_is_darwin_false(self, platform_linux: None) -> None:
+    def test_is_darwin_false(self, platform_linux: None) -> None:  # noqa: ARG002
         """Test is_darwin False."""
         assert not OsInfo().is_darwin

-    def test_is_darwin(self, platform_darwin: None) -> None:
+    def test_is_darwin(self, platform_darwin: None) -> None:  # noqa: ARG002
         """Test is_darwin."""
         assert OsInfo().is_darwin

-    def test_is_linux_false(self, platform_darwin: None) -> None:
+    def test_is_linux_false(self, platform_darwin: None) -> None:  # noqa: ARG002
         """Test is_linux False."""
         assert not OsInfo().is_linux

-    def test_is_linux(self, platform_linux: None) -> None:
+    def test_is_linux(self, platform_linux: None) -> None:  # noqa: ARG002
         """Test is_linux."""
         assert OsInfo().is_linux

-    def test_is_macos_false(self, platform_linux: None) -> None:
+    def test_is_macos_false(self, platform_linux: None) -> None:  # noqa: ARG002
         """Test is_macos False."""
         assert not OsInfo().is_macos

-    def test_is_macos(self, platform_darwin: None) -> None:
+    def test_is_macos(self, platform_darwin: None) -> None:  # noqa: ARG002
         """Test is_macos."""
         assert OsInfo().is_macos
@@ -68,23 +66,23 @@ def test_is_posix(self, mocker: MockerFixture) -> None:
         mock_os.name = "posix"
         assert OsInfo().is_posix

-    def test_is_windows_false(self, platform_linux: None) -> None:
+    def test_is_windows_false(self, platform_linux: None) -> None:  # noqa: ARG002
         """Test is_windows False."""
         assert not OsInfo().is_windows

-    def test_is_windows(self, platform_windows: None) -> None:
+    def test_is_windows(self, platform_windows: None) -> None:  # noqa: ARG002
         """Test is_windows."""
         assert OsInfo().is_windows

-    def test_name_darwin(self, platform_darwin: None) -> None:
+    def test_name_darwin(self, platform_darwin: None) -> None:  # noqa: ARG002
         """Test name darwin."""
         assert OsInfo().name == "darwin"

-    def test_name_linux(self, platform_linux: None) -> None:
+    def test_name_linux(self, platform_linux: None) -> None:  # noqa: ARG002
         """Test name linux."""
         assert OsInfo().name == "linux"

-    def test_name_windows(self, platform_windows: None) -> None:
+    def test_name_windows(self, platform_windows: None) -> None:  # noqa: ARG002
         """Test name windows."""
         assert OsInfo().name == "windows"
@@ -93,7 +91,7 @@ def test_singleton(self) -> None:
         assert id(OsInfo()) == id(OsInfo())


-@pytest.mark.usefixtures("clear_SystemInfo")
+@pytest.mark.usefixtures("clear_system_info")
 class TestSystemInfo:
     """Test SystemInfo."""
diff --git a/tests/unit/core/components/test_deploy_environment.py b/tests/unit/core/components/test__deploy_environment.py
similarity index 90%
rename from tests/unit/core/components/test_deploy_environment.py
rename to tests/unit/core/components/test__deploy_environment.py
index 35219b917..d57207bac 100644
--- a/tests/unit/core/components/test_deploy_environment.py
+++ b/tests/unit/core/components/test__deploy_environment.py
@@ -1,22 +1,20 @@
-"""Test runway.core.components.deploy_environment."""
+"""Test runway.core.components._deploy_environment."""

-# pylint: disable=protected-access
-# pyright: basic
+# ruff: noqa: SLF001
 from __future__ import annotations

 import logging
 import os
 from pathlib import Path
-from typing import TYPE_CHECKING, Dict, List
+from typing import TYPE_CHECKING
+from unittest.mock import MagicMock

 import pytest
 from git.exc import InvalidGitRepositoryError
-from mock import MagicMock

-from runway.core.components import DeployEnvironment
+from runway.core.components._deploy_environment import DeployEnvironment

 if TYPE_CHECKING:
-    from pytest import LogCaptureFixture
     from pytest_mock import MockerFixture

 MODULE = "runway.core.components._deploy_environment"
@@ -31,7 +29,7 @@
 class TestDeployEnvironment:
     """Test runway.core.components.DeployEnvironment."""

-    def test_init(self, cd_tmp_path: Path) -> None:
+    def test___init__(self, cd_tmp_path: Path) -> None:
         """Test attributes set by init."""
         new_dir = cd_tmp_path / "new_dir"
         obj = DeployEnvironment(
@@ -45,7 +43,7 @@ def test_init(self, cd_tmp_path: Path) -> None:
         assert obj.root_dir == new_dir
         assert obj.vars == {"key": "val"}

-    def test_init_defaults(self, cd_tmp_path: Path) -> None:
+    def test___init___defaults(self, cd_tmp_path: Path) -> None:
         """Test attributes set by init default values."""
         obj = DeployEnvironment()

@@ -54,6 +52,11 @@ def test_init_defaults(self, cd_tmp_path: Path) -> None:
         assert obj.root_dir == cd_tmp_path
         assert obj.vars == os.environ

+    def test___init___empty_environ(self) -> None:
+        """Test attributes set by init."""
+        obj = DeployEnvironment(environ={})
+        assert obj.vars == os.environ
+
     def test_boto3_credentials(self) -> None:
         """Test boto3_credentials."""
         obj = DeployEnvironment(environ=TEST_CREDENTIALS)
@@ -99,9 +102,7 @@ def test_branch_name(self, mocker: MockerFixture) -> None:
         obj = DeployEnvironment()

         assert obj.branch_name == branch_name
-        mock_git.Repo.assert_called_once_with(
-            os.getcwd(), search_parent_directories=True
-        )
+        mock_git.Repo.assert_called_once_with(str(Path.cwd()), search_parent_directories=True)

     def test_branch_name_invalid_repo(self, mocker: MockerFixture) -> None:
         """Test branch_name handle InvalidGitRepositoryError."""
@@ -110,12 +111,10 @@ def test_branch_name_invalid_repo(self, mocker: MockerFixture) -> None:
         obj = DeployEnvironment()

         assert obj.branch_name is None
-        mock_git.Repo.assert_called_once_with(
-            os.getcwd(), search_parent_directories=True
-        )
+        mock_git.Repo.assert_called_once_with(str(Path.cwd()), search_parent_directories=True)

     def test_branch_name_no_git(
-        self, mocker: MockerFixture, caplog: LogCaptureFixture
+        self, mocker: MockerFixture, caplog: pytest.LogCaptureFixture
     ) -> None:
         """Test branch_name git ImportError."""
         caplog.set_level(logging.DEBUG, logger="runway.core.components")
@@ -129,7 +128,7 @@ def test_branch_name_no_git(
         ) in caplog.messages

     def test_branch_name_type_error(
-        self, mocker: MockerFixture, caplog: LogCaptureFixture
+        self, mocker: MockerFixture, caplog: pytest.LogCaptureFixture
     ) -> None:
         """Test branch_name handle TypeError."""
         caplog.set_level(logging.WARNING, logger="runway")
         mock_git = mocker.patch(f"{MODULE}.git")
         mock_git.Repo.side_effect = TypeError

         with pytest.raises(SystemExit) as excinfo:
-            obj = DeployEnvironment()
-            assert not obj.branch_name
+            assert not DeployEnvironment().branch_name

         assert excinfo.value.code == 1
         assert "Unable to retrieve the current git branch name!" in caplog.messages
@@ -255,7 +253,7 @@ def test_name(self) -> None:
         ],
     )
     def test_name_from_branch(
-        self, branch: str, environ: Dict[str, str], expected: str, mocker: MockerFixture
+        self, branch: str, environ: dict[str, str], expected: str, mocker: MockerFixture
     ) -> None:
         """Test name from branch."""
         mock_prompt = MagicMock(return_value="user_value")
@@ -317,8 +315,7 @@ def test_copy(self, mocker: MockerFixture, tmp_path: Path) -> None:
         (
             "explicit",
             [
-                'deploy environment "test" is explicitly defined '
-                "in the environment",
+                'deploy environment "test" is explicitly defined in the environment',
                 "if not correct, update the value or unset it to "
                 "fall back to the name of the current git branch "
                 "or parent directory",
             ],
         ),
@@ -327,8 +324,7 @@ def test_copy(self, mocker: MockerFixture, tmp_path: Path) -> None:
         (
             "branch",
             [
-                'deploy environment "test" was determined from the '
-                "current git branch",
+                'deploy environment "test" was determined from the current git branch',
                 "if not correct, update the branch name or set an "
                 "override via the DEPLOY_ENVIRONMENT environment "
                 "variable",
             ],
         ),
@@ -337,8 +333,7 @@ def test_copy(self, mocker: MockerFixture, tmp_path: Path) -> None:
         (
             "directory",
             [
-                'deploy environment "test" was determined from '
-                "the current directory",
+                'deploy environment "test" was determined from the current directory',
                 "if not correct, update the directory name or "
                 "set an override via the DEPLOY_ENVIRONMENT "
                 "environment variable",
             ],
         ),
@@ -349,8 +344,8 @@
     def test_log_name(
         self,
         derived_from: str,
-        expected: List[str],
-        caplog: LogCaptureFixture,
+        expected: list[str],
+        caplog: pytest.LogCaptureFixture,
         mocker: MockerFixture,
     ) -> None:
         """Test log_name."""
diff --git a/tests/unit/core/components/test_deployment.py b/tests/unit/core/components/test__deployment.py
similarity index 88%
rename from tests/unit/core/components/test_deployment.py
rename to tests/unit/core/components/test__deployment.py
index 5326d831f..17104d70b 100644
--- a/tests/unit/core/components/test_deployment.py
+++ b/tests/unit/core/components/test__deployment.py
@@ -1,26 +1,24 @@
-"""Test runway.core.components.deployment."""
+"""Test runway.core.components._deployment."""

-# pylint: disable=protected-access
-# pyright: basic
+# ruff: noqa: SLF001
 from __future__ import annotations

 import logging
-from typing import TYPE_CHECKING, Any, Dict, List, cast
+from typing import TYPE_CHECKING, Any, cast
+from unittest.mock import ANY, MagicMock, Mock, PropertyMock, call

 import pytest
-from mock import ANY, MagicMock, Mock, PropertyMock, call

 from runway.config.components.runway import (
     RunwayDeploymentDefinition,
     RunwayVariablesDefinition,
 )
 from runway.config.models.runway import RunwayFutureDefinitionModel
-from runway.core.components import Deployment
+from runway.core.components._deployment import Deployment
 from runway.exceptions import UnresolvedVariable
 from runway.variables import Variable

 if TYPE_CHECKING:
-    from pytest import LogCaptureFixture
     from pytest_mock import MockerFixture

     from runway.core.type_defs import RunwayActionTypeDef
@@ -102,7 +100,7 @@ def test___init___args(
     def test_assume_role_config(
         self,
         config: str,
-        expected: Dict[str, Any],
+        expected: dict[str, Any],
         fx_deployments: YamlLoaderDeployment,
         runway_context: MockRunwayContext,
     ) -> None:
@@ -117,9 +115,7 @@ def test_env_vars_config_raise_unresolved_variable(
         runway_context: MockRunwayContext,
     ) -> None:
UnresolvedVariable.""" - mocker.patch.object( - Deployment, "_Deployment__merge_env_vars", Mock(return_value=None) - ) + mocker.patch.object(Deployment, "_Deployment__merge_env_vars", Mock(return_value=None)) mocker.patch.object( RunwayDeploymentDefinition, "env_vars", @@ -133,13 +129,12 @@ def test_env_vars_config_raise_unresolved_variable( ) with pytest.raises(UnresolvedVariable): - obj = Deployment( + assert not Deployment( context=runway_context, definition=RunwayDeploymentDefinition.parse_obj( - cast(Dict[str, Any], fx_deployments.get("min_required")) + cast(dict[str, Any], fx_deployments.get("min_required")) ), - ) - assert not obj.env_vars_config + ).env_vars_config def test_env_vars_config_unresolved( self, @@ -149,9 +144,7 @@ def test_env_vars_config_unresolved( ) -> None: """Test env_vars_config unresolved.""" expected = {"key": "val"} - mocker.patch.object( - Deployment, "_Deployment__merge_env_vars", Mock(return_value=None) - ) + mocker.patch.object(Deployment, "_Deployment__merge_env_vars", Mock(return_value=None)) mocker.patch.object( RunwayDeploymentDefinition, "env_vars", @@ -168,9 +161,7 @@ def test_env_vars_config_unresolved( ) variable = Mock(value=expected) - raw_deployment: Dict[str, Any] = cast( - Dict[str, Any], fx_deployments.get("min_required") - ) + raw_deployment: dict[str, Any] = cast(dict[str, Any], fx_deployments.get("min_required")) deployment = RunwayDeploymentDefinition.parse_obj(raw_deployment) obj = Deployment(context=runway_context, definition=deployment) obj.definition._vars.update({"env_vars": variable}) @@ -191,7 +182,7 @@ def test_env_vars_config_unresolved( def test_regions( self, config: str, - expected: List[str], + expected: list[str], fx_deployments: YamlLoaderDeployment, runway_context: MockRunwayContext, ) -> None: @@ -230,15 +221,13 @@ def test_deploy( """Test deploy.""" mock_run = MagicMock() mocker.patch.object(Deployment, "run", mock_run) - obj = Deployment( - context=runway_context, definition=fx_deployments.load("min_required") - ) + obj = Deployment(context=runway_context, definition=fx_deployments.load("min_required")) assert not obj.deploy() mock_run.assert_called_once_with("deploy", "us-east-1") def test_deploy_async( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, fx_deployments: YamlLoaderDeployment, mocker: MockerFixture, runway_context: MockRunwayContext, @@ -273,7 +262,7 @@ def test_deploy_async( def test_deploy_sync( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, fx_deployments: YamlLoaderDeployment, mocker: MockerFixture, runway_context: MockRunwayContext, @@ -289,12 +278,8 @@ def test_deploy_sync( definition=fx_deployments.load("simple_parallel_regions"), ) assert not obj.deploy() - assert ( - "unnamed_deployment:processing regions sequentially..." in caplog.messages - ) - mock_run.assert_has_calls( - [call("deploy", "us-east-1"), call("deploy", "us-west-2")] - ) + assert "unnamed_deployment:processing regions sequentially..." 
+        assert "unnamed_deployment:processing regions sequentially..." in caplog.messages
+        mock_run.assert_has_calls([call("deploy", "us-east-1"), call("deploy", "us-west-2")])

     @pytest.mark.parametrize("async_used", [(True), (False)])
     def test_destroy(
@@ -327,7 +312,7 @@ def test_destroy(
     def test_init(
         self,
         async_used: bool,
-        caplog: LogCaptureFixture,
+        caplog: pytest.LogCaptureFixture,
         fx_deployments: YamlLoaderDeployment,
         mocker: MockerFixture,
         runway_context: MockRunwayContext,
@@ -356,7 +341,7 @@ def test_init(
     def test_plan(
         self,
         async_used: bool,
-        caplog: LogCaptureFixture,
+        caplog: pytest.LogCaptureFixture,
         fx_deployments: YamlLoaderDeployment,
         mocker: MockerFixture,
         runway_context: MockRunwayContext,
@@ -421,11 +406,9 @@ def test_run_async(
     ) -> None:
         """Test run async."""
         mocker.patch(f"{MODULE}.aws")
-        # ensure that mock.MagicMock is used for backported features
         mock_module = mocker.patch(f"{MODULE}.Module", MagicMock())
         definition = fx_deployments.load("simple_parallel_regions")
         runway_context._use_concurrent = True
-        # ensure that mock.MagicMock is used for backported features
         mock_resolve = mocker.patch.object(definition, "resolve", MagicMock())
         mocker.patch.object(Deployment, "validate_account_credentials")
         obj = Deployment(context=runway_context, definition=definition)
@@ -434,16 +417,15 @@ def test_run_async(

         new_ctx = mock_resolve.call_args.args[0]
         assert new_ctx != runway_context
-        assert new_ctx.command == "destroy" and runway_context.command != "destroy"
-        assert (
-            new_ctx.env.aws_region == "us-west-2"
-            and runway_context.env.aws_region != "us-west-2"
-        )
+        assert new_ctx.command == "destroy"
+        assert runway_context.command != "destroy"
+        assert new_ctx.env.aws_region == "us-west-2"
+        assert runway_context.env.aws_region != "us-west-2"

         assert mock_module.run_list.call_args.kwargs["context"] == new_ctx

     def test_validate_account_credentials(
         self,
-        caplog: LogCaptureFixture,
+        caplog: pytest.LogCaptureFixture,
         mocker: MockerFixture,
         fx_deployments: YamlLoaderDeployment,
         runway_context: MockRunwayContext,
@@ -451,9 +433,7 @@ def test_validate_account_credentials(
         """Test validate_account_credentials."""
         caplog.set_level(logging.INFO, logger="runway")
         mock_aws = mocker.patch(f"{MODULE}.aws")
-        obj = Deployment(
-            context=runway_context, definition=fx_deployments.load("validate_account")
-        )
+        obj = Deployment(context=runway_context, definition=fx_deployments.load("validate_account"))

         account = MagicMock()
         account.aliases = ["no-match"]
@@ -462,9 +442,7 @@ def test_validate_account_credentials(
         with pytest.raises(SystemExit) as excinfo:
             assert obj.validate_account_credentials()
         assert excinfo.value.code == 1
-        assert 'does not match required account "123456789012"' in "\n".join(
-            caplog.messages
-        )
+        assert 'does not match required account "123456789012"' in "\n".join(caplog.messages)
         caplog.clear()
         del excinfo
@@ -510,10 +488,6 @@ def test_run_list(
             future=None,  # type: ignore
             variables=mock_vars,
         )
-        dep0.resolve.assert_called_once_with(
-            runway_context, variables=mock_vars, pre_process=True
-        )
-        dep1.resolve.assert_called_once_with(
-            runway_context, variables=mock_vars, pre_process=True
-        )
+        dep0.resolve.assert_called_once_with(runway_context, variables=mock_vars, pre_process=True)
+        dep1.resolve.assert_called_once_with(runway_context, variables=mock_vars, pre_process=True)
         mock_action.assert_called_once_with()
diff --git a/tests/unit/core/components/test_module.py b/tests/unit/core/components/test__module.py
similarity index 95%
rename from tests/unit/core/components/test_module.py
rename to tests/unit/core/components/test__module.py
index 3f6fe9cff..b8194714a 100644
--- a/tests/unit/core/components/test_module.py
+++ b/tests/unit/core/components/test__module.py
@@ -1,15 +1,13 @@
 """Test runway.core.components._module."""

-# pylint: disable=protected-access,redefined-outer-name,unused-argument
-# pyright: basic
 from __future__ import annotations

 import logging
-from typing import TYPE_CHECKING, Any, List, Optional, cast
+from typing import TYPE_CHECKING, Any, cast
+from unittest.mock import MagicMock, call

 import pytest
 import yaml
-from mock import MagicMock, call

 from runway.core.components import Deployment, Module
 from runway.core.components._module import validate_environment
@@ -17,7 +15,6 @@
 if TYPE_CHECKING:
     from pathlib import Path

-    from pytest import LogCaptureFixture
     from pytest_mock import MockerFixture

     from ...factories import MockRunwayContext, YamlLoaderDeployment
@@ -25,7 +22,7 @@
 MODULE = "runway.core.components._module"


-@pytest.fixture(scope="function")
+@pytest.fixture()
 def empty_opts_from_file(mocker: MockerFixture) -> None:
     """Empty Module.opts_from_file."""
     mocker.patch.object(Module, "opts_from_file", {})
@@ -78,7 +75,7 @@ def test_child_modules(

     def test_environment_matches_defined(
         self,
-        cd_tmp_path: Path,
+        cd_tmp_path: Path,  # noqa: ARG002
         fx_deployments: YamlLoaderDeployment,
         mocker: MockerFixture,
         runway_context: MockRunwayContext,
@@ -100,7 +97,7 @@ def test_environment_matches_defined(
     def test_environments_deployment(
         self,
         cd_tmp_path: Path,
-        empty_opts_from_file: None,
+        empty_opts_from_file: None,  # noqa: ARG002
         fx_deployments: YamlLoaderDeployment,
         runway_context: MockRunwayContext,
     ) -> None:
@@ -125,9 +122,7 @@ def test_environments_opts_from_file(
     ) -> None:
         """Test environments with opts_from_file."""
         runway_context.env.root_dir = cd_tmp_path
-        mocker.patch.object(
-            Module, "opts_from_file", {"environments": {"test": ["us-east-1"]}}
-        )
+        mocker.patch.object(Module, "opts_from_file", {"environments": {"test": ["us-east-1"]}})
         deployment = fx_deployments.load("environments_map_str")
         mod = Module(
             context=runway_context,
@@ -182,7 +177,7 @@ def test_path(
     def test_payload_with_deployment(
         self,
         cd_tmp_path: Path,
-        empty_opts_from_file: None,
+        empty_opts_from_file: None,  # noqa: ARG002
         fx_deployments: YamlLoaderDeployment,
         runway_context: MockRunwayContext,
     ) -> None:
@@ -238,7 +233,7 @@ def test_should_skip(
         fx_deployments: YamlLoaderDeployment,
         mocker: MockerFixture,
         runway_context: MockRunwayContext,
-        validate: Optional[bool],
+        validate: bool | None,
     ) -> None:
         """Test should_skip."""
         mocker.patch.object(Module, "environment_matches_defined", validate)
@@ -258,9 +253,7 @@ def test_type(
         runway_context: MockRunwayContext,
     ) -> None:
         """Test type."""
-        mock_path = mocker.patch(
-            f"{MODULE}.ModulePath", module_root=runway_context.env.root_dir
-        )
+        mock_path = mocker.patch(f"{MODULE}.ModulePath", module_root=runway_context.env.root_dir)
         mock_type = mocker.patch(f"{MODULE}.RunwayModuleType")
         mock_type.return_value = mock_type
         mocker.patch.object(Module, "path", mock_path)
@@ -313,9 +306,7 @@ def test_use_async(
         use_concurrent: bool,
     ) -> None:
         """Test use_async."""
-        obj = Module(
-            context=runway_context, definition=fx_deployments.load(config).modules[0]
-        )
+        obj = Module(context=runway_context, definition=fx_deployments.load(config).modules[0])
         obj.ctx._use_concurrent = use_concurrent  # type: ignore
         assert obj.use_async == expected
@@ -336,7 +327,7 @@ def test_deploy(

     def test_deploy_async(
         self,
-        caplog: LogCaptureFixture,
+        caplog: pytest.LogCaptureFixture,
         fx_deployments: YamlLoaderDeployment,
         mocker: MockerFixture,
         runway_context: MockRunwayContext,
@@ -374,7 +365,7 @@ def test_deploy_async(

     def test_deploy_sync(
         self,
-        caplog: LogCaptureFixture,
+        caplog: pytest.LogCaptureFixture,
         fx_deployments: YamlLoaderDeployment,
         mocker: MockerFixture,
         runway_context: MockRunwayContext,
@@ -390,7 +381,7 @@ def test_deploy_sync(
         )
         assert not mod.deploy()
         assert "parallel_parent:processing modules sequentially..." in caplog.messages
-        mock_run.assert_has_calls([call("deploy"), call("deploy")])  # type: ignore
+        mock_run.assert_has_calls([call("deploy"), call("deploy")])

     @pytest.mark.parametrize("async_used", [(True), (False)])
     def test_destroy(
@@ -484,7 +475,7 @@ def test_init_no_children(
     def test_plan(
         self,
         async_used: bool,
-        caplog: LogCaptureFixture,
+        caplog: pytest.LogCaptureFixture,
         fx_deployments: YamlLoaderDeployment,
         mocker: MockerFixture,
         runway_context: MockRunwayContext,
@@ -512,7 +503,7 @@ def test_plan(
     def test_plan_no_children(
         self,
         async_used: bool,
-        caplog: LogCaptureFixture,
+        caplog: pytest.LogCaptureFixture,
         fx_deployments: YamlLoaderDeployment,
         mocker: MockerFixture,
         runway_context: MockRunwayContext,
@@ -534,7 +525,7 @@ def test_plan_no_children(

     def test_run(
         self,
-        empty_opts_from_file: None,
+        empty_opts_from_file: None,  # noqa: ARG002
         fx_deployments: YamlLoaderDeployment,
         mocker: MockerFixture,
         runway_context: MockRunwayContext,
@@ -560,9 +551,7 @@ def test_run(
         mocker.patch.object(Module, "should_skip", False)
         assert not mod.run("deploy")
         mock_change_dir.assert_called_once_with(tmp_path)
-        mock_type.module_class.assert_called_once_with(
-            mod.ctx, module_root=tmp_path, **mod.payload
-        )
+        mock_type.module_class.assert_called_once_with(mod.ctx, module_root=tmp_path, **mod.payload)
         mock_inst["deploy"].assert_called_once_with()
         del mock_inst.deploy
@@ -642,10 +631,10 @@ def test_run_list(
     ],
 )
 def test_validate_environment(
-    caplog: LogCaptureFixture,
+    caplog: pytest.LogCaptureFixture,
     env_def: Any,
-    expected_logs: List[str],
-    expected: Optional[bool],
+    expected_logs: list[str],
+    expected: bool | None,
     mocker: MockerFixture,
     runway_context: MockRunwayContext,
 ) -> None:
@@ -653,7 +642,7 @@ def test_validate_environment(
     caplog.set_level(logging.DEBUG, logger="runway")
     mocker.patch(
         f"{MODULE}.aws",
-        **{"AccountDetails.return_value": MagicMock(id="123456789012")},
+        **{"AccountDetails.return_value": MagicMock(id="123456789012")},  # type: ignore
     )
     assert validate_environment(runway_context, env_def) is expected
     # all() does not give an output that can be used for troubleshooting failures
diff --git a/tests/unit/core/components/test_module_path.py b/tests/unit/core/components/test__module_path.py
similarity index 89%
rename from tests/unit/core/components/test_module_path.py
rename to tests/unit/core/components/test__module_path.py
index a005666a2..c88b71ab2 100644
--- a/tests/unit/core/components/test_module_path.py
+++ b/tests/unit/core/components/test__module_path.py
@@ -1,14 +1,13 @@
 """Test runway.core.components._module_path."""

-# pyright: basic
 from __future__ import annotations

 from copy import deepcopy
 from pathlib import Path
-from typing import TYPE_CHECKING, Dict, List, Optional, Union
+from typing import TYPE_CHECKING
+from unittest.mock import MagicMock

 import pytest
-from mock import MagicMock
 from typing_extensions import TypedDict

 from runway.config.components.runway import RunwayModuleDefinition
@@ -23,20 +22,19 @@
 MODULE = "runway.core.components._module_path"
-TypeDefTestDefinitionExpected = TypedDict(
-    "TypeDefTestDefinitionExpected",
-    arguments=Dict[str, str],
-    location=str,
-    source=str,
-    uri=str,
-)
-TypeDefTestDefinition = TypedDict(
-    "TypeDefTestDefinition",
-    definition=Optional[Union[Path, str]],
-    expected=TypeDefTestDefinitionExpected,
-)
+class TypeDefTestDefinitionExpected(TypedDict):  # noqa: D101
+    arguments: dict[str, str]
+    location: str
+    source: str
+    uri: str

-TESTS: List[TypeDefTestDefinition] = [
+
+class TypeDefTestDefinition(TypedDict):  # noqa: D101
+    definition: Path | str | None
+    expected: TypeDefTestDefinitionExpected
+
+
+TESTS: list[TypeDefTestDefinition] = [
     {
         "definition": "git::git://github.com/onicagroup/foo/foo-bar.git",
         "expected": {
@@ -92,7 +90,7 @@
         },
     },
     {
-        "definition": "git::git://github.com/onicagroup/foo/bar.git//src/foo/bar?branch=foo&bar=baz",  # noqa
+        "definition": "git::git://github.com/onicagroup/foo/bar.git//src/foo/bar?branch=foo&bar=baz",
         "expected": {
             "location": "src/foo/bar",
             "arguments": {"branch": "foo", "bar": "baz"},
@@ -245,10 +243,7 @@ def test_module_root(
         if isinstance(test["definition"], (type(None), Path)):
             assert obj.module_root == test["definition"] or Path.cwd()
         elif test["expected"]["source"] == "local":
-            assert (
-                obj.module_root
-                == deploy_environment.root_dir / test["expected"]["location"]
-            )
+            assert obj.module_root == deploy_environment.root_dir / test["expected"]["location"]
         else:
             assert (
                 obj.module_root
@@ -297,19 +292,13 @@
                 == test["expected"]["uri"]
             )

-    def test_parse_obj_none(
-        self, deploy_environment: DeployEnvironment, tmp_path: Path
-    ) -> None:
+    def test_parse_obj_none(self, deploy_environment: DeployEnvironment, tmp_path: Path) -> None:
         """Test parse_obj None."""
-        obj = ModulePath.parse_obj(
-            None, cache_dir=tmp_path, deploy_environment=deploy_environment
-        )
+        obj = ModulePath.parse_obj(None, cache_dir=tmp_path, deploy_environment=deploy_environment)
         assert obj.definition == Path.cwd()
         assert obj.env == deploy_environment

-    def test_parse_obj_path(
-        self, deploy_environment: DeployEnvironment, tmp_path: Path
-    ) -> None:
+    def test_parse_obj_path(self, deploy_environment: DeployEnvironment, tmp_path: Path) -> None:
         """Test parse_obj Path."""
         obj = ModulePath.parse_obj(
             tmp_path, cache_dir=tmp_path, deploy_environment=deploy_environment
@@ -334,9 +323,7 @@ def test_parse_obj_runway_config(
         assert obj1.definition == model.path
         assert obj1.env == deploy_environment

-    def test_parse_obj_str(
-        self, deploy_environment: DeployEnvironment, tmp_path: Path
-    ) -> None:
+    def test_parse_obj_str(self, deploy_environment: DeployEnvironment, tmp_path: Path) -> None:
         """Test parse_obj str."""
         obj = ModulePath.parse_obj(
             "./test", cache_dir=tmp_path, deploy_environment=deploy_environment
diff --git a/tests/unit/core/components/test_module_type.py b/tests/unit/core/components/test__module_type.py
similarity index 91%
rename from tests/unit/core/components/test_module_type.py
rename to tests/unit/core/components/test__module_type.py
index bc6432678..10b4c84bb 100644
--- a/tests/unit/core/components/test_module_type.py
+++ b/tests/unit/core/components/test__module_type.py
@@ -1,10 +1,9 @@
 """Test runway.core.components._module_type."""

-# pyright: basic
 from __future__ import annotations

 import logging
-from typing import TYPE_CHECKING, List, Type
+from typing import TYPE_CHECKING

 import pytest

@@ -19,8 +18,6 @@
 if TYPE_CHECKING:
     from pathlib import Path

-    from pytest import LogCaptureFixture
-
     from runway.config.models.runway import RunwayModuleTypeTypeDef
     from runway.module.base import RunwayModule
@@ -43,7 +40,7 @@ class TestRunwayModuleType:
         ],
     )
     def test_autodetection(
-        self, files: List[str], expected: Type[RunwayModule], cd_tmp_path: Path
+        self, files: list[str], expected: type[RunwayModule], cd_tmp_path: Path
     ) -> None:
         """Test from autodetection."""
         for file_path in files:
@@ -57,9 +54,7 @@ def test_autodetection(
         assert not result.type_str
         assert result.module_class.__name__ == expected.__name__

-    def test_autodetection_fail(
-        self, caplog: LogCaptureFixture, cd_tmp_path: Path
-    ) -> None:
+    def test_autodetection_fail(self, caplog: pytest.LogCaptureFixture, cd_tmp_path: Path) -> None:
         """Test autodetection fail."""
         caplog.set_level(logging.ERROR, logger="runway")
         with pytest.raises(SystemExit) as excinfo:
@@ -90,7 +85,7 @@ def test_from_class_path(self, cd_tmp_path: Path) -> None:
         ],
     )
     def test_from_extension(
-        self, ext: str, expected: Type[RunwayModule], cd_tmp_path: Path
+        self, ext: str, expected: type[RunwayModule], cd_tmp_path: Path
     ) -> None:
         """Test from path extension."""
         filename = "filename." + ext
@@ -111,7 +106,7 @@ def test_from_extension(
     def test_from_type_str(
         self,
         type_str: RunwayModuleTypeTypeDef,
-        expected: Type[RunwayModule],
+        expected: type[RunwayModule],
         cd_tmp_path: Path,
     ) -> None:
         """Test from type_str."""
diff --git a/tests/unit/core/providers/aws/s3/_helpers/conftest.py b/tests/unit/core/providers/aws/s3/_helpers/conftest.py
index fa104748e..d9daae69e 100644
--- a/tests/unit/core/providers/aws/s3/_helpers/conftest.py
+++ b/tests/unit/core/providers/aws/s3/_helpers/conftest.py
@@ -2,18 +2,23 @@

 from __future__ import annotations

-from pathlib import Path
-from typing import List
+from typing import TYPE_CHECKING

 import pytest
 from typing_extensions import TypedDict

-LocalFiles = TypedDict(
-    "LocalFiles", files=List[Path], local_dir=Path, local_file=Path, tmp_path=Path
-)
+if TYPE_CHECKING:
+    from pathlib import Path


-@pytest.fixture(scope="function")
+class LocalFiles(TypedDict):
+    files: list[Path]
+    local_dir: Path
+    local_file: Path
+    tmp_path: Path
+
+
+@pytest.fixture()
 def loc_files(tmp_path: Path) -> LocalFiles:
     """Fixture for creating local files."""
     file0 = tmp_path / "some_directory" / "text0.txt"
diff --git a/tests/unit/core/providers/aws/s3/_helpers/factories.py b/tests/unit/core/providers/aws/s3/_helpers/factories.py
index 50ceb715b..39563309b 100644
--- a/tests/unit/core/providers/aws/s3/_helpers/factories.py
+++ b/tests/unit/core/providers/aws/s3/_helpers/factories.py
@@ -2,13 +2,13 @@

 from __future__ import annotations

-from typing import Any, Dict, Optional
+from typing import Any


 class FakeTransferFutureCallArgs:
     """Fake TransferFutureCallArgs."""

-    def __init__(self, *, extra_args: Optional[Dict[str, Any]] = None, **kwargs: Any):
+    def __init__(self, *, extra_args: dict[str, Any] | None = None, **kwargs: Any) -> None:
         """Instantiate class."""
         self.extra_args = extra_args or {}
         for kwarg, val in kwargs.items():
@@ -20,10 +20,10 @@ class FakeTransferFutureMeta:

     def __init__(
         self,
-        size: Optional[int] = None,
-        call_args: Optional[FakeTransferFutureCallArgs] = None,
-        transfer_id: Optional[str] = None,
-    ):
+        size: int | None = None,
+        call_args: FakeTransferFutureCallArgs | None = None,
+        transfer_id: str | None = None,
+    ) -> None:
         """Instantiate class."""
         self.size = size
         self.call_args = call_args or FakeTransferFutureCallArgs()
@@ -35,16 +35,16 @@ class FakeTransferFuture:

     def __init__(
         self,
-        result: Optional[str] = None,
-        exception: Exception = None,
+        result: str | None = None,
+        exception: Exception | None = None,
         meta: FakeTransferFutureMeta = None,
-    ):
+    ) -> None:
         """Instantiate class."""
         self._result = result
         self._exception = exception
         self.meta = meta or FakeTransferFutureMeta()

-    def result(self) -> Optional[str]:
+    def result(self) -> str | None:
         """Return result."""
         if self._exception:
             raise self._exception
diff --git a/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_base.py b/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_base.py
index 446e27fbd..b889e5829 100644
--- a/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_base.py
+++ b/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_base.py
@@ -3,10 +3,10 @@
 from __future__ import annotations

 import datetime
-from typing import TYPE_CHECKING, List, Optional, cast
+from typing import TYPE_CHECKING, Any, cast
+from unittest.mock import Mock

 import pytest
-from mock import Mock

 from runway.core.providers.aws.s3._helpers.file_generator import FileStats
 from runway.core.providers.aws.s3._helpers.sync_strategy.base import (
@@ -37,23 +37,20 @@ def test_compare_size(self, dest_size: int, expected: bool, src_size: int) -> No
         dest_file = FileStats(src="", size=dest_size)
         assert BaseSync.compare_size(src_file, dest_file) is expected

-    @pytest.mark.parametrize(
-        "src, dest", [(None, None), (Mock(), None), (None, Mock())]
-    )
+    @pytest.mark.parametrize("src, dest", [(None, None), (Mock(), None), (None, Mock())])
     def test_compare_size_raise_value_error(
-        self, dest: Optional[FileStats], src: Optional[FileStats]
+        self, dest: FileStats | None, src: FileStats | None
     ) -> None:
         """Test compare_time."""
-        with pytest.raises(ValueError) as excinfo:
+        with pytest.raises(ValueError, match="src_file and dest_file must not be None"):
             BaseSync().compare_size(src, dest)
-        assert str(excinfo.value) == "src_file and dest_file must not be None"

     def test_compare_time(self) -> None:
         """Test compare_time."""
-        obj = BaseSync()
+        obj: BaseSync[Any] = BaseSync()
         now = datetime.datetime.now()
         future = now + datetime.timedelta(0, 15)
-        kwargs = {"src": "", "operation_name": "invalid"}
+        kwargs: dict[str, Any] = {"src": "", "operation_name": "invalid"}
         assert (
             obj.compare_time(
                 FileStats(last_update=now, **kwargs),
@@ -79,10 +76,10 @@ def test_compare_time(self) -> None:
     @pytest.mark.parametrize("operation_name", ["copy", "upload"])
     def test_compare_time_copy_or_upload(self, operation_name: str) -> None:
         """Test compare_time."""
-        obj = BaseSync()
+        obj: BaseSync[Any] = BaseSync()
         now = datetime.datetime.now()
         future = now + datetime.timedelta(0, 15)
-        kwargs = {"src": "", "operation_name": operation_name}
+        kwargs: dict[str, Any] = {"src": "", "operation_name": operation_name}
         assert (
             obj.compare_time(
                 FileStats(last_update=now, **kwargs),
@@ -107,10 +104,10 @@ def test_compare_time_copy_or_upload(self, operation_name: str) -> None:

     def test_compare_time_download(self) -> None:
         """Test compare_time."""
-        obj = BaseSync()
+        obj: BaseSync[Any] = BaseSync()
         now = datetime.datetime.now()
         future = now + datetime.timedelta(0, 15)
-        kwargs = {"src": "", "operation_name": "download"}
+        kwargs: dict[str, Any] = {"src": "", "operation_name": "download"}
         assert (
             obj.compare_time(
                 FileStats(last_update=now, **kwargs),
@@ -133,16 +130,13 @@ def test_compare_time_download(self) -> None:
             is True
         )

-    @pytest.mark.parametrize(
-        "src, dest", [(None, None), (Mock(), None), (None, Mock())]
-    )
None), (None, Mock())]) def test_compare_time_raise_value_error( - self, dest: Optional[FileStats], src: Optional[FileStats] + self, dest: FileStats | None, src: FileStats | None ) -> None: """Test compare_time.""" - with pytest.raises(ValueError) as excinfo: + with pytest.raises(ValueError, match="src_file and dest_file must not be None"): BaseSync().compare_time(src, dest) - assert str(excinfo.value) == "src_file and dest_file must not be None" def test_determine_should_sync(self) -> None: """Test determine_should_sync.""" @@ -151,16 +145,16 @@ def test_determine_should_sync(self) -> None: def test_init(self) -> None: """Test __init__.""" - valid_sync_types: List[ValidSyncType] = [ + valid_sync_types: list[ValidSyncType] = [ "file_at_src_and_dest", "file_not_at_dest", "file_not_at_src", ] for sync_type in valid_sync_types: - strategy = BaseSync(sync_type) + strategy: BaseSync[Any] = BaseSync(sync_type) assert strategy.sync_type == sync_type - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unknown sync_type"): BaseSync("invalid_sync_type") # type: ignore def test_name(self) -> None: @@ -170,23 +164,17 @@ def test_name(self) -> None: def test_register_strategy(self) -> None: """Test register_strategy.""" session = Mock() - obj = BaseSync() + obj: BaseSync[Any] = BaseSync() obj.register_strategy(session) register_args = cast(Mock, session.register).call_args_list assert register_args[0][0][0] == "choosing-s3-sync-strategy" - # pylint: disable=comparison-with-callable assert register_args[0][0][1] == obj.use_sync_strategy def test_use_sync_strategy(self, mocker: MockerFixture) -> None: """Test use_sync_strategy.""" - assert ( - BaseSync().use_sync_strategy( - {"invalid_sync_strategy": True} # type: ignore - ) - is None - ) + assert BaseSync().use_sync_strategy({"invalid_sync_strategy": True}) is None # type: ignore mocker.patch.object(BaseSync, "name", "something") - obj = BaseSync() + obj: BaseSync[Any] = BaseSync() assert obj.use_sync_strategy({"something": True}) == obj # type: ignore @@ -213,9 +201,7 @@ def test_determine_should_sync( MissingFileSync, "compare_time", return_value=is_time ) assert ( - MissingFileSync().determine_should_sync( - FileStats(src=""), FileStats(src="") - ) + MissingFileSync().determine_should_sync(FileStats(src=""), FileStats(src="")) is expected ) mock_compare_size.assert_not_called() @@ -246,16 +232,9 @@ def test_determine_should_sync( self, expected: bool, is_size: bool, is_time: bool, mocker: MockerFixture ) -> None: """Test determine_should_sync.""" - mock_compare_size = mocker.patch.object( - NeverSync, "compare_size", return_value=is_size - ) - mock_compare_time = mocker.patch.object( - NeverSync, "compare_time", return_value=is_time - ) - assert ( - NeverSync().determine_should_sync(FileStats(src=""), FileStats(src="")) - is expected - ) + mock_compare_size = mocker.patch.object(NeverSync, "compare_size", return_value=is_size) + mock_compare_time = mocker.patch.object(NeverSync, "compare_time", return_value=is_time) + assert NeverSync().determine_should_sync(FileStats(src=""), FileStats(src="")) is expected mock_compare_size.assert_not_called() mock_compare_time.assert_not_called() @@ -292,10 +271,7 @@ def test_determine_should_sync( mock_compare_time = mocker.patch.object( SizeAndLastModifiedSync, "compare_time", return_value=is_time ) - assert ( - SizeAndLastModifiedSync().determine_should_sync(src_file, dest_file) - is expected - ) + assert SizeAndLastModifiedSync().determine_should_sync(src_file, dest_file) is expected 
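
Across these test modules the excinfo capture-and-assert idiom is collapsed into pytest.raises(..., match=...). A minimal sketch of the pattern (require_both is an invented stand-in, not code from this repo): note that match= is applied with re.search() against str(excinfo.value), so a literal message containing regex metacharacters should go through re.escape() first; the messages in this diff contain none.

    from __future__ import annotations

    import re

    import pytest


    def require_both(src: object | None, dest: object | None) -> None:
        """Invented guard clause mirroring the ValueError these helpers raise."""
        if src is None or dest is None:
            raise ValueError("src_file and dest_file must not be None")


    def test_require_both_raises() -> None:
        # match= is a regex searched against the stringified exception.
        with pytest.raises(ValueError, match=re.escape("src_file and dest_file must not be None")):
            require_both(None, None)
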
mock_compare_size.assert_called_once_with(src_file, dest_file) mock_compare_time.assert_called_once_with(src_file, dest_file) diff --git a/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_delete.py b/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_delete.py index f1c273f19..0aa511249 100644 --- a/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_delete.py +++ b/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_delete.py @@ -31,16 +31,9 @@ def test_determine_should_sync( self, expected: bool, is_size: bool, is_time: bool, mocker: MockerFixture ) -> None: """Test determine_should_sync.""" - mock_compare_size = mocker.patch.object( - DeleteSync, "compare_size", return_value=is_size - ) - mock_compare_time = mocker.patch.object( - DeleteSync, "compare_time", return_value=is_time - ) - assert ( - DeleteSync().determine_should_sync(FileStats(src=""), FileStats(src="")) - is expected - ) + mock_compare_size = mocker.patch.object(DeleteSync, "compare_size", return_value=is_size) + mock_compare_time = mocker.patch.object(DeleteSync, "compare_time", return_value=is_time) + assert DeleteSync().determine_should_sync(FileStats(src=""), FileStats(src="")) is expected mock_compare_size.assert_not_called() mock_compare_time.assert_not_called() diff --git a/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_exact_timestamps.py b/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_exact_timestamps.py index 92ac0e66e..a7d3590ff 100644 --- a/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_exact_timestamps.py +++ b/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_exact_timestamps.py @@ -3,10 +3,9 @@ from __future__ import annotations import datetime -from typing import Optional +from unittest.mock import Mock import pytest -from mock import Mock from runway.core.providers.aws.s3._helpers.file_generator import FileStats from runway.core.providers.aws.s3._helpers.sync_strategy.exact_timestamps import ( @@ -43,16 +42,13 @@ def test_compare_time_dest_older_not_download(self) -> None: is False ) - @pytest.mark.parametrize( - "src, dest", [(None, None), (Mock(), None), (None, Mock())] - ) + @pytest.mark.parametrize("src, dest", [(None, None), (Mock(), None), (None, Mock())]) def test_compare_time_raise_value_error( - self, dest: Optional[FileStats], src: Optional[FileStats] + self, dest: FileStats | None, src: FileStats | None ) -> None: """Test compare_time.""" - with pytest.raises(ValueError) as excinfo: + with pytest.raises(ValueError, match="src_file and dest_file must not be None"): ExactTimestampsSync().compare_time(src, dest) - assert str(excinfo.value) == "src_file and dest_file must not be None" def test_compare_time_same(self) -> None: """Test compare_time.""" diff --git a/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_register.py b/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_register.py index c761a4660..429de4b7d 100644 --- a/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_register.py +++ b/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_register.py @@ -3,8 +3,7 @@ from __future__ import annotations from typing import TYPE_CHECKING - -from mock import Mock, call +from unittest.mock import Mock, call from runway.core.providers.aws.s3._helpers.sync_strategy import ( DeleteSync, diff --git a/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_size_only.py b/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_size_only.py index 
67791867c..af7d7c25a 100644 --- a/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_size_only.py +++ b/tests/unit/core/providers/aws/s3/_helpers/sync_strategy/test_size_only.py @@ -33,12 +33,8 @@ def test_determine_should_sync( """Test determine_should_sync.""" src_file = FileStats(src="") dest_file = FileStats(src="") - mock_compare_size = mocker.patch.object( - SizeOnlySync, "compare_size", return_value=is_size - ) - mock_compare_time = mocker.patch.object( - SizeOnlySync, "compare_time", return_value=is_time - ) + mock_compare_size = mocker.patch.object(SizeOnlySync, "compare_size", return_value=is_size) + mock_compare_time = mocker.patch.object(SizeOnlySync, "compare_time", return_value=is_time) assert SizeOnlySync().determine_should_sync(src_file, dest_file) is expected mock_compare_size.assert_called_once_with(src_file, dest_file) mock_compare_time.assert_not_called() diff --git a/tests/unit/core/providers/aws/s3/_helpers/test_action_architecture.py b/tests/unit/core/providers/aws/s3/_helpers/test_action_architecture.py index 67b129cf2..5d5001276 100644 --- a/tests/unit/core/providers/aws/s3/_helpers/test_action_architecture.py +++ b/tests/unit/core/providers/aws/s3/_helpers/test_action_architecture.py @@ -4,9 +4,9 @@ import os from typing import TYPE_CHECKING +from unittest.mock import Mock, call import pytest -from mock import Mock, call from runway.core.providers.aws.s3._helpers.action_architecture import ActionArchitecture from runway.core.providers.aws.s3._helpers.parameters import ParametersDataModel @@ -145,16 +145,10 @@ def test_run_sync( "choose_sync_strategies", return_value={"sync_strategy": "test"}, ) - mocker.patch( - f"{MODULE}.FormatPath", format=Mock(side_effect=[files, rev_files]) - ) + mocker.patch(f"{MODULE}.FormatPath", format=Mock(side_effect=[files, rev_files])) mock_file_generator = Mock(call=Mock(return_value="FileGenerator().call()")) - mock_file_generator_rev = Mock( - call=Mock(return_value="rev:FileGenerator().call()") - ) - mock_file_info_builder = Mock( - call=Mock(return_value="FileInfoBuilder().call()") - ) + mock_file_generator_rev = Mock(call=Mock(return_value="rev:FileGenerator().call()")) + mock_file_info_builder = Mock(call=Mock(return_value="FileInfoBuilder().call()")) mock_comparator = Mock(call=Mock(return_value="Comparator().call()")) mocker.patch(f"{MODULE}.Comparator", return_value=mock_comparator) mocker.patch( @@ -195,9 +189,7 @@ def test_run_sync( mock_comparator.call.assert_called_once_with( mock_filter_inst.call.return_value, mock_filter_inst.call.return_value ) - mock_file_info_builder.call.assert_called_once_with( - mock_comparator.call.return_value - ) + mock_file_info_builder.call.assert_called_once_with(mock_comparator.call.return_value) mock_s3_transfer_handler.call.assert_called_once_with( mock_file_info_builder.call.return_value ) diff --git a/tests/unit/core/providers/aws/s3/_helpers/test_comparator.py b/tests/unit/core/providers/aws/s3/_helpers/test_comparator.py index a12f172ae..fed60411c 100644 --- a/tests/unit/core/providers/aws/s3/_helpers/test_comparator.py +++ b/tests/unit/core/providers/aws/s3/_helpers/test_comparator.py @@ -3,10 +3,9 @@ from __future__ import annotations import datetime -from typing import List, Optional +from unittest.mock import Mock import pytest -from mock import Mock from runway.core.providers.aws.s3._helpers.comparator import Comparator from runway.core.providers.aws.s3._helpers.file_generator import FileStats @@ -38,8 +37,7 @@ def setup_method(self) -> None: def 
test_call_compare_key_equal_should_not_sync(self) -> None: """Test call compare key equal should not sync.""" self.sync_strategy.determine_should_sync.return_value = False - ref_list: List[FileStats] = [] - result_list: List[FileStats] = [] + ref_list: list[FileStats] = [] src_files = [ FileStats( src="", @@ -64,30 +62,22 @@ def test_call_compare_key_equal_should_not_sync(self) -> None: operation_name="", ) ] - files = self.comparator.call(iter(src_files), iter(dest_files)) - for filename in files: - result_list.append(filename) - assert result_list == ref_list + assert list(self.comparator.call(iter(src_files), iter(dest_files))) == ref_list # Try when the sync strategy says to sync the file. self.sync_strategy.determine_should_sync.return_value = True ref_list = [] - result_list = [] - files = self.comparator.call(iter(src_files), iter(dest_files)) ref_list.append(src_files[0]) - for filename in files: - result_list.append(filename) - assert result_list == ref_list + assert list(self.comparator.call(iter(src_files), iter(dest_files))) == ref_list - def test_call_compare_key_greater(self): + def test_call_compare_key_greater(self) -> None: """Test call compare key greater.""" self.not_at_dest_sync_strategy.determine_should_sync.return_value = False self.not_at_src_sync_strategy.determine_should_sync.return_value = True - src_files: List[FileStats] = [] - dest_files: List[FileStats] = [] - ref_list: List[FileStats] = [] - result_list: List[FileStats] = [] + src_files: list[FileStats] = [] + dest_files: list[FileStats] = [] + ref_list: list[FileStats] = [] src_file = FileStats( src="", dest="", @@ -111,32 +101,24 @@ def test_call_compare_key_greater(self): src_files.append(src_file) dest_files.append(dest_file) ref_list.append(dest_file) - files = self.comparator.call(iter(src_files), iter(dest_files)) - for filename in files: - result_list.append(filename) - assert result_list == ref_list + assert list(self.comparator.call(iter(src_files), iter(dest_files))) == ref_list # Now try when the sync strategy says not to sync the file. self.not_at_src_sync_strategy.determine_should_sync.return_value = False - result_list = [] ref_list = [] - files = self.comparator.call(iter(src_files), iter(dest_files)) - for filename in files: - result_list.append(filename) - assert result_list == ref_list + assert list(self.comparator.call(iter(src_files), iter(dest_files))) == ref_list def test_call_compare_key_less(self) -> None: """Test call compare key less.""" self.not_at_src_sync_strategy.determine_should_sync.return_value = False self.not_at_dest_sync_strategy.determine_should_sync.return_value = True - ref_list: List[FileStats] = [] - result_list: List[FileStats] = [] - src_files: List[FileStats] = [] - dest_files: List[FileStats] = [] + ref_list: list[FileStats] = [] + src_files: list[FileStats] = [] + dest_files: list[FileStats] = [] src_file = FileStats( src="", dest="", - compare_key="bomparator_test.py", + compare_key="bomparator_test.py", # cspell: disable-line size=10, last_update=NOW, src_type="local", @@ -156,27 +138,19 @@ def test_call_compare_key_less(self) -> None: src_files.append(src_file) dest_files.append(dest_file) ref_list.append(src_file) - files = self.comparator.call(iter(src_files), iter(dest_files)) - for filename in files: - result_list.append(filename) - assert result_list == ref_list + assert list(self.comparator.call(iter(src_files), iter(dest_files))) == ref_list # Now try when the sync strategy says not to sync the file. 
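
These comparator tests also drop the hand-rolled accumulation loop in favor of comparing list(generator) directly; both forms drain the iterator exactly once, so the assertions are equivalent. A tiny sketch (numbers() is invented for illustration):

    from __future__ import annotations

    from collections.abc import Iterator


    def numbers() -> Iterator[int]:
        yield from (1, 2, 3)


    # Before: accumulate by hand, then compare.
    result: list[int] = []
    for value in numbers():
        result.append(value)
    assert result == [1, 2, 3]

    # After: list() exhausts the iterator and compares in one expression.
    assert list(numbers()) == [1, 2, 3]
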
self.not_at_dest_sync_strategy.determine_should_sync.return_value = False - result_list = [] ref_list = [] - files = self.comparator.call(iter(src_files), iter(dest_files)) - for filename in files: - result_list.append(filename) - assert result_list == ref_list + assert list(self.comparator.call(iter(src_files), iter(dest_files))) == ref_list def test_call_empty_dest(self) -> None: """Test call empty dest.""" self.not_at_dest_sync_strategy.determine_should_sync.return_value = True - src_files: List[FileStats] = [] - dest_files: List[FileStats] = [] - ref_list: List[FileStats] = [] - result_list: List[FileStats] = [] + src_files: list[FileStats] = [] + dest_files: list[FileStats] = [] + ref_list: list[FileStats] = [] src_file = FileStats( src="", dest="", @@ -189,27 +163,19 @@ def test_call_empty_dest(self) -> None: ) src_files.append(src_file) ref_list.append(src_file) - files = self.comparator.call(iter(src_files), iter(dest_files)) - for filename in files: - result_list.append(filename) - assert result_list == ref_list + assert list(self.comparator.call(iter(src_files), iter(dest_files))) == ref_list # Now try when the sync strategy says not to sync the file. self.not_at_dest_sync_strategy.determine_should_sync.return_value = False - result_list = [] ref_list = [] - files = self.comparator.call(iter(src_files), iter(dest_files)) - for filename in files: - result_list.append(filename) - assert result_list == ref_list + assert list(self.comparator.call(iter(src_files), iter(dest_files))) == ref_list def test_call_empty_src(self) -> None: """Test call empty src.""" self.not_at_src_sync_strategy.determine_should_sync.return_value = True - src_files: List[FileStats] = [] - dest_files: List[FileStats] = [] - ref_list: List[FileStats] = [] - result_list: List[FileStats] = [] + src_files: list[FileStats] = [] + dest_files: list[FileStats] = [] + ref_list: list[FileStats] = [] dest_file = FileStats( src="", dest="", @@ -222,30 +188,19 @@ def test_call_empty_src(self) -> None: ) dest_files.append(dest_file) ref_list.append(dest_file) - files = self.comparator.call(iter(src_files), iter(dest_files)) - for filename in files: - result_list.append(filename) - assert result_list == ref_list + assert list(self.comparator.call(iter(src_files), iter(dest_files))) == ref_list # Now try when the sync strategy says not to sync the file. 
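
Every module in this change also swaps the third-party mock backport for the standard library's unittest.mock, which has shipped with the same public API since Python 3.3, so only the import line moves. A small self-contained example of the API staying put:

    # Before: from mock import Mock, call   (PyPI backport)
    # After: the stdlib module.
    from unittest.mock import Mock, call

    m = Mock(return_value=3)
    m(1)
    m(2)
    # call() records positional/keyword args for later assertions.
    assert m.call_args_list == [call(1), call(2)]
    assert m(0) == 3  # return_value is honored on every call
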
self.not_at_src_sync_strategy.determine_should_sync.return_value = False - result_list = [] ref_list = [] - files = self.comparator.call(iter(src_files), iter(dest_files)) - for filename in files: - result_list.append(filename) - assert result_list == ref_list + assert list(self.comparator.call(iter(src_files), iter(dest_files))) == ref_list def test_call_empty_src_dest(self) -> None: """Test call.""" - src_files: List[FileStats] = [] - dest_files: List[FileStats] = [] - ref_list: List[FileStats] = [] - result_list: List[FileStats] = [] - files = self.comparator.call(iter(src_files), iter(dest_files)) - for filename in files: - result_list.append(filename) - assert result_list == ref_list + src_files: list[FileStats] = [] + dest_files: list[FileStats] = [] + ref_list: list[FileStats] = [] + assert list(self.comparator.call(iter(src_files), iter(dest_files))) == ref_list @pytest.mark.parametrize( "src_file, dest_file, expected", @@ -260,9 +215,9 @@ def test_call_empty_src_dest(self) -> None: ) def test_compare_comp_key( self, - dest_file: Optional[FileStats], + dest_file: FileStats | None, expected: str, - src_file: Optional[FileStats], + src_file: FileStats | None, ) -> None: """Test compare_comp_key.""" assert Comparator.compare_comp_key(src_file, dest_file) == expected diff --git a/tests/unit/core/providers/aws/s3/_helpers/test_file_generator.py b/tests/unit/core/providers/aws/s3/_helpers/test_file_generator.py index d3566d788..0fdd79559 100644 --- a/tests/unit/core/providers/aws/s3/_helpers/test_file_generator.py +++ b/tests/unit/core/providers/aws/s3/_helpers/test_file_generator.py @@ -6,13 +6,12 @@ import os import platform import stat -from pathlib import Path from typing import TYPE_CHECKING +from unittest.mock import Mock import pytest from botocore.exceptions import ClientError from dateutil.tz import tzlocal -from mock import Mock from runway.core.providers.aws.s3._helpers.file_generator import ( FileGenerator, @@ -24,6 +23,8 @@ from runway.core.providers.aws.s3._helpers.utils import EPOCH_TIME if TYPE_CHECKING: + from pathlib import Path + from pytest_mock import MockerFixture from runway.core.providers.aws.s3._helpers.file_generator import ( @@ -47,9 +48,7 @@ def test_is_readable(tmp_path: Path) -> None: assert is_readable(tmp_file) -def test_is_readable_unreadable_directory( - mocker: MockerFixture, tmp_path: Path -) -> None: +def test_is_readable_unreadable_directory(mocker: MockerFixture, tmp_path: Path) -> None: """Test is_readable.""" mocker.patch("os.listdir", side_effect=OSError) assert not is_readable(tmp_path) @@ -78,9 +77,7 @@ def test_is_special_file_block_device(mocker: MockerFixture, tmp_path: Path) -> assert is_special_file(tmp_file) -def test_is_special_file_character_device( - mocker: MockerFixture, tmp_path: Path -) -> None: +def test_is_special_file_character_device(mocker: MockerFixture, tmp_path: Path) -> None: """Test is_special_file.""" mocker.patch("stat.S_ISCHR", return_value=True) tmp_file = tmp_path / "foo" @@ -93,7 +90,6 @@ def test_is_special_file_fifo(tmp_path: Path) -> None: """Test is_special_file.""" tmp_file = tmp_path / "foo" # method only exists on linux systems - # pylint: disable=no-member os.mknod(tmp_file, 0o600 | stat.S_IFIFO) # type: ignore assert is_special_file(tmp_file) @@ -150,9 +146,7 @@ def test_call_locals3(self, mocker: MockerFixture, tmp_path: Path) -> None: formatted_path["src"]["path"], formatted_path["dir_op"] ) mock_list_objects.assert_not_called() - mock_find_dest_path_comp_key.assert_called_once_with( - formatted_path, 
f"{src}test.txt" - ) + mock_find_dest_path_comp_key.assert_called_once_with(formatted_path, f"{src}test.txt") def test_call_s3local(self, mocker: MockerFixture, tmp_path: Path) -> None: """Test call.""" @@ -185,13 +179,9 @@ def test_call_s3local(self, mocker: MockerFixture, tmp_path: Path) -> None: formatted_path["src"]["path"], formatted_path["dir_op"] ) mock_list_files.assert_not_called() - mock_find_dest_path_comp_key.assert_called_once_with( - formatted_path, f"{src}test.txt" - ) + mock_find_dest_path_comp_key.assert_called_once_with(formatted_path, f"{src}test.txt") - def test_list_files_directory( - self, loc_files: LocalFiles, mocker: MockerFixture - ) -> None: + def test_list_files_directory(self, loc_files: LocalFiles, mocker: MockerFixture) -> None: """Test list_files.""" mocker.patch(f"{MODULE}.get_file_stat", return_value=(15, NOW)) mocker.patch.object(FileGenerator, "should_ignore_file", return_value=False) @@ -201,9 +191,7 @@ def test_list_files_directory( assert (loc_files["files"][0], {"Size": 15, "LastModified": NOW}) in result assert (loc_files["files"][1], {"Size": 15, "LastModified": NOW}) in result - def test_list_files_file( - self, loc_files: LocalFiles, mocker: MockerFixture - ) -> None: + def test_list_files_file(self, loc_files: LocalFiles, mocker: MockerFixture) -> None: """Test list_files.""" mocker.patch(f"{MODULE}.get_file_stat", return_value=(15, NOW)) mocker.patch.object(FileGenerator, "should_ignore_file", return_value=False) @@ -223,9 +211,7 @@ def test_list_objects(self, mocker: MockerFixture) -> None: mock_inst = Mock(list_objects=mock_list_objects) mock_class = mocker.patch(f"{MODULE}.BucketLister", return_value=mock_inst) params = {"key": "val"} - obj = FileGenerator( - self.client, "", request_parameters={"ListObjectsV2": params} - ) + obj = FileGenerator(self.client, "", request_parameters={"ListObjectsV2": params}) result = list(obj.list_objects("bucket/", dir_op=True)) mock_class.assert_called_once_with(self.client) mock_list_objects.assert_called_once_with( @@ -244,9 +230,7 @@ def test_list_objects_delete(self, mocker: MockerFixture) -> None: mock_inst = Mock(list_objects=mock_list_objects) mock_class = mocker.patch(f"{MODULE}.BucketLister", return_value=mock_inst) params = {"key": "val"} - obj = FileGenerator( - self.client, "delete", request_parameters={"ListObjectsV2": params} - ) + obj = FileGenerator(self.client, "delete", request_parameters={"ListObjectsV2": params}) result = list(obj.list_objects("bucket/prefix", dir_op=True)) mock_class.assert_called_once_with(self.client) mock_list_objects.assert_called_once_with( @@ -274,9 +258,7 @@ def test_list_objects_incorrect_dir_opt(self, mocker: MockerFixture) -> None: def test_list_objects_single(self) -> None: """Test list_objects.""" - head_object = Mock( - return_value={"ContentLength": "13", "LastModified": NOW.isoformat()} - ) + head_object = Mock(return_value={"ContentLength": "13", "LastModified": NOW.isoformat()}) self.client.head_object = head_object obj = FileGenerator(self.client, "") result = list(obj.list_objects("bucket/key.txt", False)) @@ -295,17 +277,13 @@ def test_list_objects_single_client_error_403(self) -> None: def test_list_objects_single_client_error_404(self) -> None: """Test list_objects.""" - exc = ClientError( - {"Error": {"Code": "404", "Message": "something"}}, "HeadObject" - ) + exc = ClientError({"Error": {"Code": "404", "Message": "something"}}, "HeadObject") head_object = Mock(side_effect=exc) self.client.head_object = head_object with pytest.raises(ClientError) 
as excinfo: list(FileGenerator(self.client, "").list_objects("bucket/key.txt", False)) assert excinfo.value != exc - assert ( - excinfo.value.response["Error"]["Message"] == 'Key "key.txt" does not exist' - ) + assert excinfo.value.response["Error"]["Message"] == 'Key "key.txt" does not exist' def test_list_objects_single_delete(self) -> None: """Test list_objects.""" @@ -334,9 +312,7 @@ def test_normalize_sort_backslash(self) -> None: def test_safely_get_file_stats(self, mocker: MockerFixture, tmp_path: Path) -> None: """Test safely_get_file_stats.""" - mock_get_file_stat = mocker.patch( - f"{MODULE}.get_file_stat", return_value=(15, NOW) - ) + mock_get_file_stat = mocker.patch(f"{MODULE}.get_file_stat", return_value=(15, NOW)) obj = FileGenerator(self.client, "") assert obj.safely_get_file_stats(tmp_path) == ( tmp_path, @@ -358,9 +334,7 @@ def test_safely_get_file_stats_no_last_update( self, mocker: MockerFixture, tmp_path: Path ) -> None: """Test safely_get_file_stats.""" - mock_create_warning = mocker.patch( - f"{MODULE}.create_warning", return_value="warning" - ) + mock_create_warning = mocker.patch(f"{MODULE}.create_warning", return_value="warning") mocker.patch(f"{MODULE}.get_file_stat", return_value=(15, None)) obj = FileGenerator(self.client, "") assert obj.safely_get_file_stats(tmp_path) == ( @@ -369,8 +343,7 @@ def test_safely_get_file_stats_no_last_update( ) mock_create_warning.assert_called_once_with( path=tmp_path, - error_message="File has an invalid timestamp. Passing epoch " - "time as timestamp.", + error_message="File has an invalid timestamp. Passing epoch time as timestamp.", skip_file=False, ) assert obj.result_queue.get() == "warning" @@ -380,9 +353,7 @@ def test_should_ignore_file(self, mocker: MockerFixture, tmp_path: Path) -> None mock_triggers_warning = mocker.patch.object( FileGenerator, "triggers_warning", return_value=False ) - assert not FileGenerator( - self.client, "", follow_symlinks=True - ).should_ignore_file(tmp_path) + assert not FileGenerator(self.client, "", follow_symlinks=True).should_ignore_file(tmp_path) mock_triggers_warning.assert_called_once_with(tmp_path) def test_should_ignore_file_symlink(self, tmp_path: Path) -> None: @@ -391,64 +362,42 @@ def test_should_ignore_file_symlink(self, tmp_path: Path) -> None: real_path = tmp_path / "real_path" real_path.mkdir() tmp_symlink.symlink_to(real_path) - assert FileGenerator(self.client, "", follow_symlinks=False).should_ignore_file( - tmp_symlink - ) + assert FileGenerator(self.client, "", follow_symlinks=False).should_ignore_file(tmp_symlink) def test_should_ignore_file_triggers_warning( self, mocker: MockerFixture, tmp_path: Path ) -> None: """Test should_ignore_file.""" mocker.patch.object(FileGenerator, "triggers_warning", return_value=True) - assert FileGenerator(self.client, "", follow_symlinks=True).should_ignore_file( - tmp_path - ) + assert FileGenerator(self.client, "", follow_symlinks=True).should_ignore_file(tmp_path) def test_triggers_warning(self, mocker: MockerFixture, tmp_path: Path) -> None: """Test triggers_warning.""" mock_create_warning = mocker.patch(f"{MODULE}.create_warning") - mock_is_special_file = mocker.patch( - f"{MODULE}.is_special_file", return_value=False - ) + mock_is_special_file = mocker.patch(f"{MODULE}.is_special_file", return_value=False) mock_is_readable = mocker.patch(f"{MODULE}.is_readable", return_value=True) - assert not FileGenerator( - self.client, "", follow_symlinks=True - ).triggers_warning(tmp_path) + assert not FileGenerator(self.client, "", 
follow_symlinks=True).triggers_warning(tmp_path) mock_is_special_file.assert_called_once_with(tmp_path) mock_is_readable.assert_called_once_with(tmp_path) mock_create_warning.assert_not_called() - def test_triggers_warning_does_not_exist( - self, mocker: MockerFixture, tmp_path: Path - ) -> None: + def test_triggers_warning_does_not_exist(self, mocker: MockerFixture, tmp_path: Path) -> None: """Test triggers_warning.""" missing_path = tmp_path / "missing" - mock_create_warning = mocker.patch( - f"{MODULE}.create_warning", return_value="warning" - ) - mock_is_special_file = mocker.patch( - f"{MODULE}.is_special_file", return_value=False - ) + mock_create_warning = mocker.patch(f"{MODULE}.create_warning", return_value="warning") + mock_is_special_file = mocker.patch(f"{MODULE}.is_special_file", return_value=False) mock_is_readable = mocker.patch(f"{MODULE}.is_readable", return_value=True) obj = FileGenerator(self.client, "", follow_symlinks=True) assert obj.triggers_warning(missing_path) mock_is_special_file.assert_not_called() mock_is_readable.assert_not_called() - mock_create_warning.assert_called_once_with( - missing_path, "File does not exist." - ) + mock_create_warning.assert_called_once_with(missing_path, "File does not exist.") assert obj.result_queue.get() == "warning" - def test_triggers_warning_is_special_file( - self, mocker: MockerFixture, tmp_path: Path - ) -> None: + def test_triggers_warning_is_special_file(self, mocker: MockerFixture, tmp_path: Path) -> None: """Test triggers_warning.""" - mock_create_warning = mocker.patch( - f"{MODULE}.create_warning", return_value="warning" - ) - mock_is_special_file = mocker.patch( - f"{MODULE}.is_special_file", return_value=True - ) + mock_create_warning = mocker.patch(f"{MODULE}.create_warning", return_value="warning") + mock_is_special_file = mocker.patch(f"{MODULE}.is_special_file", return_value=True) mock_is_readable = mocker.patch(f"{MODULE}.is_readable", return_value=True) obj = FileGenerator(self.client, "", follow_symlinks=True) assert obj.triggers_warning(tmp_path) @@ -460,24 +409,16 @@ def test_triggers_warning_is_special_file( ) assert obj.result_queue.get() == "warning" - def test_triggers_warning_is_unreadable( - self, mocker: MockerFixture, tmp_path: Path - ) -> None: + def test_triggers_warning_is_unreadable(self, mocker: MockerFixture, tmp_path: Path) -> None: """Test triggers_warning.""" - mock_create_warning = mocker.patch( - f"{MODULE}.create_warning", return_value="warning" - ) - mock_is_special_file = mocker.patch( - f"{MODULE}.is_special_file", return_value=False - ) + mock_create_warning = mocker.patch(f"{MODULE}.create_warning", return_value="warning") + mock_is_special_file = mocker.patch(f"{MODULE}.is_special_file", return_value=False) mock_is_readable = mocker.patch(f"{MODULE}.is_readable", return_value=False) obj = FileGenerator(self.client, "", follow_symlinks=True) assert obj.triggers_warning(tmp_path) mock_is_special_file.assert_called_once_with(tmp_path) mock_is_readable.assert_called_once_with(tmp_path) - mock_create_warning.assert_called_once_with( - tmp_path, "File/Directory is not readable." 
- ) + mock_create_warning.assert_called_once_with(tmp_path, "File/Directory is not readable.") assert obj.result_queue.get() == "warning" diff --git a/tests/unit/core/providers/aws/s3/_helpers/test_file_info.py b/tests/unit/core/providers/aws/s3/_helpers/test_file_info.py index 4b4297e2f..d684b12a6 100644 --- a/tests/unit/core/providers/aws/s3/_helpers/test_file_info.py +++ b/tests/unit/core/providers/aws/s3/_helpers/test_file_info.py @@ -6,7 +6,6 @@ from typing import TYPE_CHECKING import pytest -from typing_extensions import Literal from runway.core.providers.aws.s3._helpers.file_info import FileInfo from runway.core.providers.aws.s3._helpers.utils import EPOCH_TIME @@ -15,6 +14,7 @@ from pathlib import Path from mypy_boto3_s3.type_defs import ObjectTypeDef + from typing_extensions import Literal NOW = datetime.datetime.now() diff --git a/tests/unit/core/providers/aws/s3/_helpers/test_file_info_builder.py b/tests/unit/core/providers/aws/s3/_helpers/test_file_info_builder.py index 3b61056f7..7c45e02ed 100644 --- a/tests/unit/core/providers/aws/s3/_helpers/test_file_info_builder.py +++ b/tests/unit/core/providers/aws/s3/_helpers/test_file_info_builder.py @@ -2,7 +2,7 @@ from __future__ import annotations -from mock import Mock +from unittest.mock import Mock from runway.core.providers.aws.s3._helpers.file_generator import FileStats from runway.core.providers.aws.s3._helpers.file_info import FileInfo diff --git a/tests/unit/core/providers/aws/s3/_helpers/test_filters.py b/tests/unit/core/providers/aws/s3/_helpers/test_filters.py index 5dc0e0331..b1da765b8 100644 --- a/tests/unit/core/providers/aws/s3/_helpers/test_filters.py +++ b/tests/unit/core/providers/aws/s3/_helpers/test_filters.py @@ -20,12 +20,8 @@ class TestFilter: def test_call_local(self, tmp_path: Path) -> None: """Test call.""" - exclude_md = FileStats( - src=tmp_path / "exclude/README.md", src_type="local", dest="" - ) - include_md = FileStats( - src=tmp_path / "include/README.md", src_type="local", dest="" - ) + exclude_md = FileStats(src=tmp_path / "exclude/README.md", src_type="local", dest="") + include_md = FileStats(src=tmp_path / "include/README.md", src_type="local", dest="") other_file = FileStats(src=tmp_path / "/test.txt", src_type="local", dest="") params = ParametersDataModel( src=str(tmp_path), diff --git a/tests/unit/core/providers/aws/s3/_helpers/test_format_path.py b/tests/unit/core/providers/aws/s3/_helpers/test_format_path.py index 25afa29ba..698ab0487 100644 --- a/tests/unit/core/providers/aws/s3/_helpers/test_format_path.py +++ b/tests/unit/core/providers/aws/s3/_helpers/test_format_path.py @@ -3,10 +3,10 @@ from __future__ import annotations import os -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING +from unittest.mock import call import pytest -from mock import call from runway.core.providers.aws.s3._helpers.format_path import FormatPath @@ -82,9 +82,7 @@ def test_format_local_path(self, tmp_path: Path) -> None: ("s3://bucket/key.txt", False, ("s3://bucket/key.txt", False)), ], ) - def test_format_s3_path( - self, dir_op: bool, expected: Tuple[str, bool], path: str - ) -> None: + def test_format_s3_path(self, dir_op: bool, expected: tuple[str, bool], path: str) -> None: """Test format_s3_path.""" assert FormatPath.format_s3_path(path, dir_op) == expected @@ -99,8 +97,6 @@ def test_format_s3_path( ("s3://test", ("s3", "test")), ], ) - def test_identify_path_type( - self, expected: Tuple[SupportedPathType, str], path: str - ) -> None: + def test_identify_path_type(self, expected: 
tuple[SupportedPathType, str], path: str) -> None: """Test identify_path_type.""" assert FormatPath.identify_path_type(path) == expected diff --git a/tests/unit/core/providers/aws/s3/_helpers/test_parameters.py b/tests/unit/core/providers/aws/s3/_helpers/test_parameters.py index 8c3a7dd3c..535a6c7b0 100644 --- a/tests/unit/core/providers/aws/s3/_helpers/test_parameters.py +++ b/tests/unit/core/providers/aws/s3/_helpers/test_parameters.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, List +from typing import TYPE_CHECKING, Any import pytest from pydantic import ValidationError @@ -45,9 +45,7 @@ def test_init(self, mocker: MockerFixture) -> None: "cmd, expected", [("sync", True), ("mb", True), ("rb", True), ("cp", False), ("mv", False)], ) - def test_init_set_dir_op( - self, cmd: str, expected: bool, mocker: MockerFixture - ) -> None: + def test_init_set_dir_op(self, cmd: str, expected: bool, mocker: MockerFixture) -> None: """Test __init__.""" mocker.patch.object(Parameters, "_validate_path_args") assert Parameters(cmd, self.data_locallocal).data.dir_op == expected @@ -56,9 +54,7 @@ def test_init_set_dir_op( "cmd, expected", [("sync", False), ("mb", False), ("rb", False), ("cp", False), ("mv", True)], ) - def test_init_set_is_move( - self, cmd: str, expected: bool, mocker: MockerFixture - ) -> None: + def test_init_set_is_move(self, cmd: str, expected: bool, mocker: MockerFixture) -> None: """Test __init__.""" mocker.patch.object(Parameters, "_validate_path_args") assert Parameters(cmd, self.data_locallocal).data.is_move == expected @@ -71,9 +67,8 @@ def test_same_path_mv_locallocal(self) -> None: def test_same_path_mv_s3s3(self) -> None: """Test _same_path.""" self.data_s3s3.dest = self.data_s3s3.src - with pytest.raises(ValueError) as excinfo: + with pytest.raises(ValueError, match="Cannot mv a file onto itself"): Parameters("mv", self.data_s3s3) - assert "Cannot mv a file onto itself" in str(excinfo.value) def test_same_path_mv_s3s3_not_same(self) -> None: """Test _same_path.""" @@ -126,9 +121,7 @@ class TestParametersDataModel: ("s3://test-dest", "s3://test-src", "s3s3"), ], ) - def test_determine_paths_type( - self, dest: str, expected: PathsType, src: str - ) -> None: + def test_determine_paths_type(self, dest: str, expected: PathsType, src: str) -> None: """Test _determine_paths_type.""" assert ParametersDataModel(dest=dest, src=src).paths_type == expected @@ -168,9 +161,7 @@ def test_normalize_s3_trailing_slash(self, provided: str, expected: str) -> None "kwargs, error_locs", [({"dest": "test-dest"}, ["src"]), ({"src": "test-src"}, ["dest"])], ) - def test_required_fields( - self, error_locs: List[str], kwargs: Dict[str, Any] - ) -> None: + def test_required_fields(self, error_locs: list[str], kwargs: dict[str, Any]) -> None: """Test required fields.""" with pytest.raises(ValidationError) as excinfo: ParametersDataModel(**kwargs) diff --git a/tests/unit/core/providers/aws/s3/_helpers/test_results.py b/tests/unit/core/providers/aws/s3/_helpers/test_results.py index 429dca6da..0a4b5ff15 100644 --- a/tests/unit/core/providers/aws/s3/_helpers/test_results.py +++ b/tests/unit/core/providers/aws/s3/_helpers/test_results.py @@ -1,21 +1,20 @@ """Test runway.core.providers.aws.s3._helpers.results.""" -# pylint: disable=too-many-lines from __future__ import annotations import time from concurrent.futures import CancelledError from io import StringIO from queue import Queue -from typing import TYPE_CHECKING, Any, ClassVar, Dict, Optional +from 
typing import TYPE_CHECKING, Any, ClassVar +from unittest.mock import Mock import pytest -from mock import Mock from s3transfer.exceptions import FatalError from runway._logging import LogLevels from runway.core.providers.aws.s3._helpers.results import ( - AnyResult, + AnyResultType, BaseResultHandler, BaseResultSubscriber, CommandResult, @@ -54,7 +53,6 @@ ) if TYPE_CHECKING: - from pytest import LogCaptureFixture from pytest_mock import MockerFixture from s3transfer.futures import TransferFuture @@ -94,13 +92,13 @@ class BaseResultSubscriberTest: """Base class for result submitter test classes.""" bucket: ClassVar[str] = "test-bucket" - dest: Optional[str] + dest: str | None failure_future: TransferFuture filename: ClassVar[str] = "test.txt" future: TransferFuture key: ClassVar[str] = "test.txt" ref_exception: ClassVar[Exception] = Exception() - result_queue: "Queue[Any]" + result_queue: Queue[Any] size: ClassVar[int] = 20 * (1024 * 1024) # 20 MB src: str transfer_type: str @@ -112,8 +110,8 @@ def setup_method(self) -> None: def set_ref_transfer_futures(self) -> None: """Set reference transfer futures.""" - self.future = self.get_success_transfer_future("foo") # type: ignore - self.failure_future = self.get_failed_transfer_future(self.ref_exception) # type: ignore + self.future = self.get_success_transfer_future("foo") + self.failure_future = self.get_failed_transfer_future(self.ref_exception) def get_success_transfer_future(self, result: str) -> TransferFuture: """Create a success transfer future.""" @@ -124,18 +122,16 @@ def get_failed_transfer_future(self, exception: Exception) -> TransferFuture: return self._get_transfer_future(exception=exception) # type: ignore def _get_transfer_future( - self, result: Optional[Any] = None, exception: Optional[Exception] = None + self, result: Any | None = None, exception: Exception | None = None ) -> FakeTransferFuture: call_args = self._get_transfer_future_call_args() meta = FakeTransferFutureMeta(size=self.size, call_args=call_args) return FakeTransferFuture(result=result, exception=exception, meta=meta) def _get_transfer_future_call_args(self) -> FakeTransferFutureCallArgs: - return FakeTransferFutureCallArgs( - fileobj=self.filename, key=self.key, bucket=self.bucket - ) + return FakeTransferFutureCallArgs(fileobj=self.filename, key=self.key, bucket=self.bucket) - def get_queued_result(self) -> AnyResult: + def get_queued_result(self) -> AnyResultType: """Get queued result.""" return self.result_queue.get(block=False) @@ -271,9 +267,7 @@ def setup_method(self) -> None: def test_on_progress(self, mocker: MockerFixture) -> None: """Test on_progress.""" - mocker.patch.object( - BaseResultSubscriber, "_get_src_dest", return_value=(None, None) - ) + mocker.patch.object(BaseResultSubscriber, "_get_src_dest", return_value=(None, None)) assert not self.result_subscriber.on_queued(self.future) assert isinstance(self.get_queued_result(), QueuedResult) assert not self.result_subscriber.on_progress(self.future, 13) @@ -298,7 +292,7 @@ class TestCommandResultRecorder: command_result_recorder: CommandResultRecorder dest: ClassVar[str] = "s3://mybucket/test-key" result_processor: ResultProcessor - result_queue: "Queue[Any]" + result_queue: Queue[Any] result_recorder: ResultRecorder src: ClassVar[str] = "file" total_transfer_size: ClassVar[int] = 20 * (1024 * 1024) # 20 MB @@ -308,9 +302,7 @@ def setup_method(self) -> None: """Run before each test method if run to return the class instance attrs to default.""" self.result_queue = Queue() self.result_recorder = 
ResultRecorder() - self.result_processor = ResultProcessor( - self.result_queue, [self.result_recorder] - ) + self.result_processor = ResultProcessor(self.result_queue, [self.result_recorder]) self.command_result_recorder = CommandResultRecorder( self.result_queue, self.result_recorder, self.result_processor ) @@ -358,9 +350,7 @@ def test_get_command_result_success(self) -> None: ) ) self.result_queue.put( - SuccessResult( - transfer_type=self.transfer_type, src=self.src, dest=self.dest - ) + SuccessResult(transfer_type=self.transfer_type, src=self.src, dest=self.dest) ) result = self.command_result_recorder.get_command_result() assert result.num_tasks_failed == 0 @@ -384,7 +374,7 @@ def test_notify_total_submissions(self) -> None: class TestCopyResultSubscriber(TestUploadResultSubscriber): """Test CopyResultSubscriber.""" - copy_source: Dict[str, str] + copy_source: dict[str, str] source_bucket: str source_key: str @@ -410,9 +400,7 @@ def _get_transfer_future_call_args(self) -> FakeTransferFutureCallArgs: def test_on_queued_transfer_type_override(self) -> None: """Test on_queued.""" new_transfer_type = "move" - self.result_subscriber = CopyResultSubscriber( - self.result_queue, new_transfer_type - ) + self.result_subscriber = CopyResultSubscriber(self.result_queue, new_transfer_type) self.result_subscriber.on_queued(self.future) result = self.get_queued_result() self.assert_result_queue_is_empty() @@ -480,7 +468,7 @@ def test_does_not_print_progress_result(self) -> None: self.result_printer(progress_result) assert self.out_file.getvalue() == "" - def test_does_print_success_result(self, caplog: LogCaptureFixture) -> None: + def test_does_print_success_result(self, caplog: pytest.LogCaptureFixture) -> None: """Test does print success result.""" caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3") transfer_type = "upload" @@ -492,7 +480,7 @@ def test_does_print_success_result(self, caplog: LogCaptureFixture) -> None: assert self.out_file.getvalue() == "" def test_final_total_does_not_try_to_clear_empty_progress( - self, caplog: LogCaptureFixture + self, caplog: pytest.LogCaptureFixture ) -> None: """Test final total does not try to clear empty progress.""" caplog.set_level(LogLevels.INFO, "runway.core.providers.aws.s3") @@ -513,7 +501,7 @@ def test_final_total_does_not_try_to_clear_empty_progress( assert caplog.messages == ["upload: file to s3://mybucket/test-key"] assert self.out_file.getvalue() == "" - def test_print_failure_result(self, caplog: LogCaptureFixture) -> None: + def test_print_failure_result(self, caplog: pytest.LogCaptureFixture) -> None: """Test print failure result.""" caplog.set_level(LogLevels.ERROR, "runway.core.providers.aws.s3") transfer_type = "upload" @@ -526,12 +514,10 @@ def test_print_failure_result(self, caplog: LogCaptureFixture) -> None: exception=Exception("my exception"), ) self.result_printer(failure_result) - assert caplog.messages == [ - "upload failed: file to s3://mybucket/test-key my exception" - ] + assert caplog.messages == ["upload failed: file to s3://mybucket/test-key my exception"] assert self.error_file.getvalue() == "" - def test_print_warning_result(self, caplog: LogCaptureFixture) -> None: + def test_print_warning_result(self, caplog: pytest.LogCaptureFixture) -> None: """Test print warning.""" caplog.set_level(LogLevels.WARNING, "runway.core.providers.aws.s3") self.result_printer(PrintTask("warning: my warning")) @@ -557,7 +543,7 @@ def test_does_not_print_progress_result(self) -> None: self.result_printer(progress_result) 
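
The annotation updates follow one recipe throughout: typing.List/Dict/Tuple become the builtin generics of PEP 585, Optional[X] becomes the PEP 604 union X | None, and quotes around annotations such as "Queue[Any]" are dropped because from __future__ import annotations defers their evaluation. A sketch under invented names (pick() is not from this repo):

    from __future__ import annotations  # postpones evaluation of all annotations

    from queue import Queue
    from typing import Any


    # Before: def pick(items: List[str], default: Optional[str] = None) -> Dict[str, Any]
    def pick(items: list[str], default: str | None = None) -> dict[str, Any]:
        return {"chosen": items[0] if items else default}


    # Unquoted Queue[Any] works as an annotation via the future import;
    # subscripting Queue at runtime would additionally need Python 3.9+.
    results: Queue[Any] = Queue()
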
assert self.out_file.getvalue() == "" - def test_does_not_print_success_result(self, caplog: LogCaptureFixture) -> None: + def test_does_not_print_success_result(self, caplog: pytest.LogCaptureFixture) -> None: """Test does not print success result.""" caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3") transfer_type = "upload" @@ -568,7 +554,7 @@ def test_does_not_print_success_result(self, caplog: LogCaptureFixture) -> None: assert not caplog.messages assert not self.out_file.getvalue() - def test_does_print_failure_result(self, caplog: LogCaptureFixture) -> None: + def test_does_print_failure_result(self, caplog: pytest.LogCaptureFixture) -> None: """Test print failure result.""" caplog.set_level(LogLevels.ERROR, "runway.core.providers.aws.s3") transfer_type = "upload" @@ -581,12 +567,10 @@ def test_does_print_failure_result(self, caplog: LogCaptureFixture) -> None: exception=Exception("my exception"), ) self.result_printer(failure_result) - assert caplog.messages == [ - "upload failed: file to s3://mybucket/test-key my exception" - ] + assert caplog.messages == ["upload failed: file to s3://mybucket/test-key my exception"] assert not self.error_file.getvalue() - def test_does_print_warning_result(self, caplog: LogCaptureFixture) -> None: + def test_does_print_warning_result(self, caplog: pytest.LogCaptureFixture) -> None: """Test print warning.""" caplog.set_level(LogLevels.WARNING, "runway.core.providers.aws.s3") self.result_printer(PrintTask("warning: my warning")) @@ -594,7 +578,7 @@ def test_does_print_warning_result(self, caplog: LogCaptureFixture) -> None: assert not self.error_file.getvalue() def test_final_total_does_not_try_to_clear_empty_progress( - self, caplog: LogCaptureFixture + self, caplog: pytest.LogCaptureFixture ) -> None: """Test final total does not try to clear empty progress.""" caplog.set_level(LogLevels.INFO, "runway.core.providers.aws.s3") @@ -616,26 +600,23 @@ def test_final_total_does_not_try_to_clear_empty_progress( assert not self.out_file.getvalue() -# pylint: disable=too-many-public-methods class TestResultPrinter(BaseResultPrinterTest): """Test ResultPrinter.""" - def test_ctrl_c_error(self, caplog: LogCaptureFixture) -> None: + def test_ctrl_c_error(self, caplog: pytest.LogCaptureFixture) -> None: """Test Ctrl+C error.""" caplog.set_level(LogLevels.WARNING, "runway.core.providers.aws.s3") self.result_printer(CtrlCResult(Exception())) assert caplog.messages == ["cancelled: ctrl-c received"] - def test_dry_run(self, caplog: LogCaptureFixture) -> None: + def test_dry_run(self, caplog: pytest.LogCaptureFixture) -> None: """Test dry run.""" caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3") - result = DryRunResult( - transfer_type="upload", src="s3://mybucket/key", dest="./local/file" - ) + result = DryRunResult(transfer_type="upload", src="s3://mybucket/key", dest="./local/file") self.result_printer(result) assert caplog.messages == [f"(dryrun) upload: {result.src} to {result.dest}"] - def test_dry_run_unicode(self, caplog: LogCaptureFixture) -> None: + def test_dry_run_unicode(self, caplog: pytest.LogCaptureFixture) -> None: """Test dry run.""" caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3") result = DryRunResult( @@ -644,19 +625,19 @@ def test_dry_run_unicode(self, caplog: LogCaptureFixture) -> None: self.result_printer(result) assert caplog.messages == [f"(dryrun) upload: {result.src} to {result.dest}"] - def test_error(self, caplog: LogCaptureFixture) -> None: + def test_error(self, caplog: 
pytest.LogCaptureFixture) -> None: """Test error.""" caplog.set_level(LogLevels.ERROR, "runway.core.providers.aws.s3") self.result_printer(ErrorResult(Exception("my exception"))) assert caplog.messages == ["fatal error: my exception"] - def test_error_unicode(self, caplog: LogCaptureFixture) -> None: + def test_error_unicode(self, caplog: pytest.LogCaptureFixture) -> None: """Test error.""" caplog.set_level(LogLevels.ERROR, "runway.core.providers.aws.s3") self.result_printer(ErrorResult(Exception("unicode exists \u2713"))) assert caplog.messages == ["fatal error: unicode exists \u2713"] - def test_error_while_progress(self, caplog: LogCaptureFixture) -> None: + def test_error_while_progress(self, caplog: pytest.LogCaptureFixture) -> None: """Test error.""" caplog.set_level(LogLevels.ERROR, "runway.core.providers.aws.s3") mb = 1024**2 @@ -669,7 +650,7 @@ def test_error_while_progress(self, caplog: LogCaptureFixture) -> None: assert caplog.messages == ["fatal error: my exception"] assert not self.out_file.getvalue() - def test_failure(self, caplog: LogCaptureFixture) -> None: + def test_failure(self, caplog: pytest.LogCaptureFixture) -> None: """Test failure.""" caplog.set_level(LogLevels.ERROR, "runway.core.providers.aws.s3") transfer_type = "upload" @@ -688,7 +669,7 @@ def test_failure(self, caplog: LogCaptureFixture) -> None: assert caplog.messages == [f"upload failed: file to {dest} my exception"] def test_failure_but_no_expected_files_transferred_provided( - self, caplog: LogCaptureFixture + self, caplog: pytest.LogCaptureFixture ) -> None: """Test failure.""" caplog.set_level(LogLevels.ERROR, "runway.core.providers.aws.s3") @@ -714,12 +695,11 @@ def test_failure_but_no_expected_files_transferred_provided( ) self.result_printer(failure_result) assert self.out_file.getvalue() == ( - "Completed 1.0 MiB/~1.0 MiB (0 Bytes/s) with ~0 file(s) " - "remaining (calculating...)\r" + "Completed 1.0 MiB/~1.0 MiB (0 Bytes/s) with ~0 file(s) remaining (calculating...)\r" ) assert caplog.messages == [f"upload failed: file to {dest} my exception"] - def test_failure_for_delete(self, caplog: LogCaptureFixture) -> None: + def test_failure_for_delete(self, caplog: pytest.LogCaptureFixture) -> None: """Test failure.""" caplog.set_level(LogLevels.ERROR, "runway.core.providers.aws.s3") transfer_type = "delete" @@ -738,7 +718,7 @@ def test_failure_for_delete(self, caplog: LogCaptureFixture) -> None: assert caplog.messages == [f"delete failed: {src} my exception"] def test_failure_for_delete_but_no_expected_files_transferred_provided( - self, caplog: LogCaptureFixture + self, caplog: pytest.LogCaptureFixture ) -> None: """Test failure.""" shared_file = self.out_file @@ -764,7 +744,7 @@ def test_failure_for_delete_but_no_expected_files_transferred_provided( assert caplog.messages == [f"delete failed: {src} my exception"] def test_failure_for_delete_with_files_remaining( - self, caplog: LogCaptureFixture + self, caplog: pytest.LogCaptureFixture ) -> None: """Test failure.""" caplog.set_level(LogLevels.ERROR, "runway.core.providers.aws.s3") @@ -791,7 +771,7 @@ def test_failure_for_delete_with_files_remaining( ) assert caplog.messages == [f"delete failed: {src} my exception"] - def test_failure_unicode(self, caplog: LogCaptureFixture) -> None: + def test_failure_unicode(self, caplog: pytest.LogCaptureFixture) -> None: """Test failure.""" caplog.set_level(LogLevels.ERROR, "runway.core.providers.aws.s3") transfer_type = "upload" @@ -809,7 +789,7 @@ def test_failure_unicode(self, caplog: LogCaptureFixture) -> None: 
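
The caplog parameter's type hint likewise moves from the bare LogCaptureFixture import to the namespaced pytest.LogCaptureFixture, which pytest has exported publicly since 6.2, trimming one more top-level import. A minimal sketch of the caplog pattern these tests rely on (the "demo" logger name is invented):

    from __future__ import annotations

    import logging

    import pytest


    def test_logs_warning(caplog: pytest.LogCaptureFixture) -> None:
        # The second argument scopes capture to a single logger, as these
        # tests do with "runway.core.providers.aws.s3".
        caplog.set_level(logging.WARNING, "demo")
        logging.getLogger("demo").warning("my warning")
        assert caplog.messages == ["my warning"]
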
self.result_printer(failure_result) assert caplog.messages == [f"upload failed: {src} to {dest} my exception"] - def test_failure_with_files_remaining(self, caplog: LogCaptureFixture) -> None: + def test_failure_with_files_remaining(self, caplog: pytest.LogCaptureFixture) -> None: """Test failure.""" caplog.set_level(LogLevels.ERROR, "runway.core.providers.aws.s3") shared_file = self.out_file @@ -834,12 +814,11 @@ def test_failure_with_files_remaining(self, caplog: LogCaptureFixture) -> None: ) self.result_printer(failure_result) assert self.out_file.getvalue() == ( - "Completed 1.0 MiB/~4.0 MiB (0 Bytes/s) with ~3 file(s) " - "remaining (calculating...)\r" + "Completed 1.0 MiB/~4.0 MiB (0 Bytes/s) with ~3 file(s) remaining (calculating...)\r" ) assert caplog.messages == [f"upload failed: file to {dest} my exception"] - def test_failure_with_progress(self, caplog: LogCaptureFixture) -> None: + def test_failure_with_progress(self, caplog: pytest.LogCaptureFixture) -> None: """Test failure.""" caplog.set_level(LogLevels.ERROR, "runway.core.providers.aws.s3") shared_file = self.out_file @@ -875,7 +854,7 @@ def test_failure_with_progress(self, caplog: LogCaptureFixture) -> None: assert caplog.messages == [f"upload failed: file to {dest} my exception"] def test_final_total_does_not_print_out_newline_for_no_transfers( - self, caplog: LogCaptureFixture + self, caplog: pytest.LogCaptureFixture ) -> None: """Test final total.""" caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3") @@ -884,7 +863,7 @@ def test_final_total_does_not_print_out_newline_for_no_transfers( assert not self.out_file.getvalue() def test_final_total_notification_with_no_more_expected_progress( - self, caplog: LogCaptureFixture + self, caplog: pytest.LogCaptureFixture ) -> None: """Test final total.""" caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3") @@ -899,8 +878,7 @@ def test_final_total_notification_with_no_more_expected_progress( success_result = SuccessResult(transfer_type=transfer_type, src=src, dest=dest) self.result_printer(success_result) assert self.out_file.getvalue() == ( - "Completed 1.0 MiB/~1.0 MiB (0 Bytes/s) with ~0 file(s) " - "remaining (calculating...)\r" + "Completed 1.0 MiB/~1.0 MiB (0 Bytes/s) with ~0 file(s) remaining (calculating...)\r" ) assert caplog.messages == [f"upload: file to {dest}"] @@ -943,9 +921,7 @@ def test_get_progress_result_no_expected_transfer_bytes(self) -> None: self.result_recorder.expected_bytes_transferred = 0 progress_result = self.get_progress_result() self.result_printer(progress_result) - assert ( - self.out_file.getvalue() == "Completed 1 file(s) with 3 file(s) remaining\r" - ) + assert self.out_file.getvalue() == "Completed 1 file(s) with 3 file(s) remaining\r" def test_get_progress_result_still_calculating_totals_no_bytes(self) -> None: """Test get_progress_result.""" @@ -970,8 +946,7 @@ def test_get_progress_result_still_calculating_totals(self) -> None: progress_result = self.get_progress_result() self.result_printer(progress_result) assert ( - self.out_file.getvalue() - == "Completed 1.0 MiB/~20.0 MiB (0 Bytes/s) with ~3 file(s) " + self.out_file.getvalue() == "Completed 1.0 MiB/~20.0 MiB (0 Bytes/s) with ~3 file(s) " "remaining (calculating...)\r" ) @@ -1016,15 +991,15 @@ def test_init_no_error_file(self, mocker: MockerFixture) -> None: """Test __init__ no error_file.""" mock_stderr = mocker.patch("sys.stderr", Mock()) result = ResultPrinter(self.result_recorder, out_file=self.out_file) - assert result._error_file == mock_stderr # pylint: 
disable=protected-access
+        assert result._error_file == mock_stderr

     def test_init_no_out_file(self, mocker: MockerFixture) -> None:
         """Test __init__ no out_file."""
         mock_stdout = mocker.patch("sys.stdout", Mock())
         result = ResultPrinter(self.result_recorder, error_file=self.error_file)
-        assert result._out_file == mock_stdout  # pylint: disable=protected-access
+        assert result._out_file == mock_stdout

-    def test_success(self, caplog: LogCaptureFixture) -> None:
+    def test_success(self, caplog: pytest.LogCaptureFixture) -> None:
         """Test success."""
         caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3")
         transfer_type = "upload"
@@ -1038,7 +1013,7 @@ def test_success(self, caplog: LogCaptureFixture) -> None:
         assert caplog.messages == [f"upload: file to {dest}"]

     def test_success_but_no_expected_files_transferred_provided(
-        self, caplog: LogCaptureFixture
+        self, caplog: pytest.LogCaptureFixture
     ) -> None:
         """Test success but no expected files transferred provided."""
         caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3")
@@ -1054,12 +1029,11 @@ def test_success_but_no_expected_files_transferred_provided(
         success_result = SuccessResult(transfer_type=transfer_type, src=src, dest=dest)
         self.result_printer(success_result)
         assert self.out_file.getvalue() == (
-            "Completed 1.0 MiB/~1.0 MiB (0 Bytes/s) with ~0 file(s) "
-            "remaining (calculating...)\r"
+            "Completed 1.0 MiB/~1.0 MiB (0 Bytes/s) with ~0 file(s) remaining (calculating...)\r"
         )
         assert caplog.messages == [f"upload: file to {dest}"]

-    def test_success_delete(self, caplog: LogCaptureFixture) -> None:
+    def test_success_delete(self, caplog: pytest.LogCaptureFixture) -> None:
         """Test success for delete."""
         caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3")
         transfer_type = "delete"
@@ -1072,7 +1046,7 @@ def test_success_delete(self, caplog: LogCaptureFixture) -> None:
         assert caplog.messages == [f"delete: {src}"]

     def test_success_delete_but_no_expected_files_transferred_provided(
-        self, caplog: LogCaptureFixture
+        self, caplog: pytest.LogCaptureFixture
     ) -> None:
         """Test success delete but no expected files transferred provided."""
         caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3")
@@ -1088,9 +1062,7 @@ def test_success_delete_but_no_expected_files_transferred_provided(
         )
         assert caplog.messages == [f"delete: {src}"]

-    def test_success_delete_with_files_remaining(
-        self, caplog: LogCaptureFixture
-    ) -> None:
+    def test_success_delete_with_files_remaining(self, caplog: pytest.LogCaptureFixture) -> None:
         """Test success delete with files remaining."""
         caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3")
         transfer_type = "delete"
@@ -1105,19 +1077,17 @@ def test_success_delete_with_files_remaining(
         )
         assert caplog.messages == [f"delete: {src}"]

-    def test_success_unicode_src(self, caplog: LogCaptureFixture) -> None:
+    def test_success_unicode_src(self, caplog: pytest.LogCaptureFixture) -> None:
         """Test success."""
         caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3")
         self.result_recorder.final_expected_files_transferred = 1
         self.result_recorder.expected_files_transferred = 1
         self.result_recorder.files_transferred = 1
-        result = SuccessResult(
-            transfer_type="delete", src="s3://mybucket/tmp/\u2713", dest=None
-        )
+        result = SuccessResult(transfer_type="delete", src="s3://mybucket/tmp/\u2713", dest=None)
         self.result_printer(result)
         assert caplog.messages == [f"delete: {result.src}"]

-    def test_success_unicode_src_and_dest(self, caplog: LogCaptureFixture) -> None:
+    def test_success_unicode_src_and_dest(self, caplog: pytest.LogCaptureFixture) -> None:
         """Test success."""
         caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3")
         self.result_recorder.final_expected_files_transferred = 1
@@ -1129,7 +1099,7 @@ def test_success_unicode_src_and_dest(self, caplog: LogCaptureFixture) -> None:
         self.result_printer(result)
         assert caplog.messages == [f"upload: {result.src} to {result.dest}"]

-    def test_success_with_files_remaining(self, caplog: LogCaptureFixture) -> None:
+    def test_success_with_files_remaining(self, caplog: pytest.LogCaptureFixture) -> None:
         """Test success with files remaining."""
         caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3")
         transfer_type = "upload"
@@ -1143,12 +1113,11 @@ def test_success_with_files_remaining(self, caplog: LogCaptureFixture) -> None:
         success_result = SuccessResult(transfer_type=transfer_type, src=src, dest=dest)
         self.result_printer(success_result)
         assert self.out_file.getvalue() == (
-            "Completed 1.0 MiB/~4.0 MiB (0 Bytes/s) with ~3 file(s) "
-            "remaining (calculating...)\r"
+            "Completed 1.0 MiB/~4.0 MiB (0 Bytes/s) with ~3 file(s) remaining (calculating...)\r"
         )
         assert caplog.messages == [f"upload: file to {dest}"]

-    def test_success_with_progress(self, caplog: LogCaptureFixture) -> None:
+    def test_success_with_progress(self, caplog: pytest.LogCaptureFixture) -> None:
         """Test success with progress."""
         caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3")
         mb = 1024**2
@@ -1177,7 +1146,7 @@ def test_unknown_result_object(self) -> None:
         assert self.out_file.getvalue() == ""
         assert self.error_file.getvalue() == ""

-    def test_warning(self, caplog: LogCaptureFixture) -> None:
+    def test_warning(self, caplog: pytest.LogCaptureFixture) -> None:
         """Test warning."""
         caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3")
         self.result_recorder.final_expected_files_transferred = 1
@@ -1186,7 +1155,7 @@ def test_warning(self, caplog: LogCaptureFixture) -> None:
         self.result_printer(PrintTask("warning: my warning"))
         assert caplog.messages == ["warning: my warning"]

-    def test_warning_unicode(self, caplog: LogCaptureFixture) -> None:
+    def test_warning_unicode(self, caplog: pytest.LogCaptureFixture) -> None:
         """Test warning."""
         caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3")
         self.result_recorder.final_expected_files_transferred = 1
@@ -1195,7 +1164,7 @@ def test_warning_unicode(self, caplog: LogCaptureFixture) -> None:
         self.result_printer(PrintTask("warning: unicode exists \u2713"))
         assert caplog.messages == ["warning: unicode exists \u2713"]

-    def test_warning_with_progress(self, caplog: LogCaptureFixture) -> None:
+    def test_warning_with_progress(self, caplog: pytest.LogCaptureFixture) -> None:
         """Test warning."""
         caplog.set_level(LogLevels.NOTICE, "runway.core.providers.aws.s3")
         shared_file = self.out_file
@@ -1224,7 +1193,7 @@ class TestResultProcessor:
     """Test ResultProcessor."""

     result_processor: ResultProcessor
-    result_queue: "Queue[Any]"
+    result_queue: Queue[Any]

     def setup_method(self) -> None:
         """Run before each test method if run to return the class instance attrs to default."""
@@ -1239,7 +1208,6 @@ def test_run_error(self, mocker: MockerFixture) -> None:
         self.result_queue.put(ShutdownThreadRequest())
         assert not self.result_processor.run()
         mock_process_result.assert_called_once_with(error_result)
-        # pylint: disable=protected-access
         assert not self.result_processor._result_handlers_enabled

     def test_process_result_handle_error(self) -> None:
@@ -1265,8 +1233,7 @@ def setup_method(self) -> None:

     def test_get_ongoing_dict_key(self) -> None:
         """Test _get_ongoing_dict_key."""
         with pytest.raises(TypeError):
-            # pylint: disable=protected-access
-            self.result_recorder._get_ongoing_dict_key(Mock())  # type: ignore
+            self.result_recorder._get_ongoing_dict_key(Mock())

     def test_record_error_result(self) -> None:
         """Test _record_error_result."""
@@ -1277,9 +1244,7 @@ def test_record_error_result(self) -> None:

     def test_record_final_expected_files(self) -> None:
         """Test _record_final_expected_files."""
         assert not self.result_recorder.final_expected_files_transferred
-        assert not self.result_recorder(
-            FinalTotalSubmissionsResult(total_submissions=13)
-        )
+        assert not self.result_recorder(FinalTotalSubmissionsResult(total_submissions=13))
         assert self.result_recorder.final_expected_files_transferred == 13

     def test_record_progress_result_start_time(self, mocker: MockerFixture) -> None:
@@ -1287,9 +1252,7 @@ def test_record_progress_result_start_time(self, mocker: MockerFixture) -> None:
         mock_time = mocker.patch("time.time", return_value=time.time())
         assert not self.result_recorder.start_time
         assert not self.result_recorder(
-            ProgressResult(
-                total_transfer_size=13, timestamp=time.time(), bytes_transferred=0
-            )
+            ProgressResult(total_transfer_size=13, timestamp=time.time(), bytes_transferred=0)
         )
         assert self.result_recorder.start_time == mock_time.return_value
diff --git a/tests/unit/core/providers/aws/s3/_helpers/test_s3handler.py b/tests/unit/core/providers/aws/s3/_helpers/test_s3handler.py
index 1f4a8cbe8..ba438e388 100644
--- a/tests/unit/core/providers/aws/s3/_helpers/test_s3handler.py
+++ b/tests/unit/core/providers/aws/s3/_helpers/test_s3handler.py
@@ -1,14 +1,13 @@
 """Test runway.core.providers.aws.s3._helpers.s3handler."""

-# pylint: disable=redefined-outer-name,too-many-lines
 from __future__ import annotations

 from pathlib import Path
 from queue import Queue
-from typing import TYPE_CHECKING, Any, ClassVar, Dict, NamedTuple, Optional, cast
+from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple, cast
+from unittest.mock import MagicMock, Mock

 import pytest
-from mock import MagicMock, Mock
 from s3transfer.manager import TransferManager

 from runway.core.providers.aws.s3._helpers.file_info import FileInfo
@@ -70,25 +69,23 @@ class MockSubmitters(NamedTuple):
     """Named tuple return value of mock_submitters."""

-    classes: Dict[str, Mock]
-    instances: Dict[str, Mock]
+    classes: dict[str, Mock]
+    instances: dict[str, Mock]


-@pytest.fixture(scope="function")
+@pytest.fixture()
 def mock_submitters(mocker: MockerFixture) -> MockSubmitters:
     """Mock handler submitters."""
     classes = {
         "copy": mocker.patch(f"{MODULE}.CopyRequestSubmitter", Mock()),
         "delete": mocker.patch(f"{MODULE}.DeleteRequestSubmitter", Mock()),
         "download": mocker.patch(f"{MODULE}.DownloadRequestSubmitter", Mock()),
-        "download_stream": mocker.patch(
-            f"{MODULE}.DownloadStreamRequestSubmitter", Mock()
-        ),
+        "download_stream": mocker.patch(f"{MODULE}.DownloadStreamRequestSubmitter", Mock()),
         "local_delete": mocker.patch(f"{MODULE}.LocalDeleteRequestSubmitter", Mock()),
         "upload": mocker.patch(f"{MODULE}.UploadRequestSubmitter", Mock()),
         "upload_stream": mocker.patch(f"{MODULE}.UploadStreamRequestSubmitter", Mock()),
     }
-    instances: Dict[str, Mock] = {}
+    instances: dict[str, Mock] = {}
     for name, mock_class in classes.items():
         inst = Mock(can_submit=Mock(return_value=False), submit=Mock(return_value=True))
         mock_class.return_value = inst
@@ -103,7 +100,7 @@ class BaseTransferRequestSubmitterTest:
     config_params: ParametersDataModel
     filename: ClassVar[str] = "test-file.txt"
     key: ClassVar[str] = "test-key.txt"
-    result_queue: "Queue[Any]"
+    result_queue: Queue[Any]
     transfer_manager: Mock

     def setup_method(self) -> None:
@@ -135,12 +132,9 @@ def test_can_submit(self) -> None:
             ("s3://test", "s3://test"),
         ],
     )
-    def test_format_s3_path(
-        self, expected: Optional[str], path: Optional[AnyPath]
-    ) -> None:
+    def test_format_s3_path(self, expected: str | None, path: AnyPath | None) -> None:
         """Test _format_s3_path."""
         assert (
-            # pylint: disable=protected-access
             BaseTransferRequestSubmitter(
                 Mock(), Mock(), ParametersDataModel(dest="", src="")
             )._format_s3_path(path)
@@ -202,7 +196,7 @@ def test_submit(self) -> None:
         self.config_params["guess_mime_type"] = True
         future = self.transfer_request_submitter.submit(fileinfo)
         assert self.transfer_manager.copy.return_value is future
-        call_kwargs = cast(Dict[str, Any], self.transfer_manager.copy.call_args[1])
+        call_kwargs = cast(dict[str, Any], self.transfer_manager.copy.call_args[1])
         assert call_kwargs["copy_source"] == {
             "Bucket": self.source_bucket,
             "Key": self.source_key,
@@ -230,7 +224,7 @@ def test_submit_content_type_specified(self) -> None:
         self.config_params["content_type"] = "text/plain"
         self.transfer_request_submitter.submit(fileinfo)

-        copy_call_kwargs = cast(Dict[str, Any], self.transfer_manager.copy.call_args[1])
+        copy_call_kwargs = cast(dict[str, Any], self.transfer_manager.copy.call_args[1])
         assert copy_call_kwargs["extra_args"] == {"ContentType": "text/plain"}
         ref_subscribers = [ProvideSizeSubscriber, CopyResultSubscriber]
         actual_subscribers = copy_call_kwargs["subscribers"]
@@ -269,7 +263,7 @@ def test_submit_extra_args(self) -> None:
         self.config_params["storage_class"] = "STANDARD_IA"
         self.transfer_request_submitter.submit(fileinfo)

-        call_kwargs = cast(Dict[str, Any], self.transfer_manager.copy.call_args[1])
+        call_kwargs = cast(dict[str, Any], self.transfer_manager.copy.call_args[1])
         assert call_kwargs["extra_args"] == {"StorageClass": "STANDARD_IA"}

     def test_submit_move_adds_delete_source_subscriber(self) -> None:
@@ -287,7 +281,7 @@ def test_submit_move_adds_delete_source_subscriber(self) -> None:
             DeleteSourceObjectSubscriber,
             CopyResultSubscriber,
         ]
-        call_kwargs = cast(Dict[str, Any], self.transfer_manager.copy.call_args[1])
+        call_kwargs = cast(dict[str, Any], self.transfer_manager.copy.call_args[1])
         actual_subscribers = call_kwargs["subscribers"]
         assert len(ref_subscribers) == len(actual_subscribers)
         for i, actual_subscriber in enumerate(actual_subscribers):
@@ -301,7 +295,7 @@ def test_submit_no_guess_content_mime_type(self) -> None:
         )
         self.config_params["guess_mime_type"] = False
         self.transfer_request_submitter.submit(fileinfo)
-        call_kwargs = cast(Dict[str, Any], self.transfer_manager.copy.call_args[1])
+        call_kwargs = cast(dict[str, Any], self.transfer_manager.copy.call_args[1])
         ref_subscribers = [ProvideSizeSubscriber, CopyResultSubscriber]
         actual_subscribers = call_kwargs["subscribers"]
         assert len(ref_subscribers) == len(actual_subscribers)
@@ -348,10 +342,7 @@ def test_submit_warn_glacier_incompatible(self) -> None:
         warning_result = self.result_queue.get()
         assert isinstance(warning_result, PrintTask)
-        assert (
-            "Unable to perform copy operations on GLACIER objects"
-            in warning_result.message
-        )
+        assert "Unable to perform copy operations on GLACIER objects" in warning_result.message
         assert future is None
         assert len(self.transfer_manager.copy.call_args_list) == 0  # type: ignore
@@ -392,13 +383,11 @@ def test_can_submit_local_delete(self) -> None:

     def test_submit(self) -> None:
         """Test submit."""
-        fileinfo = FileInfo(
-            src=self.bucket + "/" + self.key, dest=None, operation_name="delete"
-        )
+        fileinfo = FileInfo(src=self.bucket + "/" + self.key, dest=None, operation_name="delete")
         future = self.transfer_request_submitter.submit(fileinfo)
         assert self.transfer_manager.delete.return_value is future

-        call_kwargs = cast(Dict[str, Any], self.transfer_manager.delete.call_args[1])
+        call_kwargs = cast(dict[str, Any], self.transfer_manager.delete.call_args[1])
         assert call_kwargs["bucket"] == self.bucket
         assert call_kwargs["key"] == self.key
         assert call_kwargs["extra_args"] == {}
@@ -445,13 +434,11 @@ def setup_method(self) -> None:

     def assert_no_downloads_happened(self) -> None:
         """Assert not downloads."""
-        assert len(self.transfer_manager.download.call_args_list) == 0  # type: ignore
+        assert len(self.transfer_manager.download.call_args_list) == 0

-    def create_file_info(
-        self, key: str, response_data: Optional[Dict[str, Any]] = None
-    ) -> FileInfo:
+    def create_file_info(self, key: str, response_data: dict[str, Any] | None = None) -> FileInfo:
         """Create FileInfo."""
-        kwargs: Dict[str, Any] = {
+        kwargs: dict[str, Any] = {
             "src": self.bucket + "/" + key,
             "src_type": "s3",
             "dest": self.filename,
@@ -479,7 +466,7 @@ def test_submit(self) -> None:
         fileinfo = self.create_file_info(self.key)
         future = self.transfer_request_submitter.submit(fileinfo)
         assert self.transfer_manager.download.return_value is future
-        call_kwargs = cast(Dict[str, Any], self.transfer_manager.download.call_args[1])
+        call_kwargs = cast(dict[str, Any], self.transfer_manager.download.call_args[1])
         assert call_kwargs["fileobj"] == self.filename
         assert call_kwargs["bucket"] == self.bucket
         assert call_kwargs["key"] == self.key
@@ -523,7 +510,7 @@ def test_submit_extra_args(self) -> None:
         self.config_params["sse_c"] = "AES256"
         self.config_params["sse_c_key"] = "test-key"
         self.transfer_request_submitter.submit(fileinfo)
-        call_kwargs = cast(Dict[str, Any], self.transfer_manager.download.call_args[1])
+        call_kwargs = cast(dict[str, Any], self.transfer_manager.download.call_args[1])
         assert call_kwargs["extra_args"] == {
             "SSECustomerAlgorithm": "AES256",
             "SSECustomerKey": "test-key",
@@ -542,7 +529,7 @@ def test_submit_move_adds_delete_source_subscriber(self) -> None:
             DeleteSourceObjectSubscriber,
             DownloadResultSubscriber,
         ]
-        call_kwargs = cast(Dict[str, Any], self.transfer_manager.download.call_args[1])
+        call_kwargs = cast(dict[str, Any], self.transfer_manager.download.call_args[1])
         actual_subscribers = call_kwargs["subscribers"]
         assert len(ref_subscribers) == len(actual_subscribers)
         for i, actual_subscriber in enumerate(actual_subscribers):
@@ -581,9 +568,7 @@ def test_warn_and_ignore_with_leading_chars(self) -> None:

     def test_submit_warn_glacier_force(self) -> None:
         """Test submit."""
         self.config_params["force_glacier_transfer"] = True
-        fileinfo = self.create_file_info(
-            self.key, response_data={"StorageClass": "GLACIER"}
-        )
+        fileinfo = self.create_file_info(self.key, response_data={"StorageClass": "GLACIER"})
         future = self.transfer_request_submitter.submit(fileinfo)
         assert self.result_queue.empty()
         assert self.transfer_manager.download.return_value is future
@@ -614,10 +599,7 @@ def test_submit_warn_glacier_incompatible(self) -> None:
         future = self.transfer_request_submitter.submit(fileinfo)
         warning_result = self.result_queue.get()
         assert isinstance(warning_result, PrintTask)
-        assert (
-            "Unable to perform download operations on GLACIER objects"
-            in warning_result.message
-        )
+        assert "Unable to perform download operations on GLACIER objects" in warning_result.message
         assert not future
         self.assert_no_downloads_happened()
@@ -655,7 +637,7 @@ def test_submit(self) -> None:

         future = self.transfer_request_submitter.submit(fileinfo)
         assert self.transfer_manager.download.return_value is future
-        call_kwargs = cast(Dict[str, Any], self.transfer_manager.download.call_args[1])
+        call_kwargs = cast(dict[str, Any], self.transfer_manager.download.call_args[1])
         assert isinstance(call_kwargs["fileobj"], StdoutBytesWriter)
         assert call_kwargs["bucket"] == self.bucket
         assert call_kwargs["key"] == self.key
@@ -701,27 +683,21 @@ def setup_method(self) -> None:

     def test_can_submit(self) -> None:
         """Test can_submit."""
-        fileinfo = FileInfo(
-            src=self.filename, dest=None, operation_name="delete", src_type="local"
-        )
+        fileinfo = FileInfo(src=self.filename, dest=None, operation_name="delete", src_type="local")
         assert self.transfer_request_submitter.can_submit(fileinfo)
         fileinfo.operation_name = "foo"
         assert not self.transfer_request_submitter.can_submit(fileinfo)

     def test_can_submit_remote_deletes(self) -> None:
         """Test can_submit."""
-        fileinfo = FileInfo(
-            src=self.filename, dest=None, operation_name="delete", src_type="s3"
-        )
+        fileinfo = FileInfo(src=self.filename, dest=None, operation_name="delete", src_type="s3")
         assert not self.transfer_request_submitter.can_submit(fileinfo)

     def test_submit(self, tmp_path: Path) -> None:
         """Test submit."""
         full_filename = tmp_path / self.filename
         full_filename.write_text("content")
-        fileinfo = FileInfo(
-            src=full_filename, dest=None, operation_name="delete", src_type="local"
-        )
+        fileinfo = FileInfo(src=full_filename, dest=None, operation_name="delete", src_type="local")
         result = self.transfer_request_submitter.submit(fileinfo)
         assert result
         queued_result = self.result_queue.get()
@@ -758,9 +734,7 @@ def test_dry_run(self) -> None:

     def test_submit_with_exception(self) -> None:
         """Test submit."""
-        fileinfo = FileInfo(
-            src=self.filename, dest=None, operation_name="delete", src_type="local"
-        )
+        fileinfo = FileInfo(src=self.filename, dest=None, operation_name="delete", src_type="local")
         result = self.transfer_request_submitter.submit(fileinfo)
         assert result

@@ -783,7 +757,7 @@ class TestS3TransferHandler:

     config_params: ClassVar[ParametersDataModel] = ParametersDataModel(dest="", src="")
     result_command_recorder: CommandResultRecorder
-    result_queue: "Queue[Any]"
+    result_queue: Queue[Any]
     transfer_manager: TransferManager

     def setup_method(self) -> None:
@@ -804,11 +778,9 @@ def test_call(self, mock_submitters: MockSubmitters, tmp_path: Path) -> None:
         )
         fileinfos = [FileInfo(src=tmp_path)]
         assert handler.call(fileinfos) == "success"  # type: ignore
-        mock_submitters.instances["copy"].can_submit.assert_called_once_with(
-            fileinfos[0]
-        )
+        mock_submitters.instances["copy"].can_submit.assert_called_once_with(fileinfos[0])
         mock_submitters.instances["copy"].submit.assert_called_once_with(fileinfos[0])
-        self.result_command_recorder.notify_total_submissions.assert_called_once_with(1)  # type: ignore # noqa
+        self.result_command_recorder.notify_total_submissions.assert_called_once_with(1)  # type: ignore
         self.result_command_recorder.get_command_result.assert_called_once_with()  # type: ignore


@@ -817,7 +789,7 @@ class TestS3TransferHandlerFactory:

     config_params: ParametersDataModel
     client: S3Client
-    result_queue: "Queue[Any]"
+    result_queue: Queue[Any]
     runtime_config: TransferConfigDict

     def setup_method(self) -> None:
@@ -839,12 +811,10 @@ def test_call_is_stream(self, mocker: MockerFixture) -> None:
         assert S3TransferHandlerFactory(self.config_params, self.runtime_config)(
             self.client, self.result_queue
         )
-        call_kwargs = cast(Dict[str, Any], mock_processor.call_args[1])
+        call_kwargs = cast(dict[str, Any], mock_processor.call_args[1])
         assert len(call_kwargs["result_handlers"]) == 2
         assert isinstance(call_kwargs["result_handlers"][0], ResultRecorder)
-        assert isinstance(
-            call_kwargs["result_handlers"][1], OnlyShowErrorsResultPrinter
-        )
+        assert isinstance(call_kwargs["result_handlers"][1], OnlyShowErrorsResultPrinter)

     def test_call_no_progress(self, mocker: MockerFixture) -> None:
         """Test __call__."""
@@ -853,7 +823,7 @@ def test_call_no_progress(self, mocker: MockerFixture) -> None:
         assert S3TransferHandlerFactory(self.config_params, self.runtime_config)(
             self.client, self.result_queue
         )
-        call_kwargs = cast(Dict[str, Any], mock_processor.call_args[1])
+        call_kwargs = cast(dict[str, Any], mock_processor.call_args[1])
         assert len(call_kwargs["result_handlers"]) == 2
         assert isinstance(call_kwargs["result_handlers"][0], ResultRecorder)
         assert isinstance(call_kwargs["result_handlers"][1], NoProgressResultPrinter)
@@ -865,12 +835,10 @@ def test_call_only_show_errors(self, mocker: MockerFixture) -> None:
         assert S3TransferHandlerFactory(self.config_params, self.runtime_config)(
             self.client, self.result_queue
         )
-        call_kwargs = cast(Dict[str, Any], mock_processor.call_args[1])
+        call_kwargs = cast(dict[str, Any], mock_processor.call_args[1])
         assert len(call_kwargs["result_handlers"]) == 2
         assert isinstance(call_kwargs["result_handlers"][0], ResultRecorder)
-        assert isinstance(
-            call_kwargs["result_handlers"][1], OnlyShowErrorsResultPrinter
-        )
+        assert isinstance(call_kwargs["result_handlers"][1], OnlyShowErrorsResultPrinter)

     def test_call_quiet(self, mocker: MockerFixture) -> None:
         """Test __call__."""
@@ -879,7 +847,7 @@ def test_call_quiet(self, mocker: MockerFixture) -> None:
         assert S3TransferHandlerFactory(self.config_params, self.runtime_config)(
             self.client, self.result_queue
         )
-        call_kwargs = cast(Dict[str, Any], mock_processor.call_args[1])
+        call_kwargs = cast(dict[str, Any], mock_processor.call_args[1])
         assert len(call_kwargs["result_handlers"]) == 1
         assert isinstance(call_kwargs["result_handlers"][0], ResultRecorder)
@@ -914,7 +882,7 @@ def test_submit(self) -> None:

         future = self.transfer_request_submitter.submit(fileinfo)
         assert self.transfer_manager.upload.return_value is future
-        call_kwargs = cast(Dict[str, Any], self.transfer_manager.upload.call_args[1])
+        call_kwargs = cast(dict[str, Any], self.transfer_manager.upload.call_args[1])
         assert call_kwargs["fileobj"] == self.filename
         assert call_kwargs["bucket"] == self.bucket
         assert call_kwargs["key"] == self.key
@@ -936,7 +904,7 @@ def test_submit_content_type_specified(self) -> None:
         self.config_params["content_type"] = "text/plain"
         self.transfer_request_submitter.submit(fileinfo)

-        call_kwargs = cast(Dict[str, Any], self.transfer_manager.upload.call_args[1])
+        call_kwargs = cast(dict[str, Any], self.transfer_manager.upload.call_args[1])
         assert call_kwargs["extra_args"] == {"ContentType": "text/plain"}
         ref_subscribers = [ProvideSizeSubscriber, UploadResultSubscriber]
         actual_subscribers = call_kwargs["subscribers"]
@@ -991,7 +959,7 @@ def test_submit_extra_args(self) -> None:
         self.config_params["storage_class"] = "STANDARD_IA"
         self.transfer_request_submitter.submit(fileinfo)

-        call_kwargs = cast(Dict[str, Any], self.transfer_manager.upload.call_args[1])
+        call_kwargs = cast(dict[str, Any], self.transfer_manager.upload.call_args[1])
         assert call_kwargs["extra_args"] == {"StorageClass": "STANDARD_IA"}

     def test_submit_move_adds_delete_source_subscriber(self) -> None:
@@ -1006,7 +974,7 @@ def test_submit_move_adds_delete_source_subscriber(self) -> None:
             DeleteSourceFileSubscriber,
             UploadResultSubscriber,
         ]
-        call_kwargs = cast(Dict[str, Any], self.transfer_manager.upload.call_args[1])
+        call_kwargs = cast(dict[str, Any], self.transfer_manager.upload.call_args[1])
         actual_subscribers = call_kwargs["subscribers"]
         assert len(ref_subscribers) == len(actual_subscribers)
         for i, actual_subscriber in enumerate(actual_subscribers):
@@ -1018,7 +986,7 @@ def test_submit_no_guess_content_mime_type(self) -> None:
         self.config_params["guess_mime_type"] = False
         self.transfer_request_submitter.submit(fileinfo)

-        call_kwargs = cast(Dict[str, Any], self.transfer_manager.upload.call_args[1])
+        call_kwargs = cast(dict[str, Any], self.transfer_manager.upload.call_args[1])
         ref_subscribers = [ProvideSizeSubscriber, UploadResultSubscriber]
         actual_subscribers = call_kwargs["subscribers"]
         assert len(ref_subscribers) == len(actual_subscribers)
@@ -1072,7 +1040,7 @@ def test_submit(self) -> None:

         future = self.transfer_request_submitter.submit(fileinfo)
         assert self.transfer_manager.upload.return_value is future
-        call_kwargs = cast(Dict[str, Any], self.transfer_manager.upload.call_args[1])
+        call_kwargs = cast(dict[str, Any], self.transfer_manager.upload.call_args[1])
         assert isinstance(call_kwargs["fileobj"], NonSeekableStream)
         assert call_kwargs["bucket"] == self.bucket
         assert call_kwargs["key"] == self.key
@@ -1108,7 +1076,7 @@ def test_submit_expected_size_provided(self) -> None:
         self.config_params["expected_size"] = provided_size
         fileinfo = FileInfo(src=self.filename, dest=self.bucket + "/" + self.key)
         self.transfer_request_submitter.submit(fileinfo)
-        call_kwargs = cast(Dict[str, Any], self.transfer_manager.upload.call_args[1])
+        call_kwargs = cast(dict[str, Any], self.transfer_manager.upload.call_args[1])
         ref_subscribers = [ProvideSizeSubscriber, UploadStreamResultSubscriber]

         actual_subscribers = call_kwargs["subscribers"]
@@ -1129,7 +1097,4 @@ def test_submit_raise_stdin_missing(self, mocker: MockerFixture) -> None:
         )
         with pytest.raises(StdinMissingError) as excinfo:
             self.transfer_request_submitter.submit(fileinfo)
-        assert (
-            excinfo.value.message
-            == "stdin is required for this operation, but is not available"
-        )
+        assert excinfo.value.message == "stdin is required for this operation, but is not available"
diff --git a/tests/unit/core/providers/aws/s3/_helpers/test_transfer_config.py b/tests/unit/core/providers/aws/s3/_helpers/test_transfer_config.py
index fee3606f9..9b8c7dd50 100644
--- a/tests/unit/core/providers/aws/s3/_helpers/test_transfer_config.py
+++ b/tests/unit/core/providers/aws/s3/_helpers/test_transfer_config.py
@@ -2,8 +2,6 @@

 from __future__ import annotations

-from typing import Dict
-
 import pytest
 from s3transfer.manager import TransferConfig

@@ -26,17 +24,12 @@ def test_build_config(self) -> None:

     def test_build_config_human_readable_rates_converted_to_bytes(self) -> None:
         """Test build_config."""
-        assert (
-            RuntimeConfig.build_config(max_bandwidth="1MB/s")["max_bandwidth"]
-            == 1024**2
-        )
+        assert RuntimeConfig.build_config(max_bandwidth="1MB/s")["max_bandwidth"] == 1024**2

     def test_build_config_human_readable_sizes_converted_to_bytes(self) -> None:
         """Test build_config."""
         assert (
-            RuntimeConfig.build_config(multipart_threshold="10MB")[
-                "multipart_threshold"
-            ]
+            RuntimeConfig.build_config(multipart_threshold="10MB")["multipart_threshold"]
             == 10 * 1024 * 1024
         )

@@ -69,7 +62,7 @@ def test_build_config_partial_override(self) -> None:
             {"max_queue_size": "not an int"},
         ],
     )
-    def test_build_config_validates_integer_types(self, kwargs: Dict[str, str]) -> None:
+    def test_build_config_validates_integer_types(self, kwargs: dict[str, str]) -> None:
         """Test build_config."""
         with pytest.raises(InvalidConfigError):
             RuntimeConfig.build_config(**kwargs)
@@ -84,9 +77,7 @@ def test_build_config_validates_integer_types(self, kwargs: Dict[str, str]) -> N
             {"multipart_threshold": -15},
         ],
     )
-    def test_build_config_validates_positive_integers(
-        self, kwargs: Dict[str, str]
-    ) -> None:
+    def test_build_config_validates_positive_integers(self, kwargs: dict[str, str]) -> None:
         """Test build_config."""
         with pytest.raises(InvalidConfigError):
             RuntimeConfig.build_config(**kwargs)
diff --git a/tests/unit/core/providers/aws/s3/_helpers/test_utils.py b/tests/unit/core/providers/aws/s3/_helpers/test_utils.py
index 799b89b88..3020ca911 100644
--- a/tests/unit/core/providers/aws/s3/_helpers/test_utils.py
+++ b/tests/unit/core/providers/aws/s3/_helpers/test_utils.py
@@ -1,6 +1,5 @@
 """Test runway.core.providers.aws.s3._helpers.utils."""

-# pylint: disable=too-many-lines
 from __future__ import annotations

 import datetime
@@ -13,7 +12,8 @@
 from io import BytesIO
 from pathlib import Path
 from queue import Queue
-from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional
+from typing import TYPE_CHECKING, Any, ClassVar
+from unittest.mock import Mock, PropertyMock, sentinel

 import boto3
 import pytest
@@ -21,7 +21,6 @@
 from botocore.hooks import HierarchicalEmitter
 from botocore.stub import Stubber
 from dateutil.tz import tzlocal
-from mock import Mock, PropertyMock, sentinel
 from s3transfer.compat import seekable
 from s3transfer.futures import TransferFuture

@@ -90,9 +89,9 @@ class TestBucketLister:
     date_parser: ClassVar[Mock] = Mock(return_value=sentinel.now)
     emitter: ClassVar[HierarchicalEmitter] = HierarchicalEmitter()
     client: ClassVar[Mock] = Mock(meta=Mock(events=emitter))
-    responses: List[Any] = []
+    responses: list[Any] = []

-    def fake_paginate(self, *_args: Any, **_kwargs: Any) -> List[Any]:
+    def fake_paginate(self, *_args: Any, **_kwargs: Any) -> list[Any]:
         """Fake paginate."""
         for response in self.responses:
             self.emitter.emit("after-call.s3.ListObjectsV2", parsed=response)
@@ -121,7 +120,7 @@ def test_list_objects(self) -> None:
         for individual_response in individual_response_elements:
             assert individual_response["LastModified"] == now

-    def test_list_objects_pass_extra_args(self):
+    def test_list_objects_pass_extra_args(self) -> None:
         """Test list_objects."""
         self.client.get_paginator.return_value.paginate = Mock(
             return_value=[
@@ -137,18 +136,14 @@ def test_list_objects_pass_extra_args(self):
             ]
         )
         lister = BucketLister(self.client, self.date_parser)
-        list(
-            lister.list_objects(
-                bucket="mybucket", extra_args={"RequestPayer": "requester"}
-            )
-        )
+        list(lister.list_objects(bucket="mybucket", extra_args={"RequestPayer": "requester"}))
         self.client.get_paginator.return_value.paginate.assert_called_with(
             Bucket="mybucket",
             PaginationConfig={"PageSize": None},
             RequestPayer="requester",
         )

-    def test_list_objects_pass_prefix(self):
+    def test_list_objects_pass_prefix(self) -> None:
         """Test list_objects."""
         self.client.get_paginator.return_value.paginate = Mock(
             return_value=[
@@ -183,9 +178,7 @@ def test_on_done_delete(self) -> None:
         """Test on_done."""
         client = boto3.client("s3")
         stubber = Stubber(client)
-        stubber.add_response(
-            "delete_object", {}, {"Bucket": self.bucket, "Key": self.key}
-        )
+        stubber.add_response("delete_object", {}, {"Bucket": self.bucket, "Key": self.key})
         future = Mock(meta=self.meta)
         with stubber:
             assert not DeleteCopySourceObjectSubscriber(client).on_done(future)
@@ -232,9 +225,7 @@ def test_on_done_delete(self, tmp_path: Path) -> None:
         tmp_file = tmp_path / "test.txt"
         tmp_file.write_text("data")
         future = Mock(
-            meta=FakeTransferFutureMeta(
-                call_args=FakeTransferFutureCallArgs(fileobj=str(tmp_file))
-            )
+            meta=FakeTransferFutureMeta(call_args=FakeTransferFutureCallArgs(fileobj=str(tmp_file)))
         )
         DeleteSourceFileSubscriber().on_done(future)
         assert not tmp_file.exists()
@@ -244,9 +235,7 @@ def test_on_done_exception(self, tmp_path: Path) -> None:
         """Test on_done."""
         tmp_file = tmp_path / "test.txt"
         future = Mock(
-            meta=FakeTransferFutureMeta(
-                call_args=FakeTransferFutureCallArgs(fileobj=str(tmp_file))
-            )
+            meta=FakeTransferFutureMeta(call_args=FakeTransferFutureCallArgs(fileobj=str(tmp_file)))
         )
         DeleteSourceFileSubscriber().on_done(future)
         assert not tmp_file.exists()
@@ -267,9 +256,7 @@ def test_on_done_delete(self) -> None:
         """Test on_done."""
         client = boto3.client("s3")
         stubber = Stubber(client)
-        stubber.add_response(
-            "delete_object", {}, {"Bucket": self.bucket, "Key": self.key}
-        )
+        stubber.add_response("delete_object", {}, {"Bucket": self.bucket, "Key": self.key})
         future = Mock(meta=self.meta)
         with stubber:
             assert not DeleteSourceObjectSubscriber(client).on_done(future)
@@ -348,7 +335,7 @@ def test_on_queued_exists(self, tmp_path: Path) -> None:
         assert tmp_dir.is_dir()
         future.set_exception.assert_not_called()

-    def test_on_queued_handle_eexist(
+    def test_on_queued_handle_eexist(  # cspell: disable-line
         self, mocker: MockerFixture, tmp_path: Path
     ) -> None:
         """Test on_queued."""
@@ -359,8 +346,8 @@ def test_on_queued_handle_eexist(
             )
         )
         exc = OSError()
-        exc.errno = errno.EEXIST
-        mocker.patch("os.makedirs", side_effect=exc)
+        exc.errno = errno.EEXIST  # cspell: disable-line
+        mocker.patch("pathlib.Path.mkdir", side_effect=exc)
         assert not DirectoryCreatorSubscriber().on_queued(future)
         assert not tmp_dir.exists()
@@ -372,7 +359,7 @@ def test_on_queued_os_error(self, mocker: MockerFixture, tmp_path: Path) -> None:
                 call_args=FakeTransferFutureCallArgs(fileobj=tmp_dir / "test.txt")
             )
         )
-        mocker.patch("os.makedirs", side_effect=OSError())
+        mocker.patch("pathlib.Path.mkdir", side_effect=OSError())
         with pytest.raises(CreateDirectoryError):
             DirectoryCreatorSubscriber().on_queued(future)
         assert not tmp_dir.exists()
@@ -400,10 +387,10 @@ class TestOnDoneFilteredSubscriber:
     class Subscriber(OnDoneFilteredSubscriber):
         """Subscriber subclass to test."""

-        def __init__(self):
+        def __init__(self) -> None:
             """Instantiate class."""
-            self.on_success_calls: List[Any] = []
-            self.on_failure_calls: List[Any] = []
+            self.on_success_calls: list[Any] = []
+            self.on_failure_calls: list[Any] = []

         def _on_success(self, future: Any) -> None:
             self.on_success_calls.append(future)
@@ -411,26 +398,24 @@ def _on_success(self, future: Any) -> None:
         def _on_failure(self, future: Any, exception: Exception) -> None:
             self.on_failure_calls.append((future, exception))

-    def test_on_done_failure(self):
+    def test_on_done_failure(self) -> None:
         """Test on_done."""
         subscriber = self.Subscriber()
         exception = Exception("my exception")
         future = FakeTransferFuture(exception=exception)
         subscriber.on_done(future)  # type: ignore
         assert subscriber.on_failure_calls == [(future, exception)]
-        assert not subscriber.on_success_calls and isinstance(
-            subscriber.on_success_calls, list
-        )
+        assert not subscriber.on_success_calls
+        assert isinstance(subscriber.on_success_calls, list)

-    def test_on_done_success(self):
+    def test_on_done_success(self) -> None:
         """Test on_done."""
         subscriber = self.Subscriber()
         future = FakeTransferFuture("return-value")
         subscriber.on_done(future)  # type: ignore
         assert subscriber.on_success_calls == [future]
-        assert not subscriber.on_failure_calls and isinstance(
-            subscriber.on_failure_calls, list
-        )
+        assert not subscriber.on_failure_calls
+        assert isinstance(subscriber.on_failure_calls, list)


 class TestProvideCopyContentTypeSubscriber:
@@ -459,25 +444,19 @@ class TestProvideLastModifiedTimeSubscriber:
     desired_utime: ClassVar[datetime.datetime] = datetime.datetime(
         2016, 1, 18, 7, 0, 0, tzinfo=tzlocal()
     )
-    result_queue: ClassVar["Queue[Any]"] = Queue()
-    subscriber: ClassVar[ProvideLastModifiedTimeSubscriber] = (
-        ProvideLastModifiedTimeSubscriber(desired_utime, result_queue)
+    result_queue: ClassVar[Queue[Any]] = Queue()
+    subscriber: ClassVar[ProvideLastModifiedTimeSubscriber] = ProvideLastModifiedTimeSubscriber(
+        desired_utime, result_queue
     )

-    def test_on_done_handle_exception(
-        self, mocker: MockerFixture, tmp_path: Path
-    ) -> None:
+    def test_on_done_handle_exception(self, mocker: MockerFixture, tmp_path: Path) -> None:
         """Test on_done."""
         tmp_file = tmp_path / "test.txt"
         tmp_file.touch()
         future = FakeTransferFuture(
-            meta=FakeTransferFutureMeta(
-                call_args=FakeTransferFutureCallArgs(fileobj=tmp_file)
-            )
-        )
-        mock_create_warning = mocker.patch(
-            f"{MODULE}.create_warning", return_value="warning"
+            meta=FakeTransferFutureMeta(call_args=FakeTransferFutureCallArgs(fileobj=tmp_file))
         )
+        mock_create_warning = mocker.patch(f"{MODULE}.create_warning", return_value="warning")
         assert not ProvideLastModifiedTimeSubscriber(
             None, self.result_queue  # type: ignore
         ).on_done(
@@ -485,10 +464,7 @@ def test_on_done_handle_exception(
         )
         mock_create_warning.assert_called_once()
         assert mock_create_warning.call_args[0][0] == tmp_file
-        assert (
-            "was unable to update the last modified time."
-            in mock_create_warning.call_args[0][1]
-        )
+        assert "was unable to update the last modified time." in mock_create_warning.call_args[0][1]
         assert self.result_queue.get() == "warning"

     def test_on_done_modifies_utime(self, tmp_path: Path) -> None:
@@ -496,9 +472,7 @@ def test_on_done_modifies_utime(self, tmp_path: Path) -> None:
         tmp_file = tmp_path / "test.txt"
         tmp_file.touch()
         future = FakeTransferFuture(
-            meta=FakeTransferFutureMeta(
-                call_args=FakeTransferFutureCallArgs(fileobj=tmp_file)
-            )
+            meta=FakeTransferFutureMeta(call_args=FakeTransferFutureCallArgs(fileobj=tmp_file))
         )
         assert not self.subscriber.on_done(future)  # type: ignore
         _, utime = get_file_stat(tmp_file)
@@ -523,9 +497,7 @@ class TestProvideUploadContentTypeSubscriber:
     def test_on_queued(self) -> None:
         """Test on_queued."""
         future = FakeTransferFuture(
-            meta=FakeTransferFutureMeta(
-                call_args=FakeTransferFutureCallArgs(fileobj="test.txt")
-            )
+            meta=FakeTransferFutureMeta(call_args=FakeTransferFutureCallArgs(fileobj="test.txt"))
         )
         assert not ProvideUploadContentTypeSubscriber().on_queued(future)  # type: ignore
         assert future.meta.call_args.extra_args.get("ContentType") == "text/plain"
@@ -534,7 +506,7 @@ def test_on_queued(self) -> None:
 class TestRequestParamsMapper:
     """Test RequestParamsMapper."""

-    params: ClassVar[Dict[str, str]] = {
+    params: ClassVar[dict[str, str]] = {
         "sse": "AES256",
         "sse_kms_key_id": "my-kms-key",
         "sse_c": "AES256",
@@ -545,7 +517,7 @@ class TestRequestParamsMapper:

     def test_map_copy_object_params(self) -> None:
         """Test map_copy_object_params."""
-        params: Dict[str, str] = {}
+        params: dict[str, str] = {}
         assert not RequestParamsMapper.map_copy_object_params(
             params, {"metadata": "something", **self.params}
         )
@@ -562,7 +534,7 @@ def test_map_copy_object_params(self) -> None:

     def test_map_copy_object_params_metadata_directive(self) -> None:
         """Test map_copy_object_params."""
-        params: Dict[str, str] = {}
+        params: dict[str, str] = {}
         assert not RequestParamsMapper.map_copy_object_params(
             params, {"metadata_directive": "something", **self.params}
         )
@@ -578,10 +550,8 @@ def test_map_copy_object_params_metadata_directive(self) -> None:

     def test_map_create_multipart_upload_params(self) -> None:
         """Test map_create_multipart_upload_params."""
-        params: Dict[str, str] = {}
-        assert not RequestParamsMapper.map_create_multipart_upload_params(
-            params, self.params
-        )
+        params: dict[str, str] = {}
+        assert not RequestParamsMapper.map_create_multipart_upload_params(params, self.params)
         assert params == {
             "SSECustomerAlgorithm": "AES256",
             "SSECustomerKey": "my-sse-c-key",
@@ -591,7 +561,7 @@ def test_map_create_multipart_upload_params(self) -> None:

     def test_map_delete_object_params(self) -> None:
         """Test map_delete_object_params."""
-        params: Dict[str, Any] = {}
+        params: dict[str, Any] = {}
         assert not RequestParamsMapper.map_delete_object_params(
             params, {"request_payer": "requester", **self.params}
         )
@@ -599,7 +569,7 @@ def test_map_delete_object_params(self) -> None:

     def test_map_get_object_params(self) -> None:
         """Test map_get_object_params."""
-        params: Dict[str, str] = {}
+        params: dict[str, str] = {}
         assert not RequestParamsMapper.map_get_object_params(params, self.params)
         assert params == {
             "SSECustomerAlgorithm": "AES256",
@@ -608,7 +578,7 @@ def test_map_get_object_params(self) -> None:

     def test_map_head_object_params(self) -> None:
         """Test map_head_object_params."""
-        params: Dict[str, str] = {}
+        params: dict[str, str] = {}
         assert not RequestParamsMapper.map_head_object_params(params, self.params)
         assert params == {
             "SSECustomerAlgorithm": "AES256",
@@ -617,7 +587,7 @@ def test_map_head_object_params(self) -> None:

     def test_map_list_objects_v2_params(self) -> None:
         """Test map_list_objects_v2_params."""
-        params: Dict[str, Any] = {}
+        params: dict[str, Any] = {}
         assert not RequestParamsMapper.map_list_objects_v2_params(
             params, {"request_payer": "requester", **self.params}
         )
@@ -625,7 +595,7 @@ def test_map_list_objects_v2_params(self) -> None:

     def test_map_put_object_params(self) -> None:
         """Test map_put_object_params."""
-        params: Dict[str, str] = {}
+        params: dict[str, str] = {}
         assert not RequestParamsMapper.map_put_object_params(
             params,
             {
@@ -653,28 +623,25 @@ def test_map_put_object_params(self) -> None:

     def test_map_put_object_params_raise_value_error_format(self) -> None:
         """Test map_put_object_params."""
-        params: Dict[str, str] = {}
-        with pytest.raises(ValueError) as excinfo:
+        params: dict[str, str] = {}
+        with pytest.raises(ValueError, match="grants should be of the form permission=principal"):
             RequestParamsMapper.map_put_object_params(
                 params, {"grants": ["invalid"], **self.params}
             )
-        assert str(excinfo.value) == "grants should be of the form permission=principal"

     def test_map_put_object_params_raise_value_error_permission(self) -> None:
         """Test map_put_object_params."""
-        params: Dict[str, str] = {}
-        with pytest.raises(ValueError) as excinfo:
+        params: dict[str, str] = {}
+        with pytest.raises(
+            ValueError, match="permission must be one of: read|readacl|writeacl|full"
+        ):
             RequestParamsMapper.map_put_object_params(
                 params, {"grants": ["invalid=test-read"], **self.params}
             )
-        assert (
-            str(excinfo.value)
-            == "permission must be one of: read|readacl|writeacl|full"
-        )

     def test_map_upload_part_params(self) -> None:
         """Test map_upload_part_params."""
-        params: Dict[str, str] = {}
+        params: dict[str, str] = {}
         assert not RequestParamsMapper.map_upload_part_params(params, self.params)
         assert params == {
             "SSECustomerAlgorithm": "AES256",
@@ -683,7 +650,7 @@ def test_map_upload_part_params(self) -> None:

     def test_map_upload_part_copy_params(self) -> None:
         """Test map_upload_part_copy_params."""
-        params: Dict[str, str] = {}
+        params: dict[str, str] = {}
         assert not RequestParamsMapper.map_upload_part_copy_params(params, self.params)
         assert params == {
             "CopySourceSSECustomerAlgorithm": "AES256",
@@ -725,22 +692,18 @@ def test_write_no_stdout(self, mocker: MockerFixture) -> None:

 def test_block_s3_object_lambda_raise_colon() -> None:
     """Test block_s3_object_lambda."""
-    with pytest.raises(ValueError) as excinfo:
+    with pytest.raises(ValueError, match="S3 action does not support S3 Object Lambda resources"):
         block_s3_object_lambda(
-            "arn:aws:s3-object-lambda:us-west-2:123456789012:"
-            "accesspoint:my-accesspoint"
+            "arn:aws:s3-object-lambda:us-west-2:123456789012:accesspoint:my-accesspoint"
         )
-    assert "does not support S3 Object Lambda resources" in str(excinfo.value)


 def test_block_s3_object_lambda_raise_slash() -> None:
     """Test block_s3_object_lambda."""
-    with pytest.raises(ValueError) as excinfo:
+    with pytest.raises(ValueError, match="S3 action does not support S3 Object Lambda resources"):
         block_s3_object_lambda(
-            "arn:aws:s3-object-lambda:us-west-2:123456789012:"
-            "accesspoint/my-accesspoint"
+            "arn:aws:s3-object-lambda:us-west-2:123456789012:accesspoint/my-accesspoint"
         )
-    assert "does not support S3 Object Lambda resources" in str(excinfo.value)


 def test_create_warning() -> None:
@@ -806,28 +769,28 @@ def test_date_parser_datetime() -> None:
         "pre/key",
     ),
     (
-        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint",  # noqa
-        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint",  # noqa
+        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint",
+        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint",
         "",
     ),
     (
-        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint/key",  # noqa
-        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint",  # noqa
+        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint/key",
+        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint",
         "key",
     ),
     (
-        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint/key:name",  # noqa
-        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint",  # noqa
+        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint/key:name",
+        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint",
         "key:name",
     ),
     (
-        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint/key/name",  # noqa
-        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint",  # noqa
+        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint/key/name",
+        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint",
         "key/name",
     ),
     (
-        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint/prefix/key:name",  # noqa
-        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint",  # noqa
+        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint/prefix/key:name",
+        "arn:aws:s3-outposts:us-west-2:123456789012:outpost:op-12334:accesspoint:my-accesspoint",
         "prefix/key:name",
     ),
 ],
@@ -888,12 +851,12 @@ def test_find_dest_path_comp_key_locals3_file_no_dir_op(tmp_path: Path) -> None:

 def test_get_file_stat(tmp_path: Path) -> None:
     """Test get_file_stat."""
     tmp_file = tmp_path / "test.txt"
-    now = datetime.datetime.now(tzlocal())
-    epoch_now = time.mktime(now.timetuple())
     tmp_file.write_text("foo")
     size, update_time = get_file_stat(tmp_file)
     assert size == 3
-    assert time.mktime(update_time.timetuple()) == epoch_now  # type: ignore
+    assert update_time, (
+        "just ensure that it has a truthy value, checking exact value is inconsistent",
+    )


 @pytest.mark.parametrize("exc", [ValueError(), OSError(), OverflowError()])
@@ -909,11 +872,10 @@ def test_get_file_stat_handle_timestamp_error(

 def test_get_file_stat_raise_value_error(mocker: MockerFixture, tmp_path: Path) -> None:
     """Test get_file_stat."""
-    mocker.patch.object(Path, "stat", PropertyMock(side_effect=IOError("msg")))
+    mocker.patch.object(Path, "stat", PropertyMock(side_effect=OSError("msg")))
     tmp_file = tmp_path / "test.txt"
-    with pytest.raises(ValueError) as excinfo:
+    with pytest.raises(ValueError, match="Could not retrieve file stat"):
         get_file_stat(tmp_file)
-    assert str(excinfo.value) == f"Could not retrieve file stat of {tmp_file}: msg"


 def test_guess_content_type(mocker: MockerFixture, tmp_path: Path) -> None:
@@ -955,7 +917,7 @@ def test_guess_content_type_handle_unicode_decode_error(
         (1024**7, None),
     ],
 )
-def test_human_readable_size(expected: Optional[str], value: float) -> None:
+def test_human_readable_size(expected: str | None, value: float) -> None:
     """Test human_readable_size."""
     assert human_readable_size(value) == expected
@@ -984,23 +946,18 @@ def test_human_readable_to_bytes(expected: int, value: str) -> None:

 def test_human_readable_to_bytes_raise_value_error() -> None:
     """Test human_readable_to_bytes."""
-    with pytest.raises(ValueError) as excinfo:
+    with pytest.raises(ValueError, match="Invalid size value"):
         human_readable_to_bytes("test")
-    assert str(excinfo.value) == "Invalid size value: test"


-@pytest.mark.skipif(
-    platform.system() == "Windows", reason="crashes xdist worker on Windows"
-)
-def test_relative_path_handle_value_error(
-    mocker: MockerFixture, tmp_path: Path
-) -> None:
+@pytest.mark.skipif(platform.system() == "Windows", reason="crashes xdist worker on Windows")
+def test_relative_path_handle_value_error(mocker: MockerFixture, tmp_path: Path) -> None:
     """Test relative_path."""
     tmp_file = tmp_path / "test.txt"
     mocker.patch("os.path.split", side_effect=ValueError())
     result = relative_path(tmp_file, tmp_path)
     assert isinstance(result, str)
-    assert os.path.isabs(result)
+    assert Path(result).is_absolute()


 @pytest.mark.parametrize(
@@ -1008,7 +965,7 @@ def test_relative_path_handle_value_error(
     [("/tmp/foo/bar", "/tmp/foo", f".{os.sep}bar"), (None, "/foo", None)],
 )
 def test_relative_path_posix(
-    expected: Optional[str], filename: Optional[str], mocker: MockerFixture, start: str
+    expected: str | None, filename: str | None, mocker: MockerFixture, start: str
 ) -> None:
     """Test relative_path."""
     mocker.patch("os.path.relpath", posixpath.relpath)
@@ -1021,7 +978,7 @@ def test_relative_path_posix(
     [(None, "/foo", None), (r"C:\tmp\foo\bar", r"C:\tmp\foo", f".{os.sep}bar")],
 )
 def test_relative_path_windows(
-    expected: Optional[str], filename: Optional[str], mocker: MockerFixture, start: str
+    expected: str | None, filename: str | None, mocker: MockerFixture, start: str
 ) -> None:
     """Test relative_path."""
     mocker.patch("os.path.relpath", ntpath.relpath)
@@ -1057,7 +1014,7 @@ def test_set_file_utime_raise_os_error(mocker: MockerFixture, tmp_path: Path) -> None:
     mocker.patch("os.utime", side_effect=OSError(2, ""))
     now = datetime.datetime.now(tzlocal())
     epoch_now = time.mktime(now.timetuple())
-    with pytest.raises(OSError):
+    with pytest.raises(OSError):  # noqa: PT011
         set_file_utime(tmp_file, epoch_now)


@@ -1073,9 +1030,7 @@ def test_uni_print_handle_unicode_encoding_error() -> None:
     """Test uni_print."""
     out_file = Mock(
         encoding=None,
-        write=Mock(
-            side_effect=[UnicodeEncodeError("test", "test", 0, 0, "test"), None]
-        ),
+        write=Mock(side_effect=[UnicodeEncodeError("test", "test", 0, 0, "test"), None]),
     )
     assert not uni_print("test", out_file)
     assert out_file.write.call_count == 2
diff --git a/tests/unit/core/providers/aws/s3/test_bucket.py b/tests/unit/core/providers/aws/s3/test__bucket.py
similarity index 90%
rename from tests/unit/core/providers/aws/s3/test_bucket.py
rename to tests/unit/core/providers/aws/s3/test__bucket.py
index e603fe8ff..d64e9ecd9 100644
--- a/tests/unit/core/providers/aws/s3/test_bucket.py
+++ b/tests/unit/core/providers/aws/s3/test__bucket.py
@@ -1,20 +1,18 @@
 """Test runway.core.providers.aws.s3._bucket."""

-# pyright: basic
 from __future__ import annotations

 import logging
 from http import HTTPStatus
 from typing import TYPE_CHECKING
+from unittest.mock import MagicMock

 import pytest
-from mock import MagicMock

 from runway.core.providers.aws import BaseResponse
 from runway.core.providers.aws.s3 import Bucket

 if TYPE_CHECKING:
-    from pytest import LogCaptureFixture
     from pytest_mock import MockerFixture

     from .....factories import MockRunwayContext
@@ -87,7 +85,7 @@ def test_create(self, runway_context: MockRunwayContext) -> None:
         stubber.assert_no_pending_responses()

     def test_create_exists(
-        self, caplog: LogCaptureFixture, runway_context: MockRunwayContext
+        self, caplog: pytest.LogCaptureFixture, runway_context: MockRunwayContext
     ) -> None:
         """Test create with exists=True."""
         caplog.set_level(logging.DEBUG, logger="runway.core.providers.aws.s3.bucket")
@@ -106,7 +104,7 @@ def test_create_exists(
         assert "bucket already exists" in "\n".join(caplog.messages)

     def test_create_forbidden(
-        self, caplog: LogCaptureFixture, runway_context: MockRunwayContext
+        self, caplog: pytest.LogCaptureFixture, runway_context: MockRunwayContext
     ) -> None:
         """Test create with forbidden=True."""
         caplog.set_level(logging.DEBUG, logger="runway.core.providers.aws.s3.bucket")
@@ -178,7 +176,7 @@ def test_enable_versioning(self, runway_context: MockRunwayContext) -> None:
         stubber.assert_no_pending_responses()

     def test_enable_versioning_skipped(
-        self, caplog: LogCaptureFixture, runway_context: MockRunwayContext
+        self, caplog: pytest.LogCaptureFixture, runway_context: MockRunwayContext
     ) -> None:
         """Test enable_versioning with Status=Enabled."""
         caplog.set_level(logging.DEBUG, logger="runway.core.providers.aws.s3.bucket")
@@ -231,9 +229,7 @@ def test_forbidden(
     ) -> None:
         """Test forbidden."""
         response = BaseResponse()
-        response.metadata.http_status_code = (
-            HTTPStatus.FORBIDDEN if forbidden else HTTPStatus.OK
-        )
+        response.metadata.http_status_code = HTTPStatus.FORBIDDEN if forbidden else HTTPStatus.OK
         mocker.patch.object(Bucket, "head", response)
         assert Bucket(runway_context, "test-bucket").forbidden is expected
@@ -256,9 +252,7 @@ def test_get_versioning(self, runway_context: MockRunwayContext) -> None:

         response = {"Status": "Enabled", "MFADelete": "Enabled"}

-        stubber.add_response(
-            "get_bucket_versioning", response, {"Bucket": "test-bucket"}
-        )
+        stubber.add_response("get_bucket_versioning", response, {"Bucket": "test-bucket"})

         with stubber:
             assert bucket.get_versioning() == response
@@ -281,7 +275,7 @@ def test_head(self, runway_context: MockRunwayContext) -> None:
         stubber.assert_no_pending_responses()

     def test_head_clienterror(
-        self, caplog: LogCaptureFixture, runway_context: MockRunwayContext
+        self, caplog: pytest.LogCaptureFixture, runway_context: MockRunwayContext
     ) -> None:
         """Test head with ClientError."""
         caplog.set_level(logging.DEBUG, logger="runway.core.providers.aws.s3.bucket")
@@ -311,9 +305,7 @@ def test_not_found(
     ) -> None:
         """Test not_found."""
         response = BaseResponse()
-        response.metadata.http_status_code = (
-            HTTPStatus.NOT_FOUND if not_found else HTTPStatus.OK
-        )
+        response.metadata.http_status_code = HTTPStatus.NOT_FOUND if not_found else HTTPStatus.OK
         mocker.patch.object(Bucket, "head", response)
         assert Bucket(runway_context, "test-bucket").not_found is expected
@@ -322,9 +314,7 @@ def test_sync_from_local(
     ) -> None:
         """Test sync_from_local."""
         mock_handler = MagicMock()
-        mock_handler_class = mocker.patch(
-            f"{MODULE}.S3SyncHandler", return_value=mock_handler
-        )
+        mock_handler_class = mocker.patch(f"{MODULE}.S3SyncHandler", return_value=mock_handler)
         runway_context.add_stubber("s3")
         src_directory = "/test/"
         obj = Bucket(runway_context, "test-bucket")
@@ -343,20 +333,14 @@ def test_sync_from_local(
         )
         mock_handler.run.assert_called_once_with()

-    def test_sync_to_local(
-        self, mocker: MockerFixture, runway_context: MockRunwayContext
-    ) -> None:
+    def test_sync_to_local(self, mocker: MockerFixture, runway_context: MockRunwayContext) -> None:
         """Test sync_to_local."""
         mock_handler = MagicMock()
-        mock_handler_class = mocker.patch(
-            f"{MODULE}.S3SyncHandler", return_value=mock_handler
-        )
+        mock_handler_class = mocker.patch(f"{MODULE}.S3SyncHandler", return_value=mock_handler)
         runway_context.add_stubber("s3")
         dest_directory = "/test/"
         obj = Bucket(runway_context, "test-bucket")
-        assert not obj.sync_to_local(
-            dest_directory, follow_symlinks=True, include=["something"]
-        )
+        assert not obj.sync_to_local(dest_directory, follow_symlinks=True, include=["something"])
         mock_handler_class.assert_called_once_with(
             context=runway_context,
             delete=False,
diff --git a/tests/unit/core/providers/aws/s3/test_sync_handler.py b/tests/unit/core/providers/aws/s3/test__sync_handler.py
similarity index 84%
rename from tests/unit/core/providers/aws/s3/test_sync_handler.py
rename to tests/unit/core/providers/aws/s3/test__sync_handler.py
index 6909e8154..d4188e069 100644
--- a/tests/unit/core/providers/aws/s3/test_sync_handler.py
+++ b/tests/unit/core/providers/aws/s3/test__sync_handler.py
@@ -1,11 +1,9 @@
 """Test runway.core.providers.aws.s3._sync_handler."""

-# pylint: disable=protected-access
 from __future__ import annotations

 from typing import TYPE_CHECKING
-
-from mock import Mock
+from unittest.mock import Mock

 from runway.core.providers.aws.s3._sync_handler import S3SyncHandler

@@ -27,17 +25,11 @@ def test_client(self, runway_context: MockRunwayContext) -> None:
             runway_context, dest="", src=""
         ).client == runway_context.get_session().client("s3")

-    def test_run(
-        self, mocker: MockerFixture, runway_context: MockRunwayContext
-    ) -> None:
+    def test_run(self, mocker: MockerFixture, runway_context: MockRunwayContext) -> None:
         """Test run."""
-        mock_register_sync_strategies = mocker.patch(
-            f"{MODULE}.register_sync_strategies"
-        )
+        mock_register_sync_strategies = mocker.patch(f"{MODULE}.register_sync_strategies")
         mock_action = mocker.patch(f"{MODULE}.ActionArchitecture")
-        transfer_config = mocker.patch.object(
-            S3SyncHandler, "transfer_config", {"key": "val"}
-        )
+        transfer_config = mocker.patch.object(S3SyncHandler, "transfer_config", {"key": "val"})
         obj = S3SyncHandler(runway_context, dest="", src="")
         assert not obj.run()
         mock_register_sync_strategies.assert_called_once_with(obj._botocore_session)
diff --git a/tests/unit/core/providers/aws/test_account.py b/tests/unit/core/providers/aws/test__account.py
similarity index 92%
rename from tests/unit/core/providers/aws/test_account.py
rename to tests/unit/core/providers/aws/test__account.py
index f55299a05..78ba55c05 100644
--- a/tests/unit/core/providers/aws/test_account.py
+++ b/tests/unit/core/providers/aws/test__account.py
@@ -1,6 +1,5 @@
 """Test runway.core.providers.aws._account."""

-# pyright: basic
 from __future__ import annotations

 from typing import TYPE_CHECKING
@@ -50,7 +49,5 @@ def test_id_raise_value_error(self, runway_context: MockRunwayContext) -> None:
             {"UserId": "test-user", "Arn": arn},
         )
         account = AccountDetails(runway_context)
-        with stubber, pytest.raises(
-            ValueError, match="get_caller_identity did not return Account"
-        ):
+        with stubber, pytest.raises(ValueError, match="get_caller_identity did not return Account"):
             assert not account.id
diff --git a/tests/unit/core/providers/aws/test_assume_role.py b/tests/unit/core/providers/aws/test__assume_role.py
similarity index 89%
rename from tests/unit/core/providers/aws/test_assume_role.py
rename to tests/unit/core/providers/aws/test__assume_role.py
index 9b31693ff..6dfc75657 100644
--- a/tests/unit/core/providers/aws/test_assume_role.py
+++ b/tests/unit/core/providers/aws/test__assume_role.py
@@ -1,6 +1,5 @@
 """Test runway.core.providers.aws._assume_role."""

-# pyright: basic
 from __future__ import annotations

 import logging
@@ -12,7 +11,6 @@
 from runway.core.providers.aws import AssumeRole

 if TYPE_CHECKING:
-    from pytest import LogCaptureFixture

     from ....factories import MockRunwayContext

@@ -82,13 +80,16 @@ def test_assume_role_no_revert_on_exit(runway_context: MockRunwayContext) -> None:

     assert runway_context.env.aws_credentials != NEW_CREDENTIALS

-    with stubber, AssumeRole(
-        runway_context,
-        role_arn=ROLE_ARN,
-        duration_seconds=900,
-        revert_on_exit=False,
-        session_name="runway-test",
-    ) as result:
+    with (
+        stubber,
+        AssumeRole(
+            runway_context,
+            role_arn=ROLE_ARN,
+            duration_seconds=900,
+            revert_on_exit=False,
+            session_name="runway-test",
+        ) as result,
+    ):
         assert runway_context.env.aws_credentials == NEW_CREDENTIALS
         assert result.role_arn == ROLE_ARN
         assert result.duration_seconds == 900
@@ -100,7 +101,7 @@ def test_assume_role_no_revert_on_exit(runway_context: MockRunwayContext) -> None:


 def test_assume_role_no_role(
-    caplog: LogCaptureFixture, runway_context: MockRunwayContext
+    caplog: pytest.LogCaptureFixture, runway_context: MockRunwayContext
 ) -> None:
     """Test AssumeRole with no role_arn."""
     caplog.set_level(logging.DEBUG, logger="runway")
@@ -123,8 +124,9 @@ def test_assume_role_raise_value_error(runway_context: MockRunwayContext) -> None:
         {"RoleArn": ROLE_ARN, "RoleSessionName": "runway", "DurationSeconds": 3600},
     )

-    with stubber, pytest.raises(
-        ValueError, match="assume_role did not return Credentials"
+    with (
+        stubber,
+        pytest.raises(ValueError, match="assume_role did not return Credentials"),
+        AssumeRole(runway_context, role_arn=ROLE_ARN),
     ):
-        with AssumeRole(runway_context, role_arn=ROLE_ARN):
-            raise AssertionError
+        raise AssertionError
diff --git a/tests/unit/core/providers/aws/test_response.py b/tests/unit/core/providers/aws/test__response.py
similarity index 99%
rename from tests/unit/core/providers/aws/test_response.py
rename to tests/unit/core/providers/aws/test__response.py
index 1c1397e18..e916da694 100644
--- a/tests/unit/core/providers/aws/test_response.py
+++ b/tests/unit/core/providers/aws/test__response.py
@@ -1,6 +1,5 @@
 """Test runway.core.providers.aws._response."""

-# pyright: basic
 from __future__ import annotations

 from runway.core.providers.aws import BaseResponse, ResponseError, ResponseMetadata
diff --git a/tests/unit/core/test_core.py b/tests/unit/core/test_core.py
index c0c99be95..6139beff2 100644
--- a/tests/unit/core/test_core.py
+++ b/tests/unit/core/test_core.py
@@ -1,18 +1,17 @@
 """Test runway.core."""

-# pyright: basic
 from __future__ import annotations

 import logging
-from typing import TYPE_CHECKING, Any, Dict
+from typing import TYPE_CHECKING, Any
+from unittest.mock import MagicMock, call

 import pytest
-from mock import MagicMock, call
+from packaging.specifiers import SpecifierSet

 from runway.core import Runway

 if TYPE_CHECKING:
-    from pytest import LogCaptureFixture, MonkeyPatch
     from pytest_mock import MockerFixture

     from ..factories import MockRunwayConfig, MockRunwayContext
@@ -38,25 +37,27 @@ def test___init___(

     def test___init___undetermined_version(
         self,
-        caplog: LogCaptureFixture,
-        monkeypatch: MonkeyPatch,
+        caplog: pytest.LogCaptureFixture,
monkeypatch: pytest.MonkeyPatch, runway_config: MockRunwayConfig, runway_context: MockRunwayContext, ) -> None: """Test __init__ with unsupported version.""" monkeypatch.setattr(MODULE + ".__version__", "0.1.0-dev1") + runway_config.runway_version = SpecifierSet(">=1.10") caplog.set_level(logging.WARNING, logger=MODULE) assert Runway(runway_config, runway_context) # type: ignore assert "shallow clone of the repo" in "\n".join(caplog.messages) def test___init___unsupported_version( self, - monkeypatch: MonkeyPatch, + monkeypatch: pytest.MonkeyPatch, runway_config: MockRunwayConfig, runway_context: MockRunwayContext, ) -> None: """Test __init__ with unsupported version.""" monkeypatch.setattr(MODULE + ".__version__", "1.3") + runway_config.runway_version = SpecifierSet(">=1.10") with pytest.raises(SystemExit) as excinfo: assert not Runway(runway_config, runway_context) # type: ignore assert excinfo.value.code == 1 @@ -220,8 +221,8 @@ def test_reverse_deployments(self) -> None: def test_test( self, - caplog: LogCaptureFixture, - monkeypatch: MonkeyPatch, + caplog: pytest.LogCaptureFixture, + monkeypatch: pytest.MonkeyPatch, runway_config: MockRunwayConfig, runway_context: MockRunwayContext, ) -> None: @@ -229,12 +230,8 @@ def test_test( caplog.set_level(logging.ERROR, logger="runway") test_handlers = { "exception": MagicMock(handle=MagicMock(side_effect=Exception())), - "fail_system_exit_0": MagicMock( - handle=MagicMock(side_effect=SystemExit(0)) - ), - "fail_system_exit_1": MagicMock( - handle=MagicMock(side_effect=SystemExit(1)) - ), + "fail_system_exit_0": MagicMock(handle=MagicMock(side_effect=SystemExit(0))), + "fail_system_exit_1": MagicMock(handle=MagicMock(side_effect=SystemExit(1))), "success": MagicMock(), } monkeypatch.setattr(MODULE + "._TEST_HANDLERS", test_handlers) @@ -246,9 +243,7 @@ def test_test( ] assert not obj.test() assert "the following tests failed" not in "\n".join(caplog.messages) - test_handlers["success"].handle.assert_called_with( - obj.tests[0].name, obj.tests[0].args - ) + test_handlers["success"].handle.assert_called_with(obj.tests[0].name, obj.tests[0].args) test_handlers["fail_system_exit_0"].handle.assert_called_with( obj.tests[1].name, obj.tests[1].args ) @@ -281,25 +276,20 @@ def test_test( assert not obj.test() assert excinfo.value.code == 1 assert "exception:running test (fail)" in caplog.messages - assert ( - "exception:test required; the remaining tests have been skipped" - in caplog.messages - ) - test_handlers["exception"].handle.assert_called_with( - obj.tests[0].name, obj.tests[0].args - ) + assert "exception:test required; the remaining tests have been skipped" in caplog.messages + test_handlers["exception"].handle.assert_called_with(obj.tests[0].name, obj.tests[0].args) assert test_handlers["success"].handle.call_count == 1 def test_test_keyerror( self, - caplog: LogCaptureFixture, - monkeypatch: MonkeyPatch, + caplog: pytest.LogCaptureFixture, + monkeypatch: pytest.MonkeyPatch, runway_config: MockRunwayConfig, runway_context: MockRunwayContext, ) -> None: """Test test with handler not found.""" caplog.set_level(logging.ERROR, logger="runway") - test_handlers: Dict[str, Any] = {} + test_handlers: dict[str, Any] = {} monkeypatch.setattr(MODULE + "._TEST_HANDLERS", test_handlers) obj = Runway(runway_config, runway_context) # type: ignore @@ -319,7 +309,7 @@ def test_test_keyerror( def test_test_no_tests( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, runway_config: MockRunwayConfig, runway_context: MockRunwayContext, ) -> 
diff --git a/tests/unit/dependency_managers/test__pip.py b/tests/unit/dependency_managers/test__pip.py
index a922ef3fa..48c7017c8 100644
--- a/tests/unit/dependency_managers/test__pip.py
+++ b/tests/unit/dependency_managers/test__pip.py
@@ -5,16 +5,15 @@
 import logging
 import subprocess
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, Union
+from typing import TYPE_CHECKING, Any
+from unittest.mock import Mock

 import pytest
-from mock import Mock

 from runway.compat import shlex_join
 from runway.dependency_managers import Pip, PipInstallFailedError

 if TYPE_CHECKING:
-    from pytest import LogCaptureFixture
     from pytest_mock import MockerFixture

 MODULE = "runway.dependency_managers._pip"
@@ -36,9 +35,7 @@ def test_config_files(self) -> None:
             ({"file_name": "foo.txt"}, True),
         ],
     )
-    def test_dir_is_project(
-        self, expected: bool, kwargs: Dict[str, str], tmp_path: Path
-    ) -> None:
+    def test_dir_is_project(self, expected: bool, kwargs: dict[str, str], tmp_path: Path) -> None:
         """Test dir_is_project."""
         requirements_txt = tmp_path / kwargs.get("file_name", "requirements.txt")
         if expected:
@@ -50,8 +47,8 @@ def test_dir_is_project(
     @pytest.mark.parametrize("command", ["test", ["test"]])
     def test_generate_command(
         self,
-        caplog: LogCaptureFixture,
-        command: Union[List[str], str],
+        caplog: pytest.LogCaptureFixture,
+        command: list[str] | str,
         mocker: MockerFixture,
     ) -> None:
         """Test generate_command."""
@@ -89,7 +86,7 @@ def test_generate_command(
         ],
     )
     def test_generate_install_command(
-        self, call_args: Dict[str, Any], expected: Dict[str, Any], mocker: MockerFixture
+        self, call_args: dict[str, Any], expected: dict[str, Any], mocker: MockerFixture
     ) -> None:
         """Test generate_install_command."""
         expected.setdefault("cache_dir", None)
@@ -117,9 +114,7 @@ def test_install(
         mock_generate_install_command = mocker.patch.object(
             Pip, "generate_install_command", return_value=["generate_install_command"]
         )
-        mock_run_command = mocker.patch.object(
-            Pip, "_run_command", return_value="_run_command"
-        )
+        mock_run_command = mocker.patch.object(Pip, "_run_command", return_value="_run_command")

         assert (
             Pip(Mock(), tmp_path).install(
@@ -138,7 +133,7 @@ def test_install(
             target=target,
         )
         mock_run_command.assert_called_once_with(
-            mock_generate_install_command.return_value + ["--foo", "bar"],
+            [*mock_generate_install_command.return_value, "--foo", "bar"],
             suppress_output=False,
         )
@@ -158,9 +153,7 @@ def test_install_raise_from_called_process_error(
         )

         with pytest.raises(PipInstallFailedError) as excinfo:
-            assert Pip(Mock(), tmp_path).install(
-                requirements=requirements_txt, target=target
-            )
+            assert Pip(Mock(), tmp_path).install(requirements=requirements_txt, target=target)
         assert (
             excinfo.value.message == "pip failed to install dependencies; "
             "review pip's output above to troubleshoot"
@@ -180,9 +173,7 @@ def test_python_version(
         self, cmd_output: str, expected: str, mocker: MockerFixture, tmp_path: Path
     ) -> None:
         """Test python_version."""
-        mock_run_command = mocker.patch.object(
-            Pip, "_run_command", return_value=cmd_output
-        )
+        mock_run_command = mocker.patch.object(Pip, "_run_command", return_value=cmd_output)
         version_cls = mocker.patch(f"{MODULE}.Version", return_value="success")
         assert Pip(Mock(), tmp_path).python_version == version_cls.return_value
         mock_run_command.assert_called_once_with([Pip.EXECUTABLE, "--version"])
@@ -202,9 +193,7 @@ def test_version(
         self, cmd_output: str, expected: str, mocker: MockerFixture, tmp_path: Path
     ) -> None:
         """Test version."""
-        mock_run_command = mocker.patch.object(
-            Pip, "_run_command", return_value=cmd_output
-        )
+        mock_run_command = mocker.patch.object(Pip, "_run_command", return_value=cmd_output)
         version_cls = mocker.patch(f"{MODULE}.Version", return_value="success")
         assert Pip(Mock(), tmp_path).version == version_cls.return_value
         mock_run_command.assert_called_once_with([Pip.EXECUTABLE, "--version"])
diff --git a/tests/unit/dependency_managers/test__pipenv.py b/tests/unit/dependency_managers/test__pipenv.py
index 081414b0e..8ac918da2 100644
--- a/tests/unit/dependency_managers/test__pipenv.py
+++ b/tests/unit/dependency_managers/test__pipenv.py
@@ -4,17 +4,16 @@

 import logging
 import subprocess
-from typing import TYPE_CHECKING, Any, Dict
+from typing import TYPE_CHECKING, Any
+from unittest.mock import Mock

 import pytest
-from mock import Mock

 from runway.dependency_managers import Pipenv, PipenvExportFailedError

 if TYPE_CHECKING:
     from pathlib import Path

-    from pytest import LogCaptureFixture
     from pytest_mock import MockerFixture

 MODULE = "runway.dependency_managers._pipenv"
@@ -33,7 +32,7 @@ def test_config_files(self) -> None:
     )
     def test_dir_is_project(
         self,
-        caplog: LogCaptureFixture,
+        caplog: pytest.LogCaptureFixture,
         lock_exists: bool,
         pipfile_exists: bool,
         tmp_path: Path,
@@ -61,7 +60,7 @@ def test_dir_is_project(
     )
     def test_export(
         self,
-        export_kwargs: Dict[str, Any],
+        export_kwargs: dict[str, Any],
         mocker: MockerFixture,
         tmp_path: Path,
     ) -> None:
@@ -70,9 +69,7 @@ def test_export(
         mock_generate_command = mocker.patch.object(
             Pipenv, "generate_command", return_value="generate_command"
         )
-        mock_run_command = mocker.patch.object(
-            Pipenv, "_run_command", return_value="_run_command"
-        )
+        mock_run_command = mocker.patch.object(Pipenv, "_run_command", return_value="_run_command")
         obj = Pipenv(Mock(), tmp_path)
         assert obj.export(output=expected, **export_kwargs) == expected
         assert expected.is_file()
@@ -116,9 +113,7 @@ def test_version(
         self, cmd_output: str, expected: str, mocker: MockerFixture, tmp_path: Path
     ) -> None:
         """Test version."""
-        mock_run_command = mocker.patch.object(
-            Pipenv, "_run_command", return_value=cmd_output
-        )
+        mock_run_command = mocker.patch.object(Pipenv, "_run_command", return_value=cmd_output)
         version_cls = mocker.patch(f"{MODULE}.Version", return_value="success")
         assert Pipenv(Mock(), tmp_path).version == version_cls.return_value
         mock_run_command.assert_called_once_with([Pipenv.EXECUTABLE, "--version"])
diff --git a/tests/unit/dependency_managers/test__poetry.py b/tests/unit/dependency_managers/test__poetry.py
index a74469206..dc88fb3e7 100644
--- a/tests/unit/dependency_managers/test__poetry.py
+++ b/tests/unit/dependency_managers/test__poetry.py
@@ -3,11 +3,11 @@
 from __future__ import annotations

 import subprocess
-from typing import TYPE_CHECKING, Any, Dict
+from typing import TYPE_CHECKING, Any
+from unittest.mock import Mock

 import pytest
 import tomli_w
-from mock import Mock

 from runway.dependency_managers import Poetry, PoetryExportFailedError
@@ -42,7 +42,7 @@ def test_config_files(self) -> None:
         ],
     )
     def test_dir_is_project(
-        self, build_system: Dict[str, Any], expected: bool, tmp_path: Path
+        self, build_system: dict[str, Any], expected: bool, tmp_path: Path
     ) -> None:
         """Test dir_is_project."""
         pyproject_contents = {"build-system": build_system}
@@ -69,7 +69,7 @@ def test_dir_is_project_file_not_found(self, tmp_path: Path) -> None:
     )
     def test_export(
         self,
-        export_kwargs: Dict[str, Any],
+        export_kwargs: dict[str, Any],
         mocker: MockerFixture,
         tmp_path: Path,
     ) -> None:
@@ -78,18 +78,14 @@ def test_export(
         mock_generate_command = mocker.patch.object(
             Poetry, "generate_command", return_value="generate_command"
         )
-        mock_run_command = mocker.patch.object(
-            Poetry, "_run_command", return_value="_run_command"
-        )
+        mock_run_command = mocker.patch.object(Poetry, "_run_command", return_value="_run_command")
         (tmp_path / "test.requirements.txt").touch()  # created by _run_command
         obj = Poetry(Mock(), tmp_path)
         assert obj.export(output=expected, **export_kwargs) == expected
         assert expected.is_file()
         export_kwargs.update({"output": expected.name})
-        export_kwargs.update(
-            {"format": export_kwargs.pop("output_format", "requirements.txt")}
-        )
+        export_kwargs.update({"format": export_kwargs.pop("output_format", "requirements.txt")})
         export_kwargs.setdefault("dev", False)
         export_kwargs.setdefault("extras", None)
         export_kwargs.setdefault("with_credentials", True)
@@ -120,10 +116,7 @@

         with pytest.raises(PoetryExportFailedError) as excinfo:
             assert Poetry(Mock(), tmp_path).export(output=output)
-        assert (
-            excinfo.value.message
-            == "poetry export failed with the following output:\nstderr"
-        )
+        assert excinfo.value.message == "poetry export failed with the following output:\nstderr"

     def test_export_raise_when_output_does_not_exist(
         self,
         mocker: MockerFixture,
         tmp_path: Path,
     ) -> None:
         """Test export raise PoetryExportFailedError from CalledProcessError."""
         output = tmp_path / "expected" / "test.requirements.txt"
         mocker.patch.object(Poetry, "generate_command", return_value="generate_command")
-        mock_run_command = mocker.patch.object(
-            Poetry, "_run_command", return_value="_run_command"
-        )
+        mock_run_command = mocker.patch.object(Poetry, "_run_command", return_value="_run_command")

         with pytest.raises(PoetryExportFailedError) as excinfo:
             assert Poetry(Mock(), tmp_path).export(output=output)
@@ -152,9 +143,7 @@ def test_version(
         self, cmd_output: str, expected: str, mocker: MockerFixture, tmp_path: Path
     ) -> None:
         """Test version."""
-        mock_run_command = mocker.patch.object(
-            Poetry, "_run_command", return_value=cmd_output
-        )
+        mock_run_command = mocker.patch.object(Poetry, "_run_command", return_value=cmd_output)
         version_cls = mocker.patch(f"{MODULE}.Version", return_value="success")
         assert Poetry(Mock(), tmp_path).version == version_cls.return_value
         mock_run_command.assert_called_once_with([Poetry.EXECUTABLE, "--version"])
diff --git a/tests/unit/dependency_managers/test_base_classes.py b/tests/unit/dependency_managers/test_base_classes.py
index 15b3b484d..3f5bdeb9f 100644
--- a/tests/unit/dependency_managers/test_base_classes.py
+++ b/tests/unit/dependency_managers/test_base_classes.py
@@ -3,9 +3,9 @@
 from __future__ import annotations

 from typing import TYPE_CHECKING
+from unittest.mock import Mock

 import pytest
-from mock import Mock

 from runway.dependency_managers.base_classes import DependencyManager
diff --git a/tests/unit/env_mgr/test_env_mgr.py b/tests/unit/env_mgr/test_env_mgr.py
index cc6400412..5d5f08890 100644
--- a/tests/unit/env_mgr/test_env_mgr.py
+++ b/tests/unit/env_mgr/test_env_mgr.py
@@ -1,11 +1,9 @@
 """Test runway.env_mgr."""
-# pylint: disable=unused-argument
-# pyright: basic
 from __future__ import annotations

 import logging
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING

 import pytest
@@ -14,7 +12,6 @@

 if TYPE_CHECKING:
     from pathlib import Path

-    from pytest import LogCaptureFixture, MonkeyPatch
     from pytest_mock import MockerFixture
@@ -22,7 +19,7 @@ class TestEnvManager:
     """Test runway.env_mgr.EnvManager."""

     def test___init___darwin(
-        self, platform_darwin: None, cd_tmp_path: Path, mocker: MockerFixture
+        self, platform_darwin: None, cd_tmp_path: Path, mocker: MockerFixture  # noqa: ARG002
     ) -> None:
         """Test __init__ on Darwin platform."""
         home = cd_tmp_path / "home"
@@ -37,10 +34,10 @@ def test___init___darwin(

     def test___init___windows(
         self,
-        platform_windows: None,
+        platform_windows: None,  # noqa: ARG002
         cd_tmp_path: Path,
         mocker: MockerFixture,
-        monkeypatch: MonkeyPatch,
+        monkeypatch: pytest.MonkeyPatch,
     ) -> None:
         """Test __init__ on Windows platform."""
         home = cd_tmp_path / "home"
@@ -57,7 +54,10 @@ def test___init___windows(
         assert obj.versions_dir == expected_env_dir / "versions"

     def test___init___windows_appdata(
-        self, platform_windows: None, cd_tmp_path: Path, monkeypatch: MonkeyPatch
+        self,
+        platform_windows: None,  # noqa: ARG002
+        cd_tmp_path: Path,
+        monkeypatch: pytest.MonkeyPatch,
     ) -> None:
         """Test __init__ on Windows platform."""
         monkeypatch.setenv("APPDATA", str(cd_tmp_path / "custom_path"))
@@ -72,7 +72,7 @@ def test___init___windows_appdata(
         assert obj.versions_dir == expected_env_dir / "versions"

     def test_bin(
-        self, platform_darwin: None, cd_tmp_path: Path, mocker: MockerFixture
+        self, platform_darwin: None, cd_tmp_path: Path, mocker: MockerFixture  # noqa: ARG002
     ) -> None:
         """Test bin."""
         home = cd_tmp_path / "home"
@@ -83,7 +83,7 @@ def test_bin(
         assert obj.bin == home / ".test-dir" / "versions" / "1.0.0" / "test-bin"

     @pytest.mark.parametrize("version", ["1.0.0", None])
-    def test_install(self, version: Optional[str]) -> None:
+    def test_install(self, version: str | None) -> None:
         """Test install."""
         with pytest.raises(NotImplementedError):
             assert EnvManager("", "").install(version)
@@ -101,7 +101,7 @@ def test_path(self, cd_tmp_path: Path) -> None:
     @pytest.mark.parametrize("exists", [False, True])
     def test_uninstall(
         self,
-        caplog: LogCaptureFixture,
+        caplog: pytest.LogCaptureFixture,
         exists: bool,
         mocker: MockerFixture,
         tmp_path: Path,
@@ -119,10 +119,7 @@ def test_uninstall(
             version_dir.mkdir()
             (version_dir / "foo").touch()
             assert obj.uninstall(version)
-            assert (
-                f"uninstalling {bin_name} {version} from {tmp_path}..."
-                in caplog.messages
-            )
+            assert f"uninstalling {bin_name} {version} from {tmp_path}..." in caplog.messages
             assert f"uninstalled {bin_name} {version}" in caplog.messages
         else:
             assert not obj.uninstall(version)
diff --git a/tests/unit/env_mgr/test_kbenv.py b/tests/unit/env_mgr/test_kbenv.py
index ad3a7da70..5df3858db 100644
--- a/tests/unit/env_mgr/test_kbenv.py
+++ b/tests/unit/env_mgr/test_kbenv.py
@@ -1,10 +1,10 @@
 """Test runway.env_mgr.kbenv."""
-# pyright: basic, reportFunctionMemberAccess=none
+# pyright: reportFunctionMemberAccess=none
 from __future__ import annotations

 import re
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING

 import pytest
@@ -44,11 +44,7 @@ def test_install_version_requested(
         obj = KBEnvManager(tmp_path)
         assert obj.install(version_requested) == str(obj.bin)
         mock_download_kb_release.assert_called_once_with(
-            (
-                version_requested
-                if version_requested.startswith("v")
-                else f"v{version_requested}"
-            ),
+            (version_requested if version_requested.startswith("v") else f"v{version_requested}"),
             obj.versions_dir,
         )
@@ -77,9 +73,7 @@ def test_list_installed_none(self, mocker: MockerFixture, tmp_path: Path) -> Non
             ("v0.15.0-alpha.13", Version("v0.15.0-alpha.13")),
         ],
     )
-    def test_parse_version_string(
-        self, provided: str, expected: Optional[Version]
-    ) -> None:
+    def test_parse_version_string(self, provided: str, expected: Version | None) -> None:
         """Test parse_version_string."""
         assert KBEnvManager.parse_version_string(provided) == expected
@@ -114,9 +108,7 @@ def test_set_version_same(self, mocker: MockerFixture, tmp_path: Path) -> None:

     def test_version(self, mocker: MockerFixture, tmp_path: Path) -> None:
         """Test version."""
-        get_version_from_file = mocker.patch.object(
-            KBEnvManager, "get_version_from_file"
-        )
+        get_version_from_file = mocker.patch.object(KBEnvManager, "get_version_from_file")
         parse_version_string = mocker.patch.object(
             KBEnvManager, "parse_version_string", return_value="success"
         )
@@ -126,9 +118,7 @@ def test_version(self, mocker: MockerFixture, tmp_path: Path) -> None:
         get_version_from_file.assert_not_called()
         parse_version_string.assert_called_once_with("version")

-    def test_version_get_version_from_file(
-        self, mocker: MockerFixture, tmp_path: Path
-    ) -> None:
+    def test_version_get_version_from_file(self, mocker: MockerFixture, tmp_path: Path) -> None:
         """Test version."""
         get_version_from_file = mocker.patch.object(
             KBEnvManager, "get_version_from_file", return_value="version"
@@ -179,6 +169,4 @@ def test_version_file(self, tmp_path: Path) -> None:
         expected = overlay_path / KB_VERSION_FILENAME
         expected.touch()
         assert obj.version_file == mod_path / KB_VERSION_FILENAME
-        assert (
-            KBEnvManager(mod_path, overlay_path=overlay_path).version_file == expected
-        )
+        assert KBEnvManager(mod_path, overlay_path=overlay_path).version_file == expected
diff --git a/tests/unit/env_mgr/test_tfenv.py b/tests/unit/env_mgr/test_tfenv.py
index 7f6c66113..b4e53bfb3 100644
--- a/tests/unit/env_mgr/test_tfenv.py
+++ b/tests/unit/env_mgr/test_tfenv.py
@@ -1,17 +1,17 @@
 """Test runway.env_mgr.tfenv."""
-# pyright: basic, reportFunctionMemberAccess=none
+# pyright: reportFunctionMemberAccess=none
 from __future__ import annotations

 import json
 import re
 import subprocess
-from typing import TYPE_CHECKING, Any, Dict, List, Optional
+from typing import TYPE_CHECKING, Any
+from unittest.mock import MagicMock, call

 import hcl
 import hcl2
 import pytest
-from mock import MagicMock, call

 from runway._logging import LogLevels
 from runway.env_mgr.tfenv import (
@@ -28,7 +28,6 @@
     from pathlib import Path
     from types import ModuleType

-    from pytest import LogCaptureFixture
     from pytest_mock import MockerFixture
     from pytest_subprocess import FakeProcess
@@ -64,9 +63,7 @@
 def test_get_available_tf_versions(mocker: MockerFixture) -> None:
     """Test runway.env_mgr.tfenv.get_available_tf_versions."""
     mock_requests = mocker.patch(f"{MODULE}.requests")
-    response: Dict[str, Any] = {
-        "terraform": {"versions": {"0.12.0": {}, "0.12.0-beta": {}}}
-    }
+    response: dict[str, Any] = {"terraform": {"versions": {"0.12.0": {}, "0.12.0-beta": {}}}}
     mock_requests.get.return_value = MagicMock(text=json.dumps(response))
     assert get_available_tf_versions() == ["0.12.0"]
     assert get_available_tf_versions(include_prerelease=True) == [
@@ -94,7 +91,7 @@ def test_get_latest_tf_version(mocker: MockerFixture) -> None:
     ],
 )
 def test_load_terraform_module(
-    parser: ModuleType, expected: Dict[str, Any], tmp_path: Path
+    parser: ModuleType, expected: dict[str, Any], tmp_path: Path
 ) -> None:
     """Test runway.env_mgr.tfenv.load_terraform_module."""
     tf_file = tmp_path / "module.tf"
@@ -153,8 +150,8 @@ class TestTFEnvManager:
     def test_backend(
         self,
         mocker: MockerFixture,
-        response: Dict[str, Any],
-        expected: Dict[str, Any],
+        response: dict[str, Any],
+        expected: dict[str, Any],
         tmp_path: Path,
     ) -> None:
         """Test backend."""
@@ -195,26 +192,18 @@ def test_get_min_required(self, mocker: MockerFixture, tmp_path: Path) -> None:
     )
     def test_get_version_from_executable(
         self,
-        expected: Optional[Version],
+        expected: Version | None,
         fake_process: FakeProcess,
         output: str,
     ) -> None:
         """Test get_version_from_executable."""
-        fake_process.register_subprocess(
-            ["usr/tfenv/terraform", "-version"], stdout=output
-        )
-        assert (
-            TFEnvManager.get_version_from_executable("usr/tfenv/terraform") == expected
-        )
+        fake_process.register_subprocess(["usr/tfenv/terraform", "-version"], stdout=output)
+        assert TFEnvManager.get_version_from_executable("usr/tfenv/terraform") == expected

     def test_get_version_from_executable_raise(self, fake_process: FakeProcess) -> None:
         """Test get_version_from_executable raise exception."""
-        fake_process.register_subprocess(
-            ["usr/tfenv/terraform", "-version"], returncode=1
-        )
-        with pytest.raises(
-            subprocess.CalledProcessError, match="returned non-zero exit status 1"
-        ):
+        fake_process.register_subprocess(["usr/tfenv/terraform", "-version"], returncode=1)
+        with pytest.raises(subprocess.CalledProcessError, match="returned non-zero exit status 1"):
             TFEnvManager.get_version_from_executable("usr/tfenv/terraform")

     def test_get_version_from_file(self, tmp_path: Path) -> None:
@@ -248,9 +237,7 @@ def test_install(self, mocker: MockerFixture, tmp_path: Path) -> None:
             str(version), tfenv.versions_dir, tfenv.command_suffix
         )

-    def test_install_already_installed(
-        self, mocker: MockerFixture, tmp_path: Path
-    ) -> None:
+    def test_install_already_installed(self, mocker: MockerFixture, tmp_path: Path) -> None:
         """Test install."""
         version = Version("0.15.5")
         mocker.patch.object(TFEnvManager, "version", version)
@@ -267,9 +254,7 @@ def test_install_set_version(self, mocker: MockerFixture, tmp_path: Path) -> Non
         mocker.patch.object(TFEnvManager, "version", version)
         mocker.patch.object(TFEnvManager, "versions_dir", tmp_path)
         mock_download = mocker.patch(f"{MODULE}.download_tf_release")
-        mock_set_version = mocker.patch.object(
-            TFEnvManager, "set_version", return_value=None
-        )
+        mock_set_version = mocker.patch.object(TFEnvManager, "set_version", return_value=None)
         tfenv = TFEnvManager(tmp_path)
         assert tfenv.install(str(version))
         mock_download.assert_called_once_with(
@@ -277,15 +262,11 @@ def test_install_set_version(self, mocker: MockerFixture, tmp_path: Path) -> Non
         )
         mock_set_version.assert_called_once_with(str(version))

-    def test_install_version_undefined(
-        self, mocker: MockerFixture, tmp_path: Path
-    ) -> None:
+    def test_install_version_undefined(self, mocker: MockerFixture, tmp_path: Path) -> None:
         """Test install."""
         mocker.patch.object(TFEnvManager, "version", None)
         tfenv = TFEnvManager(tmp_path)
-        with pytest.raises(
-            ValueError, match=r"^version not provided and unable to find .*"
-        ):
+        with pytest.raises(ValueError, match=r"^version not provided and unable to find .*"):
             tfenv.install()

     def test_list_installed(self, mocker: MockerFixture, tmp_path: Path) -> None:
@@ -311,9 +292,7 @@ def test_list_installed_none(self, mocker: MockerFixture, tmp_path: Path) -> Non
             ("0.15.0-alpha13", Version("0.15.0-alpha13")),
         ],
     )
-    def test_parse_version_string(
-        self, provided: str, expected: Optional[Version]
-    ) -> None:
+    def test_parse_version_string(self, provided: str, expected: Version | None) -> None:
         """Test parse_version_string."""
         assert TFEnvManager.parse_version_string(provided) == expected
@@ -404,10 +383,10 @@ def test_set_version_same(self, mocker: MockerFixture, tmp_path: Path) -> None:
     )
     def test_terraform_block(
         self,
-        caplog: LogCaptureFixture,
-        expected: Dict[str, Any],
+        caplog: pytest.LogCaptureFixture,
+        expected: dict[str, Any],
         mocker: MockerFixture,
-        response: List[Any],
+        response: list[Any],
         tmp_path: Path,
     ) -> None:
         """Test terraform_block."""
@@ -461,9 +440,7 @@ def test_version_latest(self, mocker: MockerFixture, tmp_path: Path) -> None:
         mock_get_version_from_file.assert_called_once_with()
         mock_get_available_tf_versions.assert_called_once_with(False)

-    def test_version_latest_partial(
-        self, mocker: MockerFixture, tmp_path: Path
-    ) -> None:
+    def test_version_latest_partial(self, mocker: MockerFixture, tmp_path: Path) -> None:
         """Test version latest."""
         version = Version("0.14.3")
         mocker.patch.object(TFEnvManager, "versions_dir", tmp_path)
diff --git a/tests/unit/factories.py b/tests/unit/factories.py
index 61db31633..c4fb0b3e4 100644
--- a/tests/unit/factories.py
+++ b/tests/unit/factories.py
@@ -1,48 +1,53 @@
 """Test classes."""
-# pyright: basic, reportIncompatibleMethodOverride=none
+# pyright: reportIncompatibleMethodOverride=none
 from __future__ import annotations

-from typing import TYPE_CHECKING, Any, Dict, List, MutableMapping, Optional, Tuple
+from functools import cached_property
+from typing import TYPE_CHECKING, Any, cast
+from unittest.mock import MagicMock

 import boto3
 import yaml
 from botocore.stub import Stubber
-from mock import MagicMock
 from packaging.specifiers import SpecifierSet

 from runway.config.components.runway import RunwayDeploymentDefinition
 from runway.context import CfnginContext, RunwayContext
-from runway.core.components import DeployEnvironment
 from runway.utils import MutableMap

 if TYPE_CHECKING:
+    from collections.abc import MutableMapping
     from pathlib import Path

     from boto3.resources.base import ServiceResource
     from botocore.client import BaseClient
+    from mypy_boto3_s3.client import S3Client

     from runway.config import CfnginConfig
+    from runway.core.components import DeployEnvironment
     from runway.core.type_defs import RunwayActionTypeDef


 class MockBoto3Session:
-    """Mock class that acts like a boto3.session.
+    """Mock class that acts like a :class:`boto3.session.Session`.

-    Must be preloaded with stubbers.
+    Clients must be registered using :meth:`~pytest_runway.MockBoto3Session.register_client`
+    before they can be created with the usual :meth:`~pytest_runway.MockBoto3Session.client`
+    call. This is to ensure that all AWS calls are stubbed.

     """

     def __init__(
         self,
         *,
-        clients: Optional[MutableMap] = None,
-        aws_access_key_id: Optional[str] = None,
-        aws_secret_access_key: Optional[str] = None,
-        aws_session_token: Optional[str] = None,
-        profile_name: Optional[str] = None,
-        region_name: Optional[str] = None,
-    ):
+        clients: MutableMap | None = None,
+        aws_access_key_id: str | None = None,
+        aws_secret_access_key: str | None = None,
+        aws_session_token: str | None = None,
+        profile_name: str | None = None,
+        region_name: str | None = None,
+    ) -> None:
         """Instantiate class.

         Args:
@@ -55,7 +60,6 @@ def __init__(

         """
         self._clients = clients or MutableMap()
-        self._client_calls: Dict[str, Any] = {}
         self._session = MagicMock()
         self.aws_access_key_id = aws_access_key_id
         self.aws_secret_access_key = aws_secret_access_key
@@ -63,81 +67,95 @@ def __init__(
         self.profile_name = profile_name
         self.region_name = region_name

-    def assert_client_called_with(self, service_name: str, **kwargs: Any) -> None:
-        """Assert a client was created with the provided kwargs."""
-        key = f"{service_name}.{kwargs.get('region_name', self.region_name)}"
-        assert self._client_calls[key] == kwargs
-
     def client(self, service_name: str, **kwargs: Any) -> BaseClient:
         """Return a stubbed client.

         Args:
             service_name: The name of a service, e.g. 's3' or 'ec2'.
+            **kwargs: Arbitrary keyword arguments.

         Returns:
             Stubbed boto3 client.

         Raises:
-            KeyError: Client was not stubbed from Context before trying to use.
+            ValueError: Client was not stubbed from Context before trying to use.

         """
-        key = f"{service_name}.{kwargs.get('region_name', self.region_name)}"
-        self._client_calls[key] = kwargs
-        return self._clients[key]
+        key = f"{service_name}.{kwargs.get('region_name') or self.region_name}"
+        try:
+            return self._clients[key]
+        except AttributeError:
+            raise ValueError(f"client not registered for {key}") from None

     def register_client(
-        self, service_name: str, region_name: Optional[str] = None
-    ) -> Tuple[Any, Stubber]:
+        self, service_name: str, *, region: str | None = None
+    ) -> tuple[Any, Stubber]:
         """Register a client for the boto3 session.

         Args:
             service_name: The name of a service, e.g. 's3' or 'ec2'.
-            region_name: AWS region.
+            region: AWS region.
""" - key = f"{service_name}.{region_name or self.region_name}" - client = boto3.client( # type: ignore - service_name, # type: ignore - region_name=region_name or self.region_name, + key = f"{service_name}.{region or self.region_name}" + client = cast( + "BaseClient", + boto3.client( + service_name, # pyright: ignore[reportCallIssue, reportArgumentType] + region_name=region or self.region_name, + ), ) - stubber = Stubber(client) # type: ignore - self._clients[key] = client # type: ignore - return client, stubber # type: ignore + stubber = Stubber(client) + self._clients[key] = client + return client, stubber def resource(self, service_name: str, **kwargs: Any) -> ServiceResource: """Return a stubbed resource.""" kwargs.setdefault("region_name", self.region_name) - resource: ServiceResource = boto3.resource(service_name, **kwargs) # type: ignore - resource.meta.client = self._clients[f"{service_name}.{kwargs['region_name']}"] + resource = cast( + "ServiceResource", + boto3.resource( + service_name, # pyright: ignore[reportCallIssue, reportArgumentType] + **kwargs, + ), + ) + resource.meta.client = self.client(service_name, **kwargs) return resource - def service(self, service_name: str, region_name: Optional[str] = None) -> None: - """Not implimented.""" + def service(self, service_name: str, *, region_name: str | None = None) -> None: + """Not implemented.""" raise NotImplementedError -class MockCFNginContext(CfnginContext): - """Subclass CFNgin context object for tests.""" +class MockCfnginContext(CfnginContext): + """Subclass of :class:`~runway.context.CfnginContext` for tests.""" def __init__( self, *, - config_path: Optional[Path] = None, - config: Optional[CfnginConfig] = None, - deploy_environment: Optional[DeployEnvironment] = None, - parameters: Optional[MutableMapping[str, Any]] = None, - force_stacks: Optional[List[str]] = None, - region: Optional[str] = "us-east-1", - stack_names: Optional[List[str]] = None, - work_dir: Optional[Path] = None, + config: CfnginConfig | None = None, + config_path: Path | None = None, + deploy_environment: DeployEnvironment, + force_stacks: list[str] | None = None, + parameters: MutableMapping[str, Any] | None = None, + stack_names: list[str] | None = None, + work_dir: Path | None = None, **_: Any, ) -> None: - """Instantiate class.""" - self._boto3_test_client = MutableMap() - self._boto3_test_stubber = MutableMap() + """Instantiate class. - # used during init process - self.s3_stubber = self.add_stubber("s3", region=region) + Args: + config: The CFNgin configuration being operated on. + config_path: Path to the config file that was provided. + deploy_environment: The current deploy environment. + force_stacks: A list of stacks to force work on. Used to work on locked stacks. + parameters: Parameters passed from Runway or read from a file. + stack_names: A list of stack_names to operate on. If not passed, + all stacks defined in the config will be operated on. + work_dir: Working directory used by CFNgin. + + """ + self._boto3_sessions: dict[str, MockBoto3Session] = {} super().__init__( config_path=config_path, @@ -149,42 +167,113 @@ def __init__( work_dir=work_dir, ) - def add_stubber(self, service_name: str, region: Optional[str] = None) -> Stubber: + @cached_property + def s3_client(self) -> S3Client: + """AWS S3 client. + + Adds an S3 stubber prior to returning from :attr:`~runway.context.CfnginContext.s3_client`. 
+ + """ + self.add_stubber("s3", region=self.bucket_region) + return super().s3_client + + def add_stubber( + self, + service_name: str, + *, + aws_access_key_id: str | None = None, + aws_secret_access_key: str | None = None, + aws_session_token: str | None = None, + profile: str | None = None, + region: str | None = None, + ) -> Stubber: """Add a stubber to context. Args: - service_name: The name of a service, e.g. 's3' or 'ec2'. - region: AWS region. + service_name: The name of the service to stub. + aws_access_key_id: AWS Access Key ID. + aws_secret_access_key: AWS secret Access Key. + aws_session_token: AWS session token. + profile: The profile for the session. + region: The region for the session. """ - key = f"{service_name}.{region or self.env.aws_region}" - - self._boto3_test_client[key] = boto3.client( # type: ignore - service_name, # type: ignore - region_name=region or self.env.aws_region, + session = self._get_mocked_session( + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + profile=profile, + region=region or self.env.aws_region, ) - self._boto3_test_stubber[key] = Stubber(self._boto3_test_client[key]) - return self._boto3_test_stubber[key] + _client, stubber = session.register_client(service_name, region=region) + return stubber - def get_session( + def _get_mocked_session( self, *, - aws_access_key_id: Optional[str] = None, - aws_secret_access_key: Optional[str] = None, - aws_session_token: Optional[str] = None, - profile: Optional[str] = None, - region: Optional[str] = None, + aws_access_key_id: str | None = None, + aws_secret_access_key: str | None = None, + aws_session_token: str | None = None, + profile: str | None = None, + region: str | None = None, ) -> MockBoto3Session: - """Wrap get_session to enable stubbing.""" - return MockBoto3Session( - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, - aws_session_token=aws_session_token, - clients=self._boto3_test_client, - profile_name=profile, - region_name=region or self.env.aws_region, + """Get a mocked boto3 session.""" + region = region or self.env.aws_region + if region not in self._boto3_sessions: + self._boto3_sessions[region] = MockBoto3Session( + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + profile_name=profile, + region_name=region or self.env.aws_region, + ) + return self._boto3_sessions[region] + + def get_session( + self, + *, + aws_access_key_id: str | None = None, + aws_secret_access_key: str | None = None, + aws_session_token: str | None = None, + profile: str | None = None, + region: str | None = None, + ) -> boto3.Session: + """Wrap get_session to enable stubbing. + + A stubber must exist before ``get_session`` is called or an error will be raised. + + Args: + aws_access_key_id: AWS Access Key ID. + aws_secret_access_key: AWS secret Access Key. + aws_session_token: AWS session token. + profile: The profile for the session. + region: The region for the session. + + """ + return cast( + boto3.Session, + self._get_mocked_session( + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + profile=profile, + region=region or self.env.aws_region, + ), ) + def get_stubbed_client(self, service_name: str, *, region: str | None = None) -> BaseClient: + """Get an existing stubbed client. 
+
+        This can be used after :meth:`~pytest_runway.MockCfnginContext.add_stubber` has
+        been called to get the stubbed client.
+
+        Args:
+            service_name: The name of the service that was stubbed.
+            region: The region of the session.
+
+        """
+        return self._get_mocked_session(region=region).client(service_name, region_name=region)
+

 class MockRunwayConfig(MutableMap):
     """Mock Runway config object."""
@@ -197,13 +286,11 @@ def __init__(self, **kwargs: Any) -> None:
         self.future = MagicMock()
         self.tests = []
         self.ignore_git_branch = False
-        self.runway_version = SpecifierSet(">=1.10", prereleases=True)
+        self.runway_version = SpecifierSet(">=0.0.0", prereleases=True)
         self.variables = MutableMap()

         # classmethods
-        self.find_config_file = MagicMock(
-            name="find_config_file", return_value="./runway.yml"
-        )
+        self.find_config_file = MagicMock(name="find_config_file", return_value="./runway.yml")
         self.load_from_file = MagicMock(name="load_from_file", return_value=self)

     def __call__(self, **kwargs: Any) -> MockRunwayConfig:
@@ -213,72 +300,37 @@ class MockRunwayContext(RunwayContext):
-    """Subclass Runway context object for tests."""
+    """Subclass of :class:`~runway.context.RunwayContext` for tests."""

-    _use_concurrent: bool
+    _use_concurrent: bool = True

     def __init__(
         self,
         *,
-        command: Optional[RunwayActionTypeDef] = None,
-        deploy_environment: Any = None,
-        work_dir: Optional[Path] = None,
+        command: RunwayActionTypeDef | None = None,
+        deploy_environment: DeployEnvironment,
+        work_dir: Path | None = None,
         **_: Any,
     ) -> None:
-        """Instantiate class."""
-        if not deploy_environment:
-            deploy_environment = DeployEnvironment(environ={}, explicit_name="test")
-        super().__init__(
-            command=command, deploy_environment=deploy_environment, work_dir=work_dir
-        )
-        self._boto3_test_client = MutableMap()
-        self._boto3_test_stubber = MutableMap()
-        self._use_concurrent = True
-
-    def add_stubber(self, service_name: str, region: Optional[str] = None) -> Stubber:
-        """Add a stubber to context.
+        """Instantiate class.

         Args:
-            service_name: The name of a service, e.g. 's3' or 'ec2'.
-            region: AWS region name.
+            command: Runway command/action being run.
+            deploy_environment: The current deploy environment.
+            work_dir: Working directory used by Runway.
""" - key = f"{service_name}.{region or self.env.aws_region}" + self._boto3_sessions: dict[str, MockBoto3Session] = {} - self._boto3_test_client[key] = boto3.client( # type: ignore - service_name, # type: ignore - region_name=region or self.env.aws_region, - **self.boto3_credentials, - ) - self._boto3_test_stubber[key] = Stubber(self._boto3_test_client[key]) - return self._boto3_test_stubber[key] - - def get_session( - self, - *, - aws_access_key_id: Optional[str] = None, - aws_secret_access_key: Optional[str] = None, - aws_session_token: Optional[str] = None, - profile: Optional[str] = None, - region: Optional[str] = None, - ) -> MockBoto3Session: - """Wrap get_session to enable stubbing.""" - return MockBoto3Session( - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, - aws_session_token=aws_session_token, - clients=self._boto3_test_client, - profile_name=profile, - region_name=region or self.env.aws_region, - ) + super().__init__(command=command, deploy_environment=deploy_environment, work_dir=work_dir) @property - def use_concurrent(self) -> bool: # pylint: disable=invalid-overridden-method + def use_concurrent(self) -> bool: """Override property of parent with something that can be set.""" return self._use_concurrent - @use_concurrent.setter # type: ignore - def use_concurrent( # pylint: disable=invalid-overridden-method + @use_concurrent.setter + def use_concurrent( # pyright: ignore[reportIncompatibleVariableOverride] self, value: bool ) -> None: """Override property of parent with something that can be set. @@ -289,12 +341,109 @@ def use_concurrent( # pylint: disable=invalid-overridden-method """ self._use_concurrent = value + def add_stubber( + self, + service_name: str, + *, + aws_access_key_id: str | None = None, + aws_secret_access_key: str | None = None, + aws_session_token: str | None = None, + profile: str | None = None, + region: str | None = None, + ) -> Stubber: + """Add a stubber to context. + + Args: + service_name: The name of the service to stub. + aws_access_key_id: AWS Access Key ID. + aws_secret_access_key: AWS secret Access Key. + aws_session_token: AWS session token. + profile: The profile for the session. + region: The region for the session. + + """ + session = self._get_mocked_session( + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + profile=profile, + region=region or self.env.aws_region, + ) + _client, stubber = session.register_client(service_name, region=region) + return stubber + + def _get_mocked_session( + self, + *, + aws_access_key_id: str | None = None, + aws_secret_access_key: str | None = None, + aws_session_token: str | None = None, + profile: str | None = None, + region: str | None = None, + ) -> MockBoto3Session: + """Get a mocked boto3 session.""" + region = region or self.env.aws_region + if region not in self._boto3_sessions: + self._boto3_sessions[region] = MockBoto3Session( + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + profile_name=profile, + region_name=region or self.env.aws_region, + ) + return self._boto3_sessions[region] + + def get_session( + self, + *, + aws_access_key_id: str | None = None, + aws_secret_access_key: str | None = None, + aws_session_token: str | None = None, + profile: str | None = None, + region: str | None = None, + ) -> boto3.Session: + """Wrap get_session to enable stubbing. 
+
+        A stubber must exist before ``get_session`` is called or an error will be raised.
+
+        Args:
+            aws_access_key_id: AWS Access Key ID.
+            aws_secret_access_key: AWS secret Access Key.
+            aws_session_token: AWS session token.
+            profile: The profile for the session.
+            region: The region for the session.
+
+        """
+        return cast(
+            boto3.Session,
+            self._get_mocked_session(
+                aws_access_key_id=aws_access_key_id,
+                aws_secret_access_key=aws_secret_access_key,
+                aws_session_token=aws_session_token,
+                profile=profile,
+                region=region or self.env.aws_region,
+            ),
+        )
+
+    def get_stubbed_client(self, service_name: str, *, region: str | None = None) -> BaseClient:
+        """Get an existing stubbed client.
+
+        This can be used after :meth:`~pytest_runway.MockRunwayContext.add_stubber` has
+        been called to get the stubbed client.
+
+        Args:
+            service_name: The name of the service that was stubbed.
+            region: The region of the session.
+
+        """
+        return self._get_mocked_session(region=region).client(service_name, region_name=region)
+

 class YamlLoader:
     """Load YAML files from a directory."""

     def __init__(
-        self, root: Path, load_class: Optional[type] = None, load_type: str = "default"
+        self, root: Path, load_class: type | None = None, load_type: str = "default"
     ) -> None:
         """Instantiate class.
diff --git a/tests/unit/fixtures/config.runway.variables.yml b/tests/unit/fixtures/config.runway.variables.yml
index 98b4d5e4e..3e603cc18 100644
--- a/tests/unit/fixtures/config.runway.variables.yml
+++ b/tests/unit/fixtures/config.runway.variables.yml
@@ -1,6 +1,6 @@
 test_value: basic value

-require_test: True
+require_test: true

 test_list:
   - list value 1
diff --git a/tests/unit/fixtures/stack_policies/default.json b/tests/unit/fixtures/stack_policies/default.json
index 6a3513825..04ba8c8c1 100644
--- a/tests/unit/fixtures/stack_policies/default.json
+++ b/tests/unit/fixtures/stack_policies/default.json
@@ -1,10 +1,10 @@
 {
-  "Statement" : [
-    {
-      "Effect" : "Allow",
-      "Action" : "Update:*",
-      "Principal": "*",
-      "Resource" : "*"
-    }
-  ]
+    "Statement": [
+        {
+            "Action": "Update:*",
+            "Effect": "Allow",
+            "Principal": "*",
+            "Resource": "*"
+        }
+    ]
 }
diff --git a/tests/unit/fixtures/stack_policies/none.json b/tests/unit/fixtures/stack_policies/none.json
index daf7f8424..f66cbde7d 100644
--- a/tests/unit/fixtures/stack_policies/none.json
+++ b/tests/unit/fixtures/stack_policies/none.json
@@ -1,10 +1,10 @@
 {
-  "Statement" : [
-    {
-      "Effect" : "Deny",
-      "Action" : "Update:*",
-      "Principal": "*",
-      "Resource" : "*"
-    }
-  ]
+    "Statement": [
+        {
+            "Action": "Update:*",
+            "Effect": "Deny",
+            "Principal": "*",
+            "Resource": "*"
+        }
+    ]
 }
diff --git a/tests/unit/lookups/handlers/test_base.py b/tests/unit/lookups/handlers/test_base.py
index 8810bdd31..6966df71d 100644
--- a/tests/unit/lookups/handlers/test_base.py
+++ b/tests/unit/lookups/handlers/test_base.py
@@ -1,15 +1,12 @@
 """Tests for lookup handler base class."""
-# pylint: disable=duplicate-value
-# pyright: basic
 from __future__ import annotations

 import json
-from typing import Dict, Optional
+from unittest.mock import MagicMock

 import pytest
 import yaml
-from mock import MagicMock

 from runway.lookups.handlers.base import LookupHandler
 from runway.utils import MutableMap
@@ -30,9 +27,7 @@ def test_dependencies(self) -> None:

         This should always return an empty set.
""" - assert isinstance( - LookupHandler.dependencies(MagicMock(autospec=VariableValue)), set - ) + assert isinstance(LookupHandler.dependencies(MagicMock(autospec=VariableValue)), set) def test_format_results(self) -> None: """Test format_results.""" @@ -45,18 +40,9 @@ def test_format_results(self) -> None: assert LookupHandler.format_results(test_dict) == test_dict assert LookupHandler.format_results(mute_map) == test_dict - assert ( - LookupHandler.format_results(test_dict, get="test_key") - == test_dict["test_key"] - ) - assert ( - LookupHandler.format_results(mute_map, get="test_key") - == mute_map["test_key"] - ) - assert ( - LookupHandler.format_results(mute_map, get="nested") - == mute_map["nested"].data - ) + assert LookupHandler.format_results(test_dict, get="test_key") == test_dict["test_key"] + assert LookupHandler.format_results(mute_map, get="test_key") == mute_map["test_key"] + assert LookupHandler.format_results(mute_map, get="nested") == mute_map["nested"].data assert ( LookupHandler.format_results(mute_map, get="nested.nested_key") == mute_map["nested"]["nested_key"] @@ -66,12 +52,11 @@ def test_format_results(self) -> None: assert LookupHandler.format_results(mute_map, transform="str") == json.dumps( json.dumps(test_dict, indent=0) ) - assert LookupHandler.format_results( - mute_map, transform="str", indent=2 - ) == json.dumps(json.dumps(test_dict, indent=2)) + assert LookupHandler.format_results(mute_map, transform="str", indent=2) == json.dumps( + json.dumps(test_dict, indent=2) + ) assert ( - LookupHandler.format_results(mute_map, get="nested.bool", transform="str") - == '"True"' + LookupHandler.format_results(mute_map, get="nested.bool", transform="str") == '"True"' ) with pytest.raises(TypeError): @@ -89,9 +74,7 @@ def test_format_results(self) -> None: ("undefined", "undefined"), ], ) - def test_format_results_handle_none( - self, value: str, expected: Optional[str] - ) -> None: + def test_format_results_handle_none(self, value: str, expected: str | None) -> None: """Test format_results.""" assert LookupHandler.format_results(value) == expected if isinstance(expected, str): @@ -133,8 +116,8 @@ def test_load_list(self) -> None: def test_parse( self, query: str, - raw_args: Optional[Dict[str, str]], - expected_args: Dict[str, str], + raw_args: dict[str, str] | None, + expected_args: dict[str, str], ) -> None: """Test parse.""" value = f"{query}::{raw_args}" @@ -147,7 +130,8 @@ def test_transform_bool_to_bool(self) -> None: result_true = LookupHandler.transform(True, to_type="bool") result_false = LookupHandler.transform(False, to_type="bool") - assert isinstance(result_true, bool) and isinstance(result_false, bool) + assert isinstance(result_true, bool) + assert isinstance(result_false, bool) assert result_true assert not result_false @@ -162,7 +146,8 @@ def test_transform_str_to_bool(self) -> None: result_true = LookupHandler.transform("true", to_type="bool") result_false = LookupHandler.transform("false", to_type="bool") - assert isinstance(result_true, bool) and isinstance(result_false, bool) + assert isinstance(result_true, bool) + assert isinstance(result_false, bool) assert result_true assert not result_false @@ -194,15 +179,11 @@ def test_transform_str_direct(self) -> None: def test_transform_str_list(self) -> None: """Test list type joined to create string.""" assert LookupHandler.transform(["val1", "val2"], to_type="str") == "val1,val2" - assert ( - LookupHandler.transform({"val", "val"}, to_type="str") # noqa: B033 - == "val" - ) + assert 
LookupHandler.transform({"val"}, to_type="str") == "val" assert LookupHandler.transform(("val1", "val2"), to_type="str") == "val1,val2" def test_transform_str_list_delimiter(self) -> None: """Test list to string with a specified delimiter.""" assert ( - LookupHandler.transform(["val1", "val2"], to_type="str", delimiter="|") - == "val1|val2" + LookupHandler.transform(["val1", "val2"], to_type="str", delimiter="|") == "val1|val2" ) diff --git a/tests/unit/lookups/handlers/test_cfn.py b/tests/unit/lookups/handlers/test_cfn.py index b1bbafb44..ccc7cba99 100644 --- a/tests/unit/lookups/handlers/test_cfn.py +++ b/tests/unit/lookups/handlers/test_cfn.py @@ -1,18 +1,18 @@ """Test runway.lookups.handlers.cfn.""" -# pyright: basic, reportFunctionMemberAccess=none +# pyright: reportFunctionMemberAccess=none from __future__ import annotations import json import logging from datetime import datetime -from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple +from typing import TYPE_CHECKING, Any +from unittest.mock import MagicMock import boto3 import pytest from botocore.exceptions import ClientError from botocore.stub import Stubber -from mock import MagicMock from runway.cfngin.exceptions import StackDoesNotExist from runway.cfngin.providers.aws.default import Provider @@ -21,7 +21,6 @@ if TYPE_CHECKING: from mypy_boto3_cloudformation.client import CloudFormationClient - from pytest import LogCaptureFixture from pytest_mock import MockerFixture from ...factories import MockRunwayContext @@ -29,16 +28,16 @@ def generate_describe_stacks_stack( stack_name: str, - outputs: Dict[str, str], - creation_time: Optional[datetime] = None, + outputs: dict[str, str], + creation_time: datetime | None = None, stack_status: str = "CREATE_COMPLETE", -) -> Dict[str, Any]: +) -> dict[str, Any]: """Generate describe stacks stack. Args: stack_name: Name of the stack. outputs: Dictionary to be converted to stack outputs. - creation_time (Optional[datetime.datetime]): Stack creation time. + creation_time: Stack creation time. stack_status: Current stack status. 

     Returns:
@@ -59,7 +58,7 @@ def generate_describe_stacks_stack(
     }


-def setup_cfn_client() -> Tuple[CloudFormationClient, Stubber]:
+def setup_cfn_client() -> tuple[CloudFormationClient, Stubber]:
     """Create a CloudFormation client & Stubber."""
     client = boto3.client("cloudformation")
     return client, Stubber(client)
@@ -96,10 +95,7 @@ def test_handle(self, mocker: MockerFixture) -> None:
         )

         # test happy path when used from CFNgin (provider)
-        assert (
-            CfnLookup.handle(value, context=mock_context, provider=mock_provider)
-            == "success"
-        )
+        assert CfnLookup.handle(value, context=mock_context, provider=mock_provider) == "success"
         mock_parse.assert_called_once_with(value)
         mock_provider.get_output.assert_called_once_with(*query)
         mock_should_use.assert_called_once_with({"region": region}, mock_provider)
@@ -129,16 +125,14 @@
     )
     def test_handle_exception(
         self,
-        caplog: LogCaptureFixture,
-        default: Optional[str],
+        caplog: pytest.LogCaptureFixture,
+        default: str | None,
         exception: Exception,
         mocker: MockerFixture,
     ) -> None:
         """Test handle cls.get_stack_output raise exception."""
         caplog.set_level(logging.DEBUG, logger="runway.lookups.handlers.cfn")
-        mock_should_use = mocker.patch.object(
-            CfnLookup, "should_use_provider", return_value=False
-        )
+        mock_should_use = mocker.patch.object(CfnLookup, "should_use_provider", return_value=False)
         mock_context = MagicMock(name="context")
         mock_session = MagicMock(name="session")
         mock_context.get_session.return_value = mock_session
@@ -150,10 +144,7 @@
         query = OutputQuery(*raw_query.split("."))

         if default:
-            assert (
-                CfnLookup.handle(raw_query + "::default=" + default, mock_context)
-                == default
-            )
+            assert CfnLookup.handle(raw_query + "::default=" + default, mock_context) == default
             mock_should_use.assert_called_once_with({"default": default}, None)
             assert (
                 "unable to resolve lookup for CloudFormation Stack output "
@@ -190,17 +181,15 @@
     )
     def test_handle_provider_exception(
         self,
-        caplog: LogCaptureFixture,
-        default: Optional[str],
+        caplog: pytest.LogCaptureFixture,
+        default: str | None,
         exception: Exception,
         mocker: MockerFixture,
         runway_context: MockRunwayContext,
     ) -> None:
         """Test handle provider raise exception."""
         caplog.set_level(logging.DEBUG, logger="runway.lookups.handlers.cfn")
-        mock_should_use = mocker.patch.object(
-            CfnLookup, "should_use_provider", return_value=True
-        )
+        mock_should_use = mocker.patch.object(CfnLookup, "should_use_provider", return_value=True)
         mock_provider = MagicMock(region="us-east-1")
         mock_provider.get_output.side_effect = exception
         raw_query = "test-stack.output1"
@@ -237,14 +226,10 @@
     def test_handle_valueerror(self, runway_context: MockRunwayContext) -> None:
         """Test handle raising ValueError."""
-        with pytest.raises(ValueError) as excinfo:
+        with pytest.raises(ValueError, match="query must be <stack-name>.<output-name>"):
             assert CfnLookup.handle("something", runway_context)
-        assert (
-            str(excinfo.value)
-            == 'query must be <stack-name>.<output-name>; got "something"'
-        )

-    def test_get_stack_output(self, caplog: LogCaptureFixture) -> None:
+    def test_get_stack_output(self, caplog: pytest.LogCaptureFixture) -> None:
         """Test get_stack_output."""
         caplog.set_level(logging.DEBUG, logger="runway.lookups.handlers.cfn")
         client, stubber = setup_cfn_client()
@@ -265,7 +250,7 @@
         assert f"describing stack: {stack_name}" in caplog.messages
         assert f"{stack_name} stack outputs: {json.dumps(outputs)}" in caplog.messages
f"{stack_name} stack outputs: {json.dumps(outputs)}" in caplog.messages - def test_get_stack_output_clienterror(self, caplog: LogCaptureFixture) -> None: + def test_get_stack_output_clienterror(self, caplog: pytest.LogCaptureFixture) -> None: """Test get_stack_output raising ClientError.""" caplog.set_level(logging.DEBUG, logger="runway.lookups.handlers.cfn") client, stubber = setup_cfn_client() @@ -285,7 +270,7 @@ def test_get_stack_output_clienterror(self, caplog: LogCaptureFixture) -> None: stubber.assert_no_pending_responses() assert f"describing stack: {stack_name}" in caplog.messages - def test_get_stack_output_keyerror(self, caplog: LogCaptureFixture) -> None: + def test_get_stack_output_keyerror(self, caplog: pytest.LogCaptureFixture) -> None: """Test get_stack_output raising KeyError.""" caplog.set_level(logging.DEBUG, logger="runway.lookups.handlers.cfn") client, stubber = setup_cfn_client() @@ -315,17 +300,15 @@ def test_get_stack_output_keyerror(self, caplog: LogCaptureFixture) -> None: ) def test_should_use_provider_falsy( self, - args: Dict[str, Any], - caplog: LogCaptureFixture, - provider: Optional[Provider], + args: dict[str, Any], + caplog: pytest.LogCaptureFixture, + provider: Provider | None, ) -> None: """Test should_use_provider with falsy cases.""" caplog.set_level(logging.DEBUG, logger="runway.lookups.handlers.cfn") assert not CfnLookup.should_use_provider(args, provider) if provider: - assert ( - "not using provider; requested region does not match" in caplog.messages - ) + assert "not using provider; requested region does not match" in caplog.messages assert "using provider" not in caplog.messages @pytest.mark.parametrize( @@ -337,9 +320,9 @@ def test_should_use_provider_falsy( ) def test_should_use_provider_truthy( self, - args: Dict[str, Any], - caplog: LogCaptureFixture, - provider: Optional[Provider], + args: dict[str, Any], + caplog: pytest.LogCaptureFixture, + provider: Provider | None, ) -> None: """Test should_use_provider with truthy cases.""" caplog.set_level(logging.DEBUG, logger="runway.lookups.handlers.cfn") diff --git a/tests/unit/lookups/handlers/test_ecr.py b/tests/unit/lookups/handlers/test_ecr.py index 2c48e308b..7cc395e79 100644 --- a/tests/unit/lookups/handlers/test_ecr.py +++ b/tests/unit/lookups/handlers/test_ecr.py @@ -1,7 +1,5 @@ """Test runway.lookups.handlers.ecr.""" -# pylint: disable=redefined-outer-name -# pyright: basic from __future__ import annotations import base64 @@ -13,15 +11,16 @@ from runway.lookups.handlers.ecr import EcrLookup if TYPE_CHECKING: - from mock import MagicMock + from unittest.mock import MagicMock + from pytest_mock import MockerFixture - from ...factories import MockCFNginContext, MockRunwayContext + from ...factories import MockCfnginContext, MockRunwayContext MODULE = "runway.lookups.handlers.ecr" -@pytest.fixture(scope="function") +@pytest.fixture() def mock_format_results(mocker: MockerFixture) -> MagicMock: """Mock EcrLookup.format_results.""" return mocker.patch.object( @@ -33,7 +32,7 @@ class TestEcrLookup: """Test runway.lookups.handlers.ecr.EcrLookup.""" def test_get_login_password( - self, cfngin_context: MockCFNginContext, runway_context: MockRunwayContext + self, cfngin_context: MockCfnginContext, runway_context: MockRunwayContext ) -> None: """Test get_login_password.""" cfngin_stubber = cfngin_context.add_stubber("ecr") @@ -43,9 +42,7 @@ def test_get_login_password( response = { "authorizationData": [ { - "authorizationToken": base64.b64encode( - ("AWS:" + password).encode() - ).decode(), + 
"authorizationToken": base64.b64encode(("AWS:" + password).encode()).decode(), "expiresAt": datetime.datetime(2015, 1, 1), "proxyEndpoint": "string", } @@ -71,14 +68,15 @@ def test_get_login_password( cfngin_stubber.assert_no_pending_responses() runway_stubber.assert_no_pending_responses() - def test_get_login_password_raise_value_error( - self, runway_context: MockRunwayContext - ) -> None: + def test_get_login_password_raise_value_error(self, runway_context: MockRunwayContext) -> None: """Test get_login_password.""" runway_stubber = runway_context.add_stubber("ecr") runway_stubber.add_response("get_authorization_token", {}, {}) - with runway_stubber, pytest.raises( - ValueError, match="get_authorization_token did not return authorizationData" + with ( + runway_stubber, + pytest.raises( + ValueError, match="get_authorization_token did not return authorizationData" + ), ): assert EcrLookup.get_login_password( runway_context.get_session().client("ecr") # type: ignore @@ -98,19 +96,16 @@ def test_handle_login_password( return_value="EcrLookup.get_login_password()", ) assert ( - EcrLookup.handle("login-password", runway_context) - == mock_format_results.return_value + EcrLookup.handle("login-password", runway_context) == mock_format_results.return_value ) mock_get_login_password.assert_called_once() - mock_format_results.assert_called_once_with( - mock_get_login_password.return_value - ) + mock_format_results.assert_called_once_with(mock_get_login_password.return_value) def test_handle_value_error(self, runway_context: MockRunwayContext) -> None: """Test handle raise ValueError.""" runway_context.add_stubber("ecr") - with pytest.raises(ValueError) as excinfo: + with pytest.raises(ValueError, match="ecr lookup does not support") as excinfo: EcrLookup.handle("unsupported", runway_context) assert str(excinfo.value) == "ecr lookup does not support 'unsupported'" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="ecr lookup does not support"): EcrLookup.handle("unsupported::default=something", runway_context) diff --git a/tests/unit/lookups/handlers/test_env.py b/tests/unit/lookups/handlers/test_env.py index 4a85ced3a..ab676219f 100644 --- a/tests/unit/lookups/handlers/test_env.py +++ b/tests/unit/lookups/handlers/test_env.py @@ -1,6 +1,5 @@ """Tests for lookup handler for env.""" -# pyright: basic from __future__ import annotations from typing import TYPE_CHECKING @@ -27,5 +26,5 @@ def test_handle(self, runway_context: MockRunwayContext) -> None: def test_handle_not_found(self, runway_context: MockRunwayContext) -> None: """Validate exception when lookup cannot be resolved.""" runway_context.env.vars = ENV_VARS.copy() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="does not exist in the environment"): EnvLookup.handle("NOT_VALID", context=runway_context) diff --git a/tests/unit/lookups/handlers/test_random_string.py b/tests/unit/lookups/handlers/test_random_string.py index 26fa49e7a..4bd7ed8af 100644 --- a/tests/unit/lookups/handlers/test_random_string.py +++ b/tests/unit/lookups/handlers/test_random_string.py @@ -4,9 +4,9 @@ import string from typing import TYPE_CHECKING +from unittest.mock import Mock import pytest -from mock import Mock from runway.lookups.handlers.random_string import ArgsDataModel, RandomStringLookup @@ -95,10 +95,7 @@ class TestRandomStringLookup: ) def test_calculate_char_set(self, args: object, expected: str) -> None: """Test calculate_char_set.""" - assert ( - 
RandomStringLookup.calculate_char_set(ArgsDataModel.parse_obj(args)) - == expected - ) + assert RandomStringLookup.calculate_char_set(ArgsDataModel.model_validate(args)) == expected @pytest.mark.parametrize( "args, value, expected", @@ -127,7 +124,7 @@ def test_calculate_char_set(self, args: object, expected: str) -> None: def test_ensure_has_one_of(self, args: object, expected: bool, value: str) -> None: """Test ensure_has_one_of.""" assert ( - RandomStringLookup.ensure_has_one_of(ArgsDataModel.parse_obj(args), value) + RandomStringLookup.ensure_has_one_of(ArgsDataModel.model_validate(args), value) is expected ) @@ -137,10 +134,7 @@ def test_generate_random_string(self, length: int, mocker: MockerFixture) -> Non char_set = "0123456789" choice = Mock(side_effect=list(char_set)) mocker.patch(f"{MODULE}.secrets", choice=choice) - assert ( - RandomStringLookup.generate_random_string(char_set, length) - == char_set[:length] - ) + assert RandomStringLookup.generate_random_string(char_set, length) == char_set[:length] assert choice.call_count == length choice.assert_called_with(char_set) @@ -161,12 +155,8 @@ def test_handle(self, mocker: MockerFixture) -> None: ) assert RandomStringLookup.handle("12", Mock()) == format_results.return_value calculate_char_set.assert_called_once_with(args) - generate_random_string.assert_called_once_with( - calculate_char_set.return_value, 12 - ) - ensure_has_one_of.assert_called_once_with( - args, generate_random_string.return_value - ) + generate_random_string.assert_called_once_with(calculate_char_set.return_value, 12) + ensure_has_one_of.assert_called_once_with(args, generate_random_string.return_value) format_results.assert_called_once_with(generate_random_string.return_value) def test_handle_digit(self, mocker: MockerFixture) -> None: @@ -200,7 +190,7 @@ def test_handle_digit(self, mocker: MockerFixture) -> None: def test_handle_raise_value_error(self) -> None: """Test handle.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 RandomStringLookup.handle("test", Mock()) @pytest.mark.parametrize("value, expected", [(">!?test", False), ("t3st", True)]) diff --git a/tests/unit/lookups/handlers/test_ssm.py b/tests/unit/lookups/handlers/test_ssm.py index 0d8a57135..576af1403 100644 --- a/tests/unit/lookups/handlers/test_ssm.py +++ b/tests/unit/lookups/handlers/test_ssm.py @@ -1,11 +1,10 @@ """Test runway.lookups.handlers.ssm.""" -# pyright: basic from __future__ import annotations import json from datetime import datetime -from typing import TYPE_CHECKING, Any, Dict, Optional, Union +from typing import TYPE_CHECKING, Any import pytest import yaml @@ -14,16 +13,17 @@ from runway.variables import Variable if TYPE_CHECKING: - from ...factories import MockCFNginContext, MockRunwayContext + from ...factories import MockCfnginContext, MockRunwayContext def get_parameter_response( name: str, value: str, + *, value_type: str = "String", - label: Optional[str] = None, + label: str | None = None, version: int = 1, -) -> Dict[str, Any]: +) -> dict[str, Any]: """Generate a mock ssm.get_parameter response.""" selector = f"{name}/{label or version}" return { @@ -40,9 +40,7 @@ def get_parameter_response( } -def get_parameter_request( - name: str, decrypt: bool = True -) -> Dict[str, Union[bool, str]]: +def get_parameter_request(name: str, decrypt: bool = True) -> dict[str, bool | str]: """Generate the expected request parameters for ssm.get_parameter.""" return {"Name": name, "WithDecryption": decrypt} @@ -50,8 +48,8 @@ def get_parameter_request( 
class TestSsmLookup: """Test runway.lookups.handlers.ssm.SsmLookup.""" - def test_basic( - self, cfngin_context: MockCFNginContext, runway_context: MockRunwayContext + def test_handle_basic( + self, cfngin_context: MockCfnginContext, runway_context: MockRunwayContext ) -> None: """Test resolution of a basic lookup.""" name = "/test/param" @@ -78,7 +76,7 @@ def test_basic( cfn_stub.assert_no_pending_responses() rw_stub.assert_no_pending_responses() - def test_default(self, runway_context: MockRunwayContext) -> None: + def test_handle_default(self, runway_context: MockRunwayContext) -> None: """Test resolution of a default value.""" name = "/test/param" value = "test value" @@ -105,14 +103,12 @@ def test_default(self, runway_context: MockRunwayContext) -> None: assert var.value == value stub.assert_no_pending_responses() - def test_different_region(self, runway_context: MockRunwayContext) -> None: + def test_handle_different_region(self, runway_context: MockRunwayContext) -> None: """Test Lookup in region other than that set in Context.""" name = "/test/param" value = "test value" stubber = runway_context.add_stubber("ssm", region="us-west-2") - var = Variable( - "test_var", f"${{ssm {name}::region=us-west-2}}", variable_type="runway" - ) + var = Variable("test_var", f"${{ssm {name}::region=us-west-2}}", variable_type="runway") stubber.add_response( "get_parameter", @@ -125,7 +121,7 @@ def test_different_region(self, runway_context: MockRunwayContext) -> None: assert var.value == value stub.assert_no_pending_responses() - def test_loaded_value(self, runway_context: MockRunwayContext) -> None: + def test_handle_loaded_value(self, runway_context: MockRunwayContext) -> None: """Test resolution of a JSON value.""" name = "/test/param" raw_value = { @@ -175,7 +171,7 @@ def test_loaded_value(self, runway_context: MockRunwayContext) -> None: assert var.value == test["expected"] stub.assert_no_pending_responses() - def test_not_found(self, runway_context: MockRunwayContext) -> None: + def test_handle_not_found(self, runway_context: MockRunwayContext) -> None: """Test raises ParameterNotFound.""" name = "/test/param" stubber = runway_context.add_stubber("ssm") @@ -192,3 +188,35 @@ def test_not_found(self, runway_context: MockRunwayContext) -> None: assert "ParameterNotFound" in str(err.value.__cause__) stub.assert_no_pending_responses() + + def test_handle_no_value(self, runway_context: MockRunwayContext) -> None: + """Test handle no ``Value`` in response.""" + name = "/test/param" + value = "foo" + stubber = runway_context.add_stubber("ssm") + var = Variable("test_var", f"${{ssm {name}}}", variable_type="runway") + response = get_parameter_response(name, value) + response["Parameter"].pop("Value", None) + stubber.add_response("get_parameter", response, get_parameter_request(name)) + + with stubber: + var.resolve(context=runway_context) + assert var.value is None + stubber.assert_no_pending_responses() + + def test_handle_string_list(self, runway_context: MockRunwayContext) -> None: + """Test handle ``StringList`` returned as list.""" + name = "/test/param" + value = ["foo", "bar"] + stubber = runway_context.add_stubber("ssm") + var = Variable("test_var", f"${{ssm {name}}}", variable_type="runway") + stubber.add_response( + "get_parameter", + get_parameter_response(name, ",".join(value), value_type="StringList"), + get_parameter_request(name), + ) + + with stubber: + var.resolve(context=runway_context) + assert var.value == value + stubber.assert_no_pending_responses() diff --git 
a/tests/unit/lookups/handlers/test_var.py b/tests/unit/lookups/handlers/test_var.py index 30178c247..bccf08e38 100644 --- a/tests/unit/lookups/handlers/test_var.py +++ b/tests/unit/lookups/handlers/test_var.py @@ -1,6 +1,5 @@ """Tests for lookup handler for var.""" -# pyright: basic from __future__ import annotations from typing import TYPE_CHECKING @@ -13,7 +12,7 @@ if TYPE_CHECKING: from ...factories import MockRunwayContext -VARIABLES = MutableMap(**{"str_val": "test", "false_val": False}) +VARIABLES = MutableMap(str_val="test", false_val=False) class TestVarLookup: @@ -21,18 +20,13 @@ class TestVarLookup: def test_handle(self, runway_context: MockRunwayContext) -> None: """Validate handle base functionality.""" - assert ( - VarLookup.handle("str_val", context=runway_context, variables=VARIABLES) - == "test" - ) + assert VarLookup.handle("str_val", context=runway_context, variables=VARIABLES) == "test" def test_handle_false_result(self, runway_context: MockRunwayContext) -> None: """Validate that a bool value of False can be resolved.""" - assert not VarLookup.handle( - "false_val", context=runway_context, variables=VARIABLES - ) + assert not VarLookup.handle("false_val", context=runway_context, variables=VARIABLES) def test_handle_not_found(self, runway_context: MockRunwayContext) -> None: """Validate exception when lookup cannot be resolved.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="does not exist in the variable definition"): VarLookup.handle("NOT_VALID", context=runway_context, variables=VARIABLES) diff --git a/tests/unit/lookups/test_registry.py b/tests/unit/lookups/test_registry.py index 7899709e9..416f8a54a 100644 --- a/tests/unit/lookups/test_registry.py +++ b/tests/unit/lookups/test_registry.py @@ -1,6 +1,5 @@ """Tests for lookup registry and common lookup functionality.""" -# pyright: basic from __future__ import annotations from typing import TYPE_CHECKING, Any @@ -21,7 +20,7 @@ from ..factories import MockRunwayContext VALUES = {"str_val": "test"} -CONTEXT = MutableMap(**{"env_vars": VALUES}) +CONTEXT = MutableMap(env_vars=VALUES) VARIABLES = MutableMap(**VALUES) @@ -30,9 +29,7 @@ def test_autoloaded_lookup_handlers(mocker: MockerFixture) -> None: mocker.patch.dict(RUNWAY_LOOKUP_HANDLERS, {}) handlers = ["cfn", "ecr", "env", "random.string", "ssm", "var"] for handler in handlers: - assert ( - handler in RUNWAY_LOOKUP_HANDLERS - ), f'Lookup handler: "{handler}" not registered' + assert handler in RUNWAY_LOOKUP_HANDLERS, f'Lookup handler: "{handler}" not registered' assert len(RUNWAY_LOOKUP_HANDLERS) == len( handlers ), f"expected {len(handlers)} autoloaded handlers but found {len(RUNWAY_LOOKUP_HANDLERS)}" diff --git a/tests/unit/mock_docker/fake_api.py b/tests/unit/mock_docker/fake_api.py index b24209064..173d80ae9 100644 --- a/tests/unit/mock_docker/fake_api.py +++ b/tests/unit/mock_docker/fake_api.py @@ -1,9 +1,10 @@ """Fake Docker API.""" # cspell:disable -# flake8: noqa=D103 -# pylint: disable=consider-using-f-string,invalid-name -from typing import Any, Callable, Dict, Tuple, Union +# ruff: noqa: D103 +from __future__ import annotations + +from typing import Any, Callable from docker import constants @@ -29,7 +30,7 @@ # for clarity and readability -def get_fake_version() -> Tuple[int, Any]: +def get_fake_version() -> tuple[int, Any]: status_code = 200 response = { "ApiVersion": "1.35", @@ -64,7 +65,7 @@ def get_fake_version() -> Tuple[int, Any]: return status_code, response -def get_fake_info() -> Tuple[int, Any]: +def get_fake_info() 
-> tuple[int, Any]: status_code = 200 response = { "Containers": 1, @@ -77,23 +78,23 @@ def get_fake_info() -> Tuple[int, Any]: return status_code, response -def post_fake_auth() -> Tuple[int, Any]: +def post_fake_auth() -> tuple[int, Any]: status_code = 200 response = {"Status": "Login Succeeded", "IdentityToken": "9cbaf023786cd7"} return status_code, response -def get_fake_ping() -> Tuple[int, Any]: +def get_fake_ping() -> tuple[int, Any]: return 200, "OK" -def get_fake_search() -> Tuple[int, Any]: +def get_fake_search() -> tuple[int, Any]: status_code = 200 response = [{"Name": "busybox", "Description": "Fake Description"}] return status_code, response -def get_fake_images() -> Tuple[int, Any]: +def get_fake_images() -> tuple[int, Any]: status_code = 200 response = [ { @@ -106,7 +107,7 @@ def get_fake_images() -> Tuple[int, Any]: return status_code, response -def get_fake_image_history() -> Tuple[int, Any]: +def get_fake_image_history() -> tuple[int, Any]: status_code = 200 response = [ {"Id": "b750fe79269d", "Created": 1364102658, "CreatedBy": "/bin/bash"}, @@ -116,14 +117,14 @@ def get_fake_image_history() -> Tuple[int, Any]: return status_code, response -def post_fake_import_image() -> Tuple[int, Any]: +def post_fake_import_image() -> tuple[int, Any]: status_code = 200 response = "Import messages..." return status_code, response -def get_fake_containers() -> Tuple[int, Any]: +def get_fake_containers() -> tuple[int, Any]: status_code = 200 response = [ { @@ -137,27 +138,27 @@ def get_fake_containers() -> Tuple[int, Any]: return status_code, response -def post_fake_start_container() -> Tuple[int, Any]: +def post_fake_start_container() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_CONTAINER_ID} return status_code, response -def post_fake_resize_container() -> Tuple[int, Any]: +def post_fake_resize_container() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_CONTAINER_ID} return status_code, response -def post_fake_create_container() -> Tuple[int, Any]: +def post_fake_create_container() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_CONTAINER_ID} return status_code, response -def get_fake_inspect_container(tty: bool = False) -> Tuple[int, Any]: +def get_fake_inspect_container(tty: bool = False) -> tuple[int, Any]: status_code = 200 - response: Dict[str, Any] = { + response: dict[str, Any] = { "Id": FAKE_CONTAINER_ID, "Config": {"Labels": {"foo": "bar"}, "Privileged": True, "Tty": tty}, "ID": FAKE_CONTAINER_ID, @@ -177,7 +178,7 @@ def get_fake_inspect_container(tty: bool = False) -> Tuple[int, Any]: return status_code, response -def get_fake_inspect_image() -> Tuple[int, Any]: +def get_fake_inspect_image() -> tuple[int, Any]: status_code = 200 response = { "Id": FAKE_IMAGE_ID, @@ -210,19 +211,19 @@ def get_fake_inspect_image() -> Tuple[int, Any]: return status_code, response -def get_fake_insert_image() -> Tuple[int, Any]: +def get_fake_insert_image() -> tuple[int, Any]: status_code = 200 response = {"StatusCode": 0} return status_code, response -def get_fake_wait() -> Tuple[int, Any]: +def get_fake_wait() -> tuple[int, Any]: status_code = 200 response = {"StatusCode": 0} return status_code, response -def get_fake_logs() -> Tuple[int, Any]: +def get_fake_logs() -> tuple[int, Any]: status_code = 200 response = ( b"\x01\x00\x00\x00\x00\x00\x00\x00" @@ -233,13 +234,13 @@ def get_fake_logs() -> Tuple[int, Any]: return status_code, response -def get_fake_diff() -> Tuple[int, Any]: +def get_fake_diff() -> tuple[int, Any]: status_code = 200 response = 
[{"Path": "/test", "Kind": 1}] return status_code, response -def get_fake_events() -> Tuple[int, Any]: +def get_fake_events() -> tuple[int, Any]: status_code = 200 response = [ { @@ -252,19 +253,19 @@ def get_fake_events() -> Tuple[int, Any]: return status_code, response -def get_fake_export() -> Tuple[int, Any]: +def get_fake_export() -> tuple[int, Any]: status_code = 200 response = "Byte Stream...." return status_code, response -def post_fake_exec_create() -> Tuple[int, Any]: +def post_fake_exec_create() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_EXEC_ID} return status_code, response -def post_fake_exec_start() -> Tuple[int, Any]: +def post_fake_exec_start() -> tuple[int, Any]: status_code = 200 response = ( b"\x01\x00\x00\x00\x00\x00\x00\x11bin\nboot\ndev\netc\n" @@ -274,12 +275,12 @@ def post_fake_exec_start() -> Tuple[int, Any]: return status_code, response -def post_fake_exec_resize() -> Tuple[int, Any]: +def post_fake_exec_resize() -> tuple[int, Any]: status_code = 201 return status_code, "" -def get_fake_exec_inspect() -> Tuple[int, Any]: +def get_fake_exec_inspect() -> tuple[int, Any]: return ( 200, { @@ -301,102 +302,102 @@ def get_fake_exec_inspect() -> Tuple[int, Any]: ) -def post_fake_stop_container() -> Tuple[int, Any]: +def post_fake_stop_container() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_CONTAINER_ID} return status_code, response -def post_fake_kill_container() -> Tuple[int, Any]: +def post_fake_kill_container() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_CONTAINER_ID} return status_code, response -def post_fake_pause_container() -> Tuple[int, Any]: +def post_fake_pause_container() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_CONTAINER_ID} return status_code, response -def post_fake_unpause_container() -> Tuple[int, Any]: +def post_fake_unpause_container() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_CONTAINER_ID} return status_code, response -def post_fake_restart_container() -> Tuple[int, Any]: +def post_fake_restart_container() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_CONTAINER_ID} return status_code, response -def post_fake_rename_container() -> Tuple[int, Any]: +def post_fake_rename_container() -> tuple[int, Any]: status_code = 204 return status_code, None -def delete_fake_remove_container() -> Tuple[int, Any]: +def delete_fake_remove_container() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_CONTAINER_ID} return status_code, response -def post_fake_image_create() -> Tuple[int, Any]: +def post_fake_image_create() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_IMAGE_ID} return status_code, response -def delete_fake_remove_image() -> Tuple[int, Any]: +def delete_fake_remove_image() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_IMAGE_ID} return status_code, response -def get_fake_get_image() -> Tuple[int, Any]: +def get_fake_get_image() -> tuple[int, Any]: status_code = 200 response = "Byte Stream...." 
return status_code, response -def post_fake_load_image() -> Tuple[int, Any]: +def post_fake_load_image() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_IMAGE_ID} return status_code, response -def post_fake_commit() -> Tuple[int, Any]: +def post_fake_commit() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_CONTAINER_ID} return status_code, response -def post_fake_push() -> Tuple[int, Any]: +def post_fake_push() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_IMAGE_ID} return status_code, response -def post_fake_build_container() -> Tuple[int, Any]: +def post_fake_build_container() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_CONTAINER_ID} return status_code, response -def post_fake_tag_image() -> Tuple[int, Any]: +def post_fake_tag_image() -> tuple[int, Any]: status_code = 200 response = {"Id": FAKE_IMAGE_ID} return status_code, response -def get_fake_stats() -> Tuple[int, Any]: +def get_fake_stats() -> tuple[int, Any]: status_code = 200 response = fake_stat.OBJ return status_code, response -def get_fake_top() -> Tuple[int, Any]: +def get_fake_top() -> tuple[int, Any]: return ( 200, { @@ -417,7 +418,7 @@ def get_fake_top() -> Tuple[int, Any]: ) -def get_fake_volume_list() -> Tuple[int, Any]: +def get_fake_volume_list() -> tuple[int, Any]: status_code = 200 response = { "Volumes": [ @@ -438,7 +439,7 @@ def get_fake_volume_list() -> Tuple[int, Any]: return status_code, response -def get_fake_volume() -> Tuple[int, Any]: +def get_fake_volume() -> tuple[int, Any]: status_code = 200 response = { "Name": "perfectcherryblossom", @@ -450,23 +451,23 @@ def get_fake_volume() -> Tuple[int, Any]: return status_code, response -def fake_remove_volume() -> Tuple[int, Any]: +def fake_remove_volume() -> tuple[int, Any]: return 204, None -def post_fake_update_container() -> Tuple[int, Any]: +def post_fake_update_container() -> tuple[int, Any]: return 200, {"Warnings": []} -def post_fake_update_node() -> Tuple[int, Any]: +def post_fake_update_node() -> tuple[int, Any]: return 200, None -def post_fake_join_swarm() -> Tuple[int, Any]: +def post_fake_join_swarm() -> tuple[int, Any]: return 200, None -def get_fake_network_list() -> Tuple[int, Any]: +def get_fake_network_list() -> tuple[int, Any]: return ( 200, [ @@ -490,7 +491,7 @@ def get_fake_network_list() -> Tuple[int, Any]: "com.docker.network.bridge.default_bridge": "true", "com.docker.network.bridge.enable_icc": "true", "com.docker.network.bridge.enable_ip_masquerade": "true", - "com.docker.network.bridge.host_binding_ipv4": "0.0.0.0", + "com.docker.network.bridge.host_binding_ipv4": "0.0.0.0", # noqa: S104 "com.docker.network.bridge.name": "docker0", "com.docker.network.driver.mtu": "1500", }, @@ -499,23 +500,23 @@ def get_fake_network_list() -> Tuple[int, Any]: ) -def get_fake_network() -> Tuple[int, Any]: +def get_fake_network() -> tuple[int, Any]: return 200, get_fake_network_list()[1][0] -def post_fake_network() -> Tuple[int, Any]: +def post_fake_network() -> tuple[int, Any]: return 201, {"Id": FAKE_NETWORK_ID, "Warnings": []} -def delete_fake_network() -> Tuple[int, Any]: +def delete_fake_network() -> tuple[int, Any]: return 204, None -def post_fake_network_connect() -> Tuple[int, Any]: +def post_fake_network_connect() -> tuple[int, Any]: return 200, None -def post_fake_network_disconnect() -> Tuple[int, Any]: +def post_fake_network_disconnect() -> tuple[int, Any]: return 200, None @@ -524,145 +525,86 @@ def post_fake_network_disconnect() -> Tuple[int, Any]: if constants.IS_WINDOWS_PLATFORM: 
prefix = "http+docker://localnpipe" -fake_responses: Dict[Union[str, Tuple[str, str]], Callable[..., Tuple[int, Any]]] = { - "{0}/version".format(prefix): get_fake_version, - "{1}/{0}/version".format(CURRENT_VERSION, prefix): get_fake_version, - "{1}/{0}/info".format(CURRENT_VERSION, prefix): get_fake_info, - "{1}/{0}/auth".format(CURRENT_VERSION, prefix): post_fake_auth, - "{1}/{0}/_ping".format(CURRENT_VERSION, prefix): get_fake_ping, - "{1}/{0}/images/search".format(CURRENT_VERSION, prefix): get_fake_search, - "{1}/{0}/images/json".format(CURRENT_VERSION, prefix): get_fake_images, - "{1}/{0}/images/test_image/history".format( - CURRENT_VERSION, prefix - ): get_fake_image_history, - "{1}/{0}/images/create".format(CURRENT_VERSION, prefix): post_fake_import_image, - "{1}/{0}/containers/json".format(CURRENT_VERSION, prefix): get_fake_containers, - "{1}/{0}/containers/3cc2351ab11b/start".format( - CURRENT_VERSION, prefix - ): post_fake_start_container, - "{1}/{0}/containers/3cc2351ab11b/resize".format( - CURRENT_VERSION, prefix - ): post_fake_resize_container, - "{1}/{0}/containers/3cc2351ab11b/json".format( - CURRENT_VERSION, prefix - ): get_fake_inspect_container, - "{1}/{0}/containers/3cc2351ab11b/rename".format( - CURRENT_VERSION, prefix - ): post_fake_rename_container, - "{1}/{0}/images/e9aa60c60128/tag".format( - CURRENT_VERSION, prefix - ): post_fake_tag_image, - "{1}/{0}/containers/3cc2351ab11b/wait".format( - CURRENT_VERSION, prefix - ): get_fake_wait, - "{1}/{0}/containers/3cc2351ab11b/logs".format( - CURRENT_VERSION, prefix - ): get_fake_logs, - "{1}/{0}/containers/3cc2351ab11b/changes".format( - CURRENT_VERSION, prefix - ): get_fake_diff, - "{1}/{0}/containers/3cc2351ab11b/export".format( - CURRENT_VERSION, prefix - ): get_fake_export, - "{1}/{0}/containers/3cc2351ab11b/update".format( - CURRENT_VERSION, prefix - ): post_fake_update_container, - "{1}/{0}/containers/3cc2351ab11b/exec".format( - CURRENT_VERSION, prefix - ): post_fake_exec_create, - "{1}/{0}/exec/d5d177f121dc/start".format( - CURRENT_VERSION, prefix - ): post_fake_exec_start, - "{1}/{0}/exec/d5d177f121dc/json".format( - CURRENT_VERSION, prefix - ): get_fake_exec_inspect, - "{1}/{0}/exec/d5d177f121dc/resize".format( - CURRENT_VERSION, prefix - ): post_fake_exec_resize, - "{1}/{0}/containers/3cc2351ab11b/stats".format( - CURRENT_VERSION, prefix - ): get_fake_stats, - "{1}/{0}/containers/3cc2351ab11b/top".format(CURRENT_VERSION, prefix): get_fake_top, - "{1}/{0}/containers/3cc2351ab11b/stop".format( - CURRENT_VERSION, prefix - ): post_fake_stop_container, - "{1}/{0}/containers/3cc2351ab11b/kill".format( - CURRENT_VERSION, prefix - ): post_fake_kill_container, - "{1}/{0}/containers/3cc2351ab11b/pause".format( - CURRENT_VERSION, prefix - ): post_fake_pause_container, - "{1}/{0}/containers/3cc2351ab11b/unpause".format( - CURRENT_VERSION, prefix - ): post_fake_unpause_container, - "{1}/{0}/containers/3cc2351ab11b/restart".format( - CURRENT_VERSION, prefix - ): post_fake_restart_container, - "{1}/{0}/containers/3cc2351ab11b".format( - CURRENT_VERSION, prefix - ): delete_fake_remove_container, - "{1}/{0}/images/create".format(CURRENT_VERSION, prefix): post_fake_image_create, - "{1}/{0}/images/e9aa60c60128".format( - CURRENT_VERSION, prefix - ): delete_fake_remove_image, - "{1}/{0}/images/e9aa60c60128/get".format( - CURRENT_VERSION, prefix - ): get_fake_get_image, - "{1}/{0}/images/load".format(CURRENT_VERSION, prefix): post_fake_load_image, - "{1}/{0}/images/test_image/json".format( - CURRENT_VERSION, prefix - ): 
get_fake_inspect_image, - "{1}/{0}/images/test_image/insert".format( - CURRENT_VERSION, prefix - ): get_fake_insert_image, - "{1}/{0}/images/test_image/push".format(CURRENT_VERSION, prefix): post_fake_push, - "{1}/{0}/commit".format(CURRENT_VERSION, prefix): post_fake_commit, - "{1}/{0}/containers/create".format( - CURRENT_VERSION, prefix - ): post_fake_create_container, - "{1}/{0}/build".format(CURRENT_VERSION, prefix): post_fake_build_container, - "{1}/{0}/events".format(CURRENT_VERSION, prefix): get_fake_events, - ("{1}/{0}/volumes".format(CURRENT_VERSION, prefix), "GET"): get_fake_volume_list, - ("{1}/{0}/volumes/create".format(CURRENT_VERSION, prefix), "POST"): get_fake_volume, +fake_responses: dict[str | tuple[str, str], Callable[..., tuple[int, Any]]] = { + f"{prefix}/version": get_fake_version, + f"{prefix}/{CURRENT_VERSION}/version": get_fake_version, + f"{prefix}/{CURRENT_VERSION}/info": get_fake_info, + f"{prefix}/{CURRENT_VERSION}/auth": post_fake_auth, + f"{prefix}/{CURRENT_VERSION}/_ping": get_fake_ping, + f"{prefix}/{CURRENT_VERSION}/images/search": get_fake_search, + f"{prefix}/{CURRENT_VERSION}/images/json": get_fake_images, + f"{prefix}/{CURRENT_VERSION}/images/test_image/history": get_fake_image_history, + f"{prefix}/{CURRENT_VERSION}/containers/json": get_fake_containers, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/start": post_fake_start_container, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/resize": post_fake_resize_container, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/json": get_fake_inspect_container, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/rename": post_fake_rename_container, + f"{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/tag": post_fake_tag_image, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/wait": get_fake_wait, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/logs": get_fake_logs, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/changes": get_fake_diff, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/export": get_fake_export, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/update": post_fake_update_container, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/exec": post_fake_exec_create, + f"{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/start": post_fake_exec_start, + f"{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/json": get_fake_exec_inspect, + f"{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/resize": post_fake_exec_resize, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stats": get_fake_stats, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/top": get_fake_top, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stop": post_fake_stop_container, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/kill": post_fake_kill_container, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/pause": post_fake_pause_container, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/unpause": post_fake_unpause_container, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/restart": post_fake_restart_container, + f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b": delete_fake_remove_container, + f"{prefix}/{CURRENT_VERSION}/images/create": post_fake_image_create, + f"{prefix}/{CURRENT_VERSION}/images/e9aa60c60128": delete_fake_remove_image, + f"{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/get": get_fake_get_image, + f"{prefix}/{CURRENT_VERSION}/images/load": post_fake_load_image, + f"{prefix}/{CURRENT_VERSION}/images/test_image/json": get_fake_inspect_image, 
+ f"{prefix}/{CURRENT_VERSION}/images/test_image/insert": get_fake_insert_image, + f"{prefix}/{CURRENT_VERSION}/images/test_image/push": post_fake_push, + f"{prefix}/{CURRENT_VERSION}/commit": post_fake_commit, + f"{prefix}/{CURRENT_VERSION}/containers/create": post_fake_create_container, + f"{prefix}/{CURRENT_VERSION}/build": post_fake_build_container, + f"{prefix}/{CURRENT_VERSION}/events": get_fake_events, + (f"{prefix}/{CURRENT_VERSION}/volumes", "GET"): get_fake_volume_list, + (f"{prefix}/{CURRENT_VERSION}/volumes/create", "POST"): get_fake_volume, ( - "{1}/{0}/volumes/{2}".format(CURRENT_VERSION, prefix, FAKE_VOLUME_NAME), + f"{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}", "GET", ): get_fake_volume, ( - "{1}/{0}/volumes/{2}".format(CURRENT_VERSION, prefix, FAKE_VOLUME_NAME), + f"{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}", "DELETE", ): fake_remove_volume, ( - "{1}/{0}/nodes/{2}/update?version=1".format( - CURRENT_VERSION, prefix, FAKE_NODE_ID - ), + f"{prefix}/{CURRENT_VERSION}/nodes/{FAKE_NODE_ID}/update?version=1", "POST", ): post_fake_update_node, ( - "{1}/{0}/swarm/join".format(CURRENT_VERSION, prefix), + f"{prefix}/{CURRENT_VERSION}/swarm/join", "POST", ): post_fake_join_swarm, - ("{1}/{0}/networks".format(CURRENT_VERSION, prefix), "GET"): get_fake_network_list, + (f"{prefix}/{CURRENT_VERSION}/networks", "GET"): get_fake_network_list, ( - "{1}/{0}/networks/create".format(CURRENT_VERSION, prefix), + f"{prefix}/{CURRENT_VERSION}/networks/create", "POST", ): post_fake_network, ( - "{1}/{0}/networks/{2}".format(CURRENT_VERSION, prefix, FAKE_NETWORK_ID), + f"{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}", "GET", ): get_fake_network, ( - "{1}/{0}/networks/{2}".format(CURRENT_VERSION, prefix, FAKE_NETWORK_ID), + f"{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}", "DELETE", ): delete_fake_network, ( - "{1}/{0}/networks/{2}/connect".format(CURRENT_VERSION, prefix, FAKE_NETWORK_ID), + f"{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}/connect", "POST", ): post_fake_network_connect, ( - "{1}/{0}/networks/{2}/disconnect".format( - CURRENT_VERSION, prefix, FAKE_NETWORK_ID - ), + f"{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}/disconnect", "POST", ): post_fake_network_disconnect, } diff --git a/tests/unit/mock_docker/fake_api_client.py b/tests/unit/mock_docker/fake_api_client.py index 710055da0..bfb1b7bae 100644 --- a/tests/unit/mock_docker/fake_api_client.py +++ b/tests/unit/mock_docker/fake_api_client.py @@ -1,11 +1,12 @@ """Fake Docker API client.""" -# pylint: disable=attribute-defined-outside-init,protected-access +from __future__ import annotations + import copy -from typing import Any, Dict, Optional +from typing import Any +from unittest import mock import docker -import mock from docker.constants import DEFAULT_DOCKER_API_VERSION from . import fake_api @@ -14,18 +15,14 @@ class CopyReturnMagicMock(mock.MagicMock): """A MagicMock which deep copies every return value.""" - def _mock_call( # pylint: disable=arguments-differ - self, *args: Any, **kwargs: Any - ) -> Any: + def _mock_call(self, *args: Any, **kwargs: Any) -> Any: ret = super()._mock_call(*args, **kwargs) # type: ignore if isinstance(ret, (dict, list)): ret = copy.deepcopy(ret) # type: ignore return ret # type: ignore -def make_fake_api_client( - overrides: Optional[Dict[str, Any]] = None -) -> CopyReturnMagicMock: +def make_fake_api_client(overrides: dict[str, Any] | None = None) -> CopyReturnMagicMock: """Return non-complete fake APIClient. 
This returns most of the default cases correctly, but most arguments that @@ -61,7 +58,7 @@ def make_fake_api_client( return mock_client -def make_fake_client(overrides: Optional[Dict[str, Any]] = None) -> docker.DockerClient: +def make_fake_client(overrides: dict[str, Any] | None = None) -> docker.DockerClient: """Return a Client with a fake APIClient.""" client = docker.DockerClient(version=DEFAULT_DOCKER_API_VERSION) client.api = make_fake_api_client(overrides) diff --git a/tests/unit/mock_docker/fake_stat.py b/tests/unit/mock_docker/fake_stat.py index 98814c704..12c5d1a37 100644 --- a/tests/unit/mock_docker/fake_stat.py +++ b/tests/unit/mock_docker/fake_stat.py @@ -1,9 +1,9 @@ """Stats for fake Docker API.""" # cspell:disable -from typing import Any, Dict +from typing import Any -OBJ: Dict[str, Any] = { +OBJ: dict[str, Any] = { "read": "2015-02-11T19:20:46.667237763+02:00", "network": { "rx_bytes": 567224, diff --git a/tests/unit/module/conftest.py b/tests/unit/module/conftest.py index f1d4a6bea..406de82e4 100644 --- a/tests/unit/module/conftest.py +++ b/tests/unit/module/conftest.py @@ -10,7 +10,7 @@ from pytest_mock import MockerFixture -@pytest.fixture +@pytest.fixture() def patch_module_npm(mocker: MockerFixture) -> None: """Patch methods and functions used during init of RunwayModuleNpm.""" mocker.patch("runway.module.base.RunwayModuleNpm.check_for_npm") diff --git a/tests/unit/module/staticsite/conftest.py b/tests/unit/module/staticsite/conftest.py index acd844651..7c4bb8941 100644 --- a/tests/unit/module/staticsite/conftest.py +++ b/tests/unit/module/staticsite/conftest.py @@ -1,6 +1,5 @@ """Pytest fixtures and plugins.""" -# pylint: disable=redefined-outer-name from __future__ import annotations from pathlib import Path @@ -8,13 +7,13 @@ import pytest -@pytest.fixture(scope="function") +@pytest.fixture() def expected_yaml(local_fixtures: Path) -> Path: """Path to local fixtures expected yaml.""" return local_fixtures / "expected_yaml" -@pytest.fixture(scope="function") +@pytest.fixture() def local_fixtures() -> Path: """Local fixtures directory.""" return Path(__file__).parent / "fixtures" diff --git a/tests/unit/module/staticsite/options/test_components.py b/tests/unit/module/staticsite/options/test__components.py similarity index 66% rename from tests/unit/module/staticsite/options/test_components.py rename to tests/unit/module/staticsite/options/test__components.py index f705cada2..fd866bc65 100644 --- a/tests/unit/module/staticsite/options/test_components.py +++ b/tests/unit/module/staticsite/options/test__components.py @@ -1,15 +1,14 @@ -"""Test runway.module.staticsite.options.components.""" +"""Test runway.module.staticsite.options._components.""" -# pyright: basic from __future__ import annotations -from runway.module.staticsite.options.components import StaticSiteOptions -from runway.module.staticsite.options.models import ( +from runway.module.staticsite.options._components import StaticSiteOptions +from runway.module.staticsite.options._models import ( RunwayStaticSiteModuleOptionsDataModel, RunwayStaticSitePreBuildStepDataModel, ) -MODULE = "runway.module.staticsite.options.components" +MODULE = "runway.module.staticsite.options._components" class TestStaticSiteOptions: @@ -20,9 +19,7 @@ def test_init(self) -> None: data = RunwayStaticSiteModuleOptionsDataModel( build_output="./dist", build_steps=["runway --help"], - pre_build_steps=[ - RunwayStaticSitePreBuildStepDataModel(command="runway --help") - ], + 
pre_build_steps=[RunwayStaticSitePreBuildStepDataModel(command="runway --help")], ) obj = StaticSiteOptions(data=data) assert obj.build_output == data.build_output @@ -37,6 +34,5 @@ def test_parse_obj(self) -> None: obj = StaticSiteOptions.parse_obj({}) assert isinstance(obj.data, RunwayStaticSiteModuleOptionsDataModel) assert ( - obj.data.dict(exclude_defaults=True, exclude_none=True, exclude_unset=True) - == {} + obj.data.model_dump(exclude_defaults=True, exclude_none=True, exclude_unset=True) == {} ) diff --git a/tests/unit/module/staticsite/options/test_models.py b/tests/unit/module/staticsite/options/test__models.py similarity index 84% rename from tests/unit/module/staticsite/options/test_models.py rename to tests/unit/module/staticsite/options/test__models.py index 385596b52..223455518 100644 --- a/tests/unit/module/staticsite/options/test_models.py +++ b/tests/unit/module/staticsite/options/test__models.py @@ -1,15 +1,14 @@ -"""Test runway.module.staticsite.options.models.""" +"""Test runway.module.staticsite.options._models.""" -# pyright: basic from __future__ import annotations from pathlib import Path -from typing import Any, Dict, Optional, cast +from typing import Any, cast import pytest from pydantic import ValidationError -from runway.module.staticsite.options.models import ( +from runway.module.staticsite.options._models import ( RunwayStaticSiteExtraFileDataModel, RunwayStaticSiteModuleOptionsDataModel, RunwayStaticSitePreBuildStepDataModel, @@ -32,18 +31,15 @@ class TestRunwayStaticSiteExtraFileDataModel: ("test", None), ], ) - def test_autofill_content_type(self, expected: Optional[str], name: str) -> None: + def test_autofill_content_type(self, expected: str | None, name: str) -> None: """Test _autofill_content_type.""" assert ( - RunwayStaticSiteExtraFileDataModel(content="test", name=name).content_type - == expected + RunwayStaticSiteExtraFileDataModel(content="test", name=name).content_type == expected ) def test_init_default(self) -> None: """Test init default.""" - obj = RunwayStaticSiteExtraFileDataModel( - content="test-content", name="test-name" - ) + obj = RunwayStaticSiteExtraFileDataModel(content="test-content", name="test-name") assert not obj.content_type assert obj.content == "test-content" assert not obj.file @@ -66,7 +62,7 @@ def test_init_content_and_file(self, tmp_path: Path) -> None: def test_init_content(self) -> None: """Test init content.""" data = {"content_type": "test-data", "content": "content", "name": "test"} - obj = RunwayStaticSiteExtraFileDataModel(**data) + obj = RunwayStaticSiteExtraFileDataModel.model_validate(data) assert obj.content_type == data["content_type"] assert obj.content == data["content"] assert not obj.file @@ -75,7 +71,7 @@ def test_init_content(self) -> None: def test_init_file(self, tmp_path: Path) -> None: """Test init file.""" data = {"content_type": "test-data", "file": tmp_path, "name": "test"} - obj = RunwayStaticSiteExtraFileDataModel.parse_obj(data) + obj = RunwayStaticSiteExtraFileDataModel.model_validate(data) assert obj.content_type == data["content_type"] assert not obj.content assert obj.file == data["file"] @@ -84,13 +80,13 @@ def test_init_file(self, tmp_path: Path) -> None: @pytest.mark.parametrize( "data", [ - cast(Dict[str, str], {}), + cast(dict[str, str], {}), {"name": "test"}, {"content": "test"}, {"file": "test"}, ], ) - def test_init_required(self, data: Dict[str, Any]) -> None: + def test_init_required(self, data: dict[str, Any]) -> None: """Test init required fields.""" with pytest.raises(ValidationError): RunwayStaticSiteExtraFileDataModel(**data)
@@ -103,15 +99,18 @@ def test_init_default(self) -> None: """Test init default.""" obj = RunwayStaticSiteModuleOptionsDataModel() assert obj.build_output == "./" - assert not obj.build_steps and isinstance(obj.build_steps, list) - assert not obj.extra_files and isinstance(obj.extra_files, list) - assert not obj.pre_build_steps and isinstance(obj.pre_build_steps, list) + assert not obj.build_steps + assert isinstance(obj.build_steps, list) + assert not obj.extra_files + assert isinstance(obj.extra_files, list) + assert not obj.pre_build_steps + assert isinstance(obj.pre_build_steps, list) assert obj.source_hashing == RunwayStaticSiteSourceHashingDataModel() def test_init_extra(self) -> None: """Test init extra.""" obj = RunwayStaticSiteModuleOptionsDataModel(invalid="val") # type: ignore - assert "invalid" not in obj.dict() + assert "invalid" not in obj.model_dump() def test_init(self) -> None: """Test init.""" @@ -122,7 +121,7 @@ def test_init(self) -> None: "pre_build_steps": [{"command": "runway --help"}], "source_hashing": {"enabled": False}, } - obj = RunwayStaticSiteModuleOptionsDataModel(**data) + obj = RunwayStaticSiteModuleOptionsDataModel.model_validate(data) assert obj.build_output == data["build_output"] assert obj.build_steps == data["build_steps"] assert obj.extra_files == [ @@ -159,9 +158,7 @@ def test_init_required(self, tmp_path: Path) -> None: def test_init(self, tmp_path: Path) -> None: """Test init.""" - obj = RunwayStaticSitePreBuildStepDataModel( - command="runway --help", cwd=tmp_path - ) + obj = RunwayStaticSitePreBuildStepDataModel(command="runway --help", cwd=tmp_path) assert obj.command == "runway --help" assert obj.cwd == tmp_path @@ -181,7 +178,7 @@ def test_init_default(self) -> None: def test_init_extra(self) -> None: """Test init extra.""" with pytest.raises(ValidationError): - RunwayStaticSiteSourceHashingDataModel.parse_obj({"invalid": "test"}) + RunwayStaticSiteSourceHashingDataModel.model_validate({"invalid": "test"}) def test_init(self, tmp_path: Path) -> None: """Test init.""" @@ -190,7 +187,7 @@ def test_init(self, tmp_path: Path) -> None: "enabled": False, "parameter": "test", } - obj = RunwayStaticSiteSourceHashingDataModel(**data) + obj = RunwayStaticSiteSourceHashingDataModel.model_validate(data) assert obj.directories == [ RunwayStaticSiteSourceHashingDirectoryDataModel( **data["directories"][0] # type: ignore @@ -206,7 +203,8 @@ class TestRunwayStaticSiteSourceHashingDirectoryDataModel: def test_init_default(self, tmp_path: Path) -> None: """Test init default.""" obj = RunwayStaticSiteSourceHashingDirectoryDataModel(path=tmp_path) - assert not obj.exclusions and isinstance(obj.exclusions, list) + assert not obj.exclusions + assert isinstance(obj.exclusions, list) assert obj.path == tmp_path def test_init_extra(self, tmp_path: Path) -> None: @@ -226,6 +224,6 @@ def test_init_required(self) -> None: def test_init(self, tmp_path: Path) -> None: """Test init.""" data = {"exclusions": ["**/*.md"], "path": tmp_path} - obj = RunwayStaticSiteSourceHashingDirectoryDataModel.parse_obj(data) + obj = RunwayStaticSiteSourceHashingDirectoryDataModel.model_validate(data) assert obj.exclusions == data["exclusions"] assert obj.path == data["path"] diff --git a/tests/unit/module/staticsite/parameters/test_models.py b/tests/unit/module/staticsite/parameters/test__models.py similarity index 92% rename from tests/unit/module/staticsite/parameters/test_models.py rename to tests/unit/module/staticsite/parameters/test__models.py index ad52e66f2..7eb03c537 100644 --- 
a/tests/unit/module/staticsite/parameters/test_models.py +++ b/tests/unit/module/staticsite/parameters/test__models.py @@ -1,12 +1,11 @@ -"""Test runway.module.staticsite.parameters.models.""" +"""Test runway.module.staticsite.parameters._models.""" -# pyright: basic -from typing import Any, Dict, cast +from typing import Any, cast import pytest from pydantic import ValidationError -from runway.module.staticsite.parameters.models import ( +from runway.module.staticsite.parameters._models import ( RunwayStaticSiteCustomErrorResponseDataModel, RunwayStaticSiteLambdaFunctionAssociationDataModel, RunwayStaticSiteModuleParametersDataModel, @@ -57,12 +56,12 @@ def test_init_extra(self) -> None: @pytest.mark.parametrize( "data", [ - cast(Dict[str, Any], {}), + cast(dict[str, Any], {}), {"arn": "aws:arn:lambda:us-east-1:function:test"}, {"type": "origin-request"}, ], ) - def test_init_required(self, data: Dict[str, Any]) -> None: + def test_init_required(self, data: dict[str, Any]) -> None: """Test init required.""" with pytest.raises(ValidationError): RunwayStaticSiteLambdaFunctionAssociationDataModel.parse_obj(data) @@ -115,9 +114,7 @@ def test_init_default(self) -> None: "font-src 'self' 'unsafe-inline' 'unsafe-eval' data: https:; " "object-src 'none'; " "connect-src 'self' https://*.amazonaws.com https://*.amazoncognito.com", - "Strict-Transport-Security": "max-age=31536000; " - "includeSubdomains; " - "preload", + "Strict-Transport-Security": "max-age=31536000; includeSubdomains; preload", "Referrer-Policy": "same-origin", "X-XSS-Protection": "1; mode=block", "X-Frame-Options": "DENY", @@ -193,10 +190,7 @@ def test_init(self) -> None: } obj = RunwayStaticSiteModuleParametersDataModel(**data) # type: ignore assert obj.acmcert_arn == data["staticsite_acmcert_arn"] - assert ( - obj.additional_redirect_domains - == data["staticsite_additional_redirect_domains"] - ) + assert obj.additional_redirect_domains == data["staticsite_additional_redirect_domains"] assert obj.aliases == data["staticsite_aliases"] assert obj.auth_at_edge is data["staticsite_auth_at_edge"] assert obj.cf_disable is data["staticsite_cf_disable"] @@ -221,10 +215,7 @@ def test_init(self) -> None: assert obj.namespace == data["namespace"] assert obj.non_spa is data["staticsite_non_spa"] assert obj.oauth_scopes == data["staticsite_oauth_scopes"] - assert ( - obj.redirect_path_auth_refresh - == data["staticsite_redirect_path_auth_refresh"] - ) + assert obj.redirect_path_auth_refresh == data["staticsite_redirect_path_auth_refresh"] assert obj.redirect_path_sign_in == data["staticsite_redirect_path_sign_in"] assert obj.redirect_path_sign_out == data["staticsite_redirect_path_sign_out"] assert obj.required_group == data["staticsite_required_group"] @@ -232,9 +223,6 @@ def test_init(self) -> None: assert obj.role_boundary_arn == data["staticsite_role_boundary_arn"] assert obj.service_role == data["cloudformation_service_role"] assert obj.sign_out_url == data["staticsite_sign_out_url"] - assert ( - obj.supported_identity_providers - == data["staticsite_supported_identity_providers"] - ) + assert obj.supported_identity_providers == data["staticsite_supported_identity_providers"] assert obj.user_pool_arn == data["staticsite_user_pool_arn"] assert obj.web_acl == data["staticsite_web_acl"] diff --git a/tests/unit/module/staticsite/test_handler.py b/tests/unit/module/staticsite/test_handler.py index 23f2e01c5..059cea653 100644 --- a/tests/unit/module/staticsite/test_handler.py +++ b/tests/unit/module/staticsite/test_handler.py @@ -1,27 +1,22 @@ """Test runway.module.staticsite.handler.""" -# pylint:
disable=protected-access -# pyright: basic from __future__ import annotations import logging import platform import string -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any +from unittest.mock import Mock import pytest -from mock import Mock from runway.module.staticsite.handler import StaticSite -from runway.module.staticsite.options.components import StaticSiteOptions -from runway.module.staticsite.parameters.models import ( - RunwayStaticSiteModuleParametersDataModel, -) +from runway.module.staticsite.options import StaticSiteOptions +from runway.module.staticsite.parameters import RunwayStaticSiteModuleParametersDataModel if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture from pytest_mock import MockerFixture from runway.context import RunwayContext @@ -51,7 +46,7 @@ def test___init__(self, runway_context: RunwayContext, tmp_path: Path) -> None: assert isinstance(obj.options, StaticSiteOptions) assert obj.options == StaticSiteOptions.parse_obj({"build_output": "./dist"}) assert isinstance(obj.parameters, RunwayStaticSiteModuleParametersDataModel) - assert obj.parameters == RunwayStaticSiteModuleParametersDataModel.parse_obj( + assert obj.parameters == RunwayStaticSiteModuleParametersDataModel.model_validate( {"namespace": "test"} ) assert obj.path == tmp_path @@ -99,7 +94,7 @@ def test_create_cleanup_yaml( def test_create_dependencies_yaml( self, expected_yaml: Path, - parameters: Dict[str, Any], + parameters: dict[str, Any], runway_context: RunwayContext, test_file_number: str, tmp_path: Path, @@ -137,7 +132,7 @@ def test_create_dependencies_yaml( def test_create_staticsite_yaml( self, expected_yaml: Path, - parameters: Dict[str, Any], + parameters: dict[str, Any], runway_context: RunwayContext, test_file_number: str, tmp_path: Path, @@ -228,17 +223,13 @@ def test_ensure_valid_environment_config_exit( ) -> None: """Test _ensure_valid_environment_config.""" with pytest.raises(SystemExit): - StaticSite( - runway_context, module_root=tmp_path, parameters={"namespace": ""} - ) + StaticSite(runway_context, module_root=tmp_path, parameters={"namespace": ""}) def test_get_client_updater_variables( self, mocker: MockerFixture, runway_context: RunwayContext, tmp_path: Path ) -> None: """Test _get_client_updater_variables.""" - mock_add_url_scheme = mocker.patch( - f"{MODULE}.add_url_scheme", return_value="success" - ) + mock_add_url_scheme = mocker.patch(f"{MODULE}.add_url_scheme", return_value="success") obj = StaticSite( runway_context, module_root=tmp_path, @@ -257,31 +248,18 @@ def test_get_client_updater_variables( assert "rxref test-" in result["client_id"] assert "rxref test::" in result["distribution_domain"] assert result["oauth_scopes"] == site_stack_variables["OAuthScopes"] - assert ( - result["redirect_path_sign_in"] - == site_stack_variables["RedirectPathSignIn"] - ) - assert ( - result["redirect_path_sign_out"] - == site_stack_variables["RedirectPathSignOut"] - ) - assert ( - result["supported_identity_providers"] - == obj.parameters.supported_identity_providers - ) + assert result["redirect_path_sign_in"] == site_stack_variables["RedirectPathSignIn"] + assert result["redirect_path_sign_out"] == site_stack_variables["RedirectPathSignOut"] + assert result["supported_identity_providers"] == obj.parameters.supported_identity_providers def test_init( - self, caplog: LogCaptureFixture, runway_context: RunwayContext, tmp_path: Path + self, caplog: pytest.LogCaptureFixture, runway_context: RunwayContext, tmp_path: Path ) 
-> None: """Test init.""" caplog.set_level(logging.WARNING, logger=MODULE) - obj = StaticSite( - runway_context, module_root=tmp_path, parameters={"namespace": "test"} - ) + obj = StaticSite(runway_context, module_root=tmp_path, parameters={"namespace": "test"}) assert not obj.init() - assert ( - f"init not currently supported for {StaticSite.__name__}" in caplog.messages - ) + assert f"init not currently supported for {StaticSite.__name__}" in caplog.messages def test_plan( self, mocker: MockerFixture, runway_context: RunwayContext, tmp_path: Path @@ -298,9 +276,7 @@ def test_plan( assert not obj.plan() mock_setup_website_module.assert_called_once_with(command="plan") - @pytest.mark.parametrize( - "provided, expected", [("foo", "foo"), ("foo.bar", "foo-bar")] - ) + @pytest.mark.parametrize("provided, expected", [("foo", "foo"), ("foo.bar", "foo-bar")]) def test_sanitized_name( self, expected: str, diff --git a/tests/unit/module/test_base.py b/tests/unit/module/test_base.py index 53534ffd2..39d323b46 100644 --- a/tests/unit/module/test_base.py +++ b/tests/unit/module/test_base.py @@ -1,13 +1,10 @@ """Test runway.module.base.""" -# pylint: disable=comparison-with-callable -# comparison-with-callable is intermittent - possibly due to use of runway.compat? -# pyright: basic from __future__ import annotations import logging from contextlib import contextmanager -from typing import TYPE_CHECKING, Any, Dict, Iterator, List, cast +from typing import TYPE_CHECKING, Any, cast import pytest @@ -15,9 +12,9 @@ from runway.module.base import NPM_BIN, ModuleOptions, RunwayModule, RunwayModuleNpm if TYPE_CHECKING: + from collections.abc import Iterator from pathlib import Path - from pytest import LogCaptureFixture from pytest_mock import MockerFixture from pytest_subprocess import FakeProcess @@ -48,7 +45,7 @@ class TestRunwayModuleNpm: """Test runway.module.base.RunwayModuleNpm.""" def test_check_for_npm_missing( - self, caplog: LogCaptureFixture, mocker: MockerFixture + self, caplog: pytest.LogCaptureFixture, mocker: MockerFixture ) -> None: """Test check_for_npm missing.""" caplog.set_level(logging.ERROR, logger=MODULE) @@ -74,9 +71,7 @@ def test_init_npm_not_found( mock_check_for_npm = mocker.patch.object( RunwayModuleNpm, "check_for_npm", side_effect=NpmNotFound ) - mock_warn_on_boto_env_vars = mocker.patch.object( - RunwayModuleNpm, "warn_on_boto_env_vars" - ) + mock_warn_on_boto_env_vars = mocker.patch.object(RunwayModuleNpm, "warn_on_boto_env_vars") with pytest.raises(NpmNotFound): RunwayModuleNpm(runway_context, module_root=tmp_path) mock_check_for_npm.assert_called_once() @@ -87,9 +82,7 @@ def test_init( ) -> None: """Test __init__.""" mock_check_for_npm = mocker.patch.object(RunwayModuleNpm, "check_for_npm") - mock_warn_on_boto_env_vars = mocker.patch.object( - RunwayModuleNpm, "warn_on_boto_env_vars" - ) + mock_warn_on_boto_env_vars = mocker.patch.object(RunwayModuleNpm, "warn_on_boto_env_vars") obj = RunwayModuleNpm( runway_context, module_root=tmp_path, @@ -110,7 +103,7 @@ def test_init( def test_log_npm_command( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path, @@ -130,7 +123,7 @@ def test_log_npm_command( @pytest.mark.parametrize("colorize", [True, False]) def test_npm_install_ci( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, colorize: bool, fake_process: FakeProcess, mocker: MockerFixture, @@ -144,7 +137,7 @@ def test_npm_install_ci( 
mocker.patch.object(RunwayModuleNpm, "warn_on_boto_env_vars") runway_context.env.ci = True runway_context.env.vars["RUNWAY_COLORIZE"] = str(colorize) - cmd: List[Any] = [NPM_BIN, "ci"] + cmd: list[Any] = [NPM_BIN, "ci"] if not colorize: cmd.append("--no-color") fake_process.register_subprocess(cmd, returncode=0) @@ -166,7 +159,7 @@ def test_npm_install_ci( ) def test_npm_install_install( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, colorize: bool, fake_process: FakeProcess, is_noninteractive: bool, @@ -182,7 +175,7 @@ def test_npm_install_install( mocker.patch.object(RunwayModuleNpm, "warn_on_boto_env_vars") runway_context.env.ci = is_noninteractive runway_context.env.vars["RUNWAY_COLORIZE"] = str(colorize) - cmd: List[Any] = [NPM_BIN, "install"] + cmd: list[Any] = [NPM_BIN, "install"] if not colorize: cmd.append("--no-color") fake_process.register_subprocess(cmd, returncode=0) @@ -192,7 +185,7 @@ def test_npm_install_install( def test_npm_install_skip( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path, @@ -208,7 +201,7 @@ def test_npm_install_skip( def test_package_json_missing( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path, @@ -220,12 +213,12 @@ def test_package_json_missing( obj = RunwayModuleNpm(context=runway_context, module_root=tmp_path) assert obj.package_json_missing() - assert ["module is missing package.json"] == caplog.messages + assert caplog.messages == ["module is missing package.json"] (tmp_path / "package.json").touch() assert not obj.package_json_missing() - def test_warn_on_boto_env_vars(self, caplog: LogCaptureFixture) -> None: + def test_warn_on_boto_env_vars(self, caplog: pytest.LogCaptureFixture) -> None: """Test warn_on_boto_env_vars.""" caplog.set_level(logging.WARNING, logger=MODULE) RunwayModuleNpm.warn_on_boto_env_vars({"AWS_DEFAULT_PROFILE": "something"}) @@ -238,13 +231,13 @@ def test_warn_on_boto_env_vars(self, caplog: LogCaptureFixture) -> None: @pytest.mark.parametrize( "env_vars", [ - cast(Dict[str, str], {}), + cast(dict[str, str], {}), {"AWS_PROFILE": "something"}, {"AWS_DEFAULT_PROFILE": "something", "AWS_PROFILE": "something"}, ], ) def test_warn_on_boto_env_vars_no_warn( - self, caplog: LogCaptureFixture, env_vars: Dict[str, str] + self, caplog: pytest.LogCaptureFixture, env_vars: dict[str, str] ) -> None: """Test warn_on_boto_env_vars no warn.""" caplog.set_level(logging.WARNING, logger=MODULE) @@ -259,9 +252,7 @@ def test_warn_on_boto_env_vars_no_warn( class TestRunwayModule: """Test runway.module.base.RunwayModule.""" - def test___init___default( - self, runway_context: MockRunwayContext, tmp_path: Path - ) -> None: + def test___init___default(self, runway_context: MockRunwayContext, tmp_path: Path) -> None: """Test __init__ default values.""" obj = RunwayModule(runway_context, module_root=tmp_path) assert not obj.explicitly_enabled diff --git a/tests/unit/module/test_cdk.py b/tests/unit/module/test_cdk.py index 1265d5a0c..c18bfb263 100644 --- a/tests/unit/module/test_cdk.py +++ b/tests/unit/module/test_cdk.py @@ -1,16 +1,13 @@ """Test runway.module.cdk.""" -# pylint: disable=unused-argument -# pyright: basic from __future__ import annotations import logging from subprocess import CalledProcessError -from typing import TYPE_CHECKING, Any, Dict, List, Optional -from unittest.mock import call +from typing import TYPE_CHECKING, Any +from 
unittest.mock import Mock, call import pytest -from mock import Mock from runway.config.models.runway.options.cdk import RunwayCdkModuleOptionsDataModel from runway.module.cdk import CloudDevelopmentKit, CloudDevelopmentKitOptions @@ -18,7 +15,6 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture from pytest_mock import MockerFixture from pytest_subprocess import FakeProcess from pytest_subprocess.fake_popen import FakePopen @@ -35,7 +31,7 @@ class TestCloudDevelopmentKit: def test_cdk_bootstrap( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: RunwayContext, tmp_path: Path, @@ -66,24 +62,20 @@ def test_cdk_bootstrap_raise_called_process_error( ) -> None: """Test cdk_bootstrap raise CalledProcessError.""" mocker.patch.object(CloudDevelopmentKit, "gen_cmd") - mocker.patch( - f"{MODULE}.run_module_command", side_effect=CalledProcessError(1, "") - ) + mocker.patch(f"{MODULE}.run_module_command", side_effect=CalledProcessError(1, "")) with pytest.raises(CalledProcessError): CloudDevelopmentKit(runway_context, module_root=tmp_path).cdk_bootstrap() def test_cdk_deploy( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: RunwayContext, tmp_path: Path, ) -> None: """Test cdk_deploy.""" caplog.set_level(logging.INFO, logger=MODULE) - mock_gen_cmd = mocker.patch.object( - CloudDevelopmentKit, "gen_cmd", return_value=["deploy"] - ) + mock_gen_cmd = mocker.patch.object(CloudDevelopmentKit, "gen_cmd", return_value=["deploy"]) mock_run_module_command = mocker.patch(f"{MODULE}.run_module_command") obj = CloudDevelopmentKit(runway_context, module_root=tmp_path) assert not obj.cdk_deploy() @@ -105,24 +97,20 @@ def test_cdk_deploy_raise_called_process_error( ) -> None: """Test cdk_deploy raise CalledProcessError.""" mocker.patch.object(CloudDevelopmentKit, "gen_cmd") - mocker.patch( - f"{MODULE}.run_module_command", side_effect=CalledProcessError(1, "") - ) + mocker.patch(f"{MODULE}.run_module_command", side_effect=CalledProcessError(1, "")) with pytest.raises(CalledProcessError): CloudDevelopmentKit(runway_context, module_root=tmp_path).cdk_deploy() def test_cdk_destroy( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: RunwayContext, tmp_path: Path, ) -> None: """Test cdk_destroy.""" caplog.set_level(logging.INFO, logger=MODULE) - mock_gen_cmd = mocker.patch.object( - CloudDevelopmentKit, "gen_cmd", return_value=["destroy"] - ) + mock_gen_cmd = mocker.patch.object(CloudDevelopmentKit, "gen_cmd", return_value=["destroy"]) mock_run_module_command = mocker.patch(f"{MODULE}.run_module_command") obj = CloudDevelopmentKit(runway_context, module_root=tmp_path) assert not obj.cdk_destroy() @@ -144,30 +132,24 @@ def test_cdk_destroy_raise_called_process_error( ) -> None: """Test cdk_destroy raise CalledProcessError.""" mocker.patch.object(CloudDevelopmentKit, "gen_cmd") - mocker.patch( - f"{MODULE}.run_module_command", side_effect=CalledProcessError(1, "") - ) + mocker.patch(f"{MODULE}.run_module_command", side_effect=CalledProcessError(1, "")) with pytest.raises(CalledProcessError): CloudDevelopmentKit(runway_context, module_root=tmp_path).cdk_destroy() def test_cdk_diff( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: RunwayContext, tmp_path: Path, ) -> None: """Test cdk_diff.""" caplog.set_level(logging.INFO, logger=MODULE) - mock_gen_cmd = 
mocker.patch.object( - CloudDevelopmentKit, "gen_cmd", return_value=["diff"] - ) + mock_gen_cmd = mocker.patch.object(CloudDevelopmentKit, "gen_cmd", return_value=["diff"]) mock_run_module_command = mocker.patch(f"{MODULE}.run_module_command") obj = CloudDevelopmentKit(runway_context, module_root=tmp_path) assert not obj.cdk_diff() - mock_gen_cmd.assert_called_once_with( - "diff", args_list=None, include_context=True - ) + mock_gen_cmd.assert_called_once_with("diff", args_list=None, include_context=True) mock_run_module_command.assert_called_once_with( cmd_list=mock_gen_cmd.return_value, env_vars=runway_context.env.vars, @@ -178,9 +160,7 @@ def test_cdk_diff( assert "plan (in progress)" in logs assert "plan (complete)" in logs assert not obj.cdk_diff("stack_name") - mock_gen_cmd.assert_called_with( - "diff", args_list=["stack_name"], include_context=True - ) + mock_gen_cmd.assert_called_with("diff", args_list=["stack_name"], include_context=True) @pytest.mark.parametrize("return_code", [1, 2]) def test_cdk_diff_catch_called_process_error_sys_exit( @@ -208,9 +188,7 @@ def test_cdk_list( tmp_path: Path, ) -> None: """Test cdk_list.""" - mock_gen_cmd = mocker.patch.object( - CloudDevelopmentKit, "gen_cmd", return_value=["list"] - ) + mock_gen_cmd = mocker.patch.object(CloudDevelopmentKit, "gen_cmd", return_value=["list"]) fake_process.register_subprocess( mock_gen_cmd.return_value, returncode=0, stdout="Stack0\nStack1" ) @@ -227,12 +205,8 @@ def test_cdk_list_empty( tmp_path: Path, ) -> None: """Test cdk_list empty.""" - mock_gen_cmd = mocker.patch.object( - CloudDevelopmentKit, "gen_cmd", return_value=["list"] - ) - fake_process.register_subprocess( - mock_gen_cmd.return_value, returncode=0, stdout="" - ) + mock_gen_cmd = mocker.patch.object(CloudDevelopmentKit, "gen_cmd", return_value=["list"]) + fake_process.register_subprocess(mock_gen_cmd.return_value, returncode=0, stdout="") obj = CloudDevelopmentKit(runway_context, module_root=tmp_path) assert obj.cdk_list() == [""] assert fake_process.call_count(mock_gen_cmd.return_value) == 1 @@ -245,9 +219,7 @@ def test_cdk_list_raise_called_process_error( tmp_path: Path, ) -> None: """Test cdk_list raise CalledProcessError.""" - mock_gen_cmd = mocker.patch.object( - CloudDevelopmentKit, "gen_cmd", return_value=["list"] - ) + mock_gen_cmd = mocker.patch.object(CloudDevelopmentKit, "gen_cmd", return_value=["list"]) fake_process.register_subprocess( mock_gen_cmd.return_value, returncode=1, @@ -271,7 +243,7 @@ def test_cdk_list_raise_called_process_error( def test_cli_args( self, debug: bool, - expected: List[str], + expected: list[str], no_color: bool, tmp_path: Path, verbose: bool, @@ -299,9 +271,9 @@ def test_cli_args( ) def test_cli_args_context( self, - expected: List[str], + expected: list[str], runway_context: RunwayContext, - parameters: Dict[str, Any], + parameters: dict[str, Any], tmp_path: Path, ) -> None: """Test cli_args_context.""" @@ -409,10 +381,10 @@ def test_destroy( ) def test_gen_cmd( self, - args_list: Optional[List[str]], + args_list: list[str] | None, command: CdkCommandTypeDef, env_ci: bool, - expected: List[str], + expected: list[str], include_context: bool, mocker: MockerFixture, runway_context: RunwayContext, @@ -420,9 +392,7 @@ def test_gen_cmd( ) -> None: """Test gen_cmd.""" mocker.patch.object(CloudDevelopmentKit, "cli_args", ["cli_args"]) - mocker.patch.object( - CloudDevelopmentKit, "cli_args_context", ["cli_args_context"] - ) + mocker.patch.object(CloudDevelopmentKit, "cli_args_context", ["cli_args_context"]) 
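# NOTE: the annotation rewrite running through these hunks
# (e.g. Optional[List[str]] -> list[str] | None) follows PEP 585/604.
# It is safe on Python < 3.10 only because these modules declare
# ``from __future__ import annotations``, which keeps annotations
# unevaluated at runtime. A minimal, runnable sketch of the pattern;
# the function and its argv layout are illustrative, not from the diff:
from __future__ import annotations


def gen_cmd_sketch(command: str, args_list: list[str] | None = None) -> list[str]:
    """Build an argv list; ``args_list`` may be omitted (old ``Optional[List[str]]``)."""
    return ["cdk", command, *(args_list or [])]


assert gen_cmd_sketch("deploy") == ["cdk", "deploy"]
assert gen_cmd_sketch("diff", ["stack"]) == ["cdk", "diff", "stack"]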
generate_node_command = mocker.patch( f"{MODULE}.generate_node_command", return_value=["success"] ) @@ -495,16 +465,14 @@ def test_plan( def test_run_build_steps_empty( self, - caplog: LogCaptureFixture, - fake_process: FakeProcess, + caplog: pytest.LogCaptureFixture, + fake_process: FakeProcess, # noqa: ARG002 runway_context: RunwayContext, tmp_path: Path, ) -> None: """Test run_build_steps.""" caplog.set_level(logging.INFO, logger=MODULE) - obj = CloudDevelopmentKit( - runway_context, module_root=tmp_path, options={"build_steps": []} - ) + obj = CloudDevelopmentKit(runway_context, module_root=tmp_path, options={"build_steps": []}) assert not obj.run_build_steps() logs = "\n".join(caplog.messages) assert "build steps (in progress)" not in logs @@ -512,10 +480,10 @@ def test_run_build_steps_empty( def test_run_build_steps_linux( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, fake_process: FakeProcess, mocker: MockerFixture, - platform_linux: None, + platform_linux: None, # noqa: ARG002 runway_context: RunwayContext, tmp_path: Path, ) -> None: @@ -535,9 +503,9 @@ def test_run_build_steps_linux( def test_run_build_steps_raise_file_not_found( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, fake_process: FakeProcess, - platform_linux: None, + platform_linux: None, # noqa: ARG002 runway_context: RunwayContext, tmp_path: Path, ) -> None: @@ -561,7 +529,7 @@ def _callback(process: FakePopen) -> None: def test_run_build_steps_raise_called_process_error( self, fake_process: FakeProcess, - platform_linux: None, + platform_linux: None, # noqa: ARG002 runway_context: RunwayContext, tmp_path: Path, ) -> None: @@ -577,10 +545,10 @@ def test_run_build_steps_raise_called_process_error( def test_run_build_steps_windows( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, fake_process: FakeProcess, mocker: MockerFixture, - platform_windows: None, + platform_windows: None, # noqa: ARG002 runway_context: RunwayContext, tmp_path: Path, ) -> None: @@ -611,7 +579,7 @@ def test_run_build_steps_windows( ) def test_skip( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, expected: bool, explicitly_enabled: bool, mocker: MockerFixture, @@ -637,9 +605,7 @@ def test_skip( if package_json_missing: assert "skipped; package.json" in "\n".join(caplog.messages) elif not explicitly_enabled: - assert "skipped; environment required but not defined" in "\n".join( - caplog.messages - ) + assert "skipped; environment required but not defined" in "\n".join(caplog.messages) class TestCloudDevelopmentKitOptions: @@ -659,4 +625,4 @@ def test_parse_obj(self) -> None: assert isinstance(obj.data, RunwayCdkModuleOptionsDataModel) assert obj.data.build_steps == config["build_steps"] assert obj.data.skip_npm_ci == config["skip_npm_ci"] - assert "key" not in obj.data.dict() + assert "key" not in obj.data.model_dump() diff --git a/tests/unit/module/test_cloudformation.py b/tests/unit/module/test_cloudformation.py index 029e98a5b..ceead2e96 100644 --- a/tests/unit/module/test_cloudformation.py +++ b/tests/unit/module/test_cloudformation.py @@ -1,9 +1,8 @@ """Test runway.module.cloudformation.""" -# pyright: basic from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any from runway.core.components import DeployEnvironment from runway.module.cloudformation import CloudFormation @@ -25,16 +24,14 @@ class TestCloudFormation: """Test runway.module.cloudformation.CloudFormation.""" @property - def 
generic_parameters(self) -> Dict[str, Any]: + def generic_parameters(self) -> dict[str, Any]: """Return generic module options.""" return {"test_key": "test-value"} @staticmethod def get_context(name: str = "test", region: str = "us-east-1") -> MockRunwayContext: """Create a basic Runway context object.""" - context = MockRunwayContext( - deploy_environment=DeployEnvironment(explicit_name=name) - ) + context = MockRunwayContext(deploy_environment=DeployEnvironment(explicit_name=name)) context.env.aws_region = region return context diff --git a/tests/unit/module/test_k8s.py b/tests/unit/module/test_k8s.py index 0bf296bda..687993022 100644 --- a/tests/unit/module/test_k8s.py +++ b/tests/unit/module/test_k8s.py @@ -1,11 +1,10 @@ """Test runway.module.k8s.""" -# pyright: basic from __future__ import annotations import logging from subprocess import CalledProcessError -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING import pytest import yaml @@ -17,7 +16,6 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture from pytest_mock import MockerFixture from pytest_subprocess import FakeProcess @@ -86,9 +84,9 @@ def test_destroy( ) def test_gen_cmd( self, - args_list: Optional[List[str]], + args_list: list[str] | None, command: KubectlCommandTypeDef, - expected: List[str], + expected: list[str], mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path, @@ -107,7 +105,7 @@ def test_gen_cmd( def test_init( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, runway_context: MockRunwayContext, tmp_path: Path, ) -> None: @@ -122,17 +120,13 @@ def test_kbenv( ) -> None: """Test kbenv.""" mock_env_mgr = mocker.patch(f"{MODULE}.KBEnvManager", return_value="success") - overlay_path = mocker.patch( - f"{MODULE}.K8sOptions.overlay_path", tmp_path / "overlay" - ) - assert ( - K8s(runway_context, module_root=tmp_path).kbenv == mock_env_mgr.return_value - ) + overlay_path = mocker.patch(f"{MODULE}.K8sOptions.overlay_path", tmp_path / "overlay") + assert K8s(runway_context, module_root=tmp_path).kbenv == mock_env_mgr.return_value mock_env_mgr.assert_called_once_with(tmp_path, overlay_path=overlay_path) def test_kubectl_apply( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path, @@ -161,9 +155,7 @@ def test_kubectl_apply_raise_called_process_error( ) -> None: """Test kubectl_apply raise CalledProcessError.""" mocker.patch.object(K8s, "gen_cmd") - mocker.patch( - f"{MODULE}.run_module_command", side_effect=CalledProcessError(1, "") - ) + mocker.patch(f"{MODULE}.run_module_command", side_effect=CalledProcessError(1, "")) with pytest.raises(CalledProcessError): K8s(runway_context, module_root=tmp_path).kubectl_apply() @@ -180,9 +172,7 @@ def test_kubectl_bin_option( self, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path ) -> None: """Test kubectl_bin.""" - obj = K8s( - runway_context, module_root=tmp_path, options={"kubectl_version": "1.22.0"} - ) + obj = K8s(runway_context, module_root=tmp_path, options={"kubectl_version": "1.22.0"}) mock_install = mocker.patch.object(obj.kbenv, "install", return_value="success") assert obj.kubectl_bin == mock_install.return_value mock_install.assert_called_once_with("1.22.0") @@ -193,9 +183,7 @@ def test_kubectl_bin_handle_version_not_specified( """Test kubectl_bin.""" which = mocker.patch(f"{MODULE}.which", return_value=True) obj = K8s(runway_context, 
module_root=tmp_path) - mocker.patch.object( - obj.kbenv, "install", side_effect=KubectlVersionNotSpecified - ) + mocker.patch.object(obj.kbenv, "install", side_effect=KubectlVersionNotSpecified) assert obj.kubectl_bin == "kubectl" which.assert_called_once_with("kubectl") @@ -205,16 +193,14 @@ def test_kubectl_bin_handle_version_not_specified_exit( """Test kubectl_bin.""" which = mocker.patch(f"{MODULE}.which", return_value=False) obj = K8s(runway_context, module_root=tmp_path) - mocker.patch.object( - obj.kbenv, "install", side_effect=KubectlVersionNotSpecified - ) + mocker.patch.object(obj.kbenv, "install", side_effect=KubectlVersionNotSpecified) with pytest.raises(SystemExit): assert obj.kubectl_bin which.assert_called_once_with("kubectl") def test_kubectl_delete( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path, @@ -243,15 +229,13 @@ def test_kubectl_delete_raise_called_process_error( ) -> None: """Test kubectl_delete raise CalledProcessError.""" mocker.patch.object(K8s, "gen_cmd") - mocker.patch( - f"{MODULE}.run_module_command", side_effect=CalledProcessError(1, "") - ) + mocker.patch(f"{MODULE}.run_module_command", side_effect=CalledProcessError(1, "")) with pytest.raises(CalledProcessError): K8s(runway_context, module_root=tmp_path).kubectl_delete() def test_kubectl_kustomize( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, fake_process: FakeProcess, mocker: MockerFixture, runway_context: MockRunwayContext, @@ -261,15 +245,9 @@ def test_kubectl_kustomize( caplog.set_level(logging.DEBUG, logger=MODULE) data = {"key": "val"} data_string = yaml.dump(data, indent=2) - gen_cmd = mocker.patch.object( - K8s, "gen_cmd", return_value=["kubectl", "kustomize"] - ) - fake_process.register_subprocess( - gen_cmd.return_value, stdout=data_string, returncode=0 - ) - assert ( - K8s(runway_context, module_root=tmp_path).kubectl_kustomize() == data_string - ) + gen_cmd = mocker.patch.object(K8s, "gen_cmd", return_value=["kubectl", "kustomize"]) + fake_process.register_subprocess(gen_cmd.return_value, stdout=data_string, returncode=0) + assert K8s(runway_context, module_root=tmp_path).kubectl_kustomize() == data_string assert fake_process.call_count(gen_cmd.return_value) == 1 logs = "\n".join(caplog.messages) assert f"kustomized yaml generated by kubectl:\n\n{data_string}" in logs @@ -282,9 +260,7 @@ def test_kubectl_kustomize_raise_called_process_error( tmp_path: Path, ) -> None: """Test kubectl_kustomize.""" - gen_cmd = mocker.patch.object( - K8s, "gen_cmd", return_value=["kubectl", "kustomize"] - ) + gen_cmd = mocker.patch.object(K8s, "gen_cmd", return_value=["kubectl", "kustomize"]) fake_process.register_subprocess(gen_cmd.return_value, returncode=1) with pytest.raises(CalledProcessError): assert K8s(runway_context, module_root=tmp_path).kubectl_kustomize() @@ -302,7 +278,7 @@ def test_skip(self, runway_context: MockRunwayContext, tmp_path: Path) -> None: @pytest.mark.parametrize("skip", [False, True]) def test_plan( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: MockRunwayContext, skip: bool, @@ -311,9 +287,7 @@ def test_plan( """Test plan.""" caplog.set_level(logging.INFO, logger=MODULE) mocker.patch.object(K8s, "skip", skip) - kubectl_kustomize = mocker.patch.object( - K8s, "kubectl_kustomize", return_value="success" - ) + kubectl_kustomize = mocker.patch.object(K8s, "kubectl_kustomize", return_value="success") 
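# NOTE: the pytest-subprocess pattern reflowed in this hunk works by
# registering the exact argv the code under test will spawn, then asserting
# on the fake's call count. A minimal sketch, assuming only the
# ``fake_process`` fixture from pytest-subprocess (the test itself is
# illustrative, not part of the diff):
from __future__ import annotations

import subprocess

from pytest_subprocess import FakeProcess


def test_kustomize_sketch(fake_process: FakeProcess) -> None:
    fake_process.register_subprocess(
        ["kubectl", "kustomize"], stdout="key: val\n", returncode=0
    )
    # pytest-subprocess patches subprocess.Popen, so check_output is intercepted.
    assert subprocess.check_output(["kubectl", "kustomize"], text=True) == "key: val\n"
    assert fake_process.call_count(["kubectl", "kustomize"]) == 1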
assert not K8s(runway_context, module_root=tmp_path).plan() if skip: kubectl_kustomize.assert_not_called() @@ -321,8 +295,7 @@ def test_plan( kubectl_kustomize.assert_called_once_with() logs = "\n".join(caplog.messages) assert ( - f"kustomized yaml generated by kubectl:\n\n{kubectl_kustomize.return_value}" - in logs + f"kustomized yaml generated by kubectl:\n\n{kubectl_kustomize.return_value}" in logs ) @@ -348,18 +321,13 @@ def test_gen_overlay_dirs(self) -> None: (["test2/kustomization.yaml"], "test"), ], ) - def test_get_overlay_dir( - self, expected: str, files: List[str], tmp_path: Path - ) -> None: + def test_get_overlay_dir(self, expected: str, files: list[str], tmp_path: Path) -> None: """Test get_overlay_dir.""" for f in files: tmp_file = tmp_path / f tmp_file.parent.mkdir(parents=True, exist_ok=True) tmp_file.touch() - assert ( - K8sOptions.get_overlay_dir(tmp_path, "test", "us-east-1") - == tmp_path / expected - ) + assert K8sOptions.get_overlay_dir(tmp_path, "test", "us-east-1") == tmp_path / expected def test_kustomize_config( self, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path @@ -367,9 +335,7 @@ def test_kustomize_config( """Test kustomize_config.""" overlay_path = tmp_path / "overlays" / "test" mocker.patch.object(K8sOptions, "overlay_path", overlay_path) - obj = K8sOptions.parse_obj( - deploy_environment=runway_context.env, obj={}, path=tmp_path - ) + obj = K8sOptions.parse_obj(deploy_environment=runway_context.env, obj={}, path=tmp_path) assert obj.kustomize_config == overlay_path / "kustomization.yaml" def test_overlay_path_found( @@ -380,9 +346,7 @@ def test_overlay_path_found( mock_get_overlay_dir = mocker.patch.object( K8sOptions, "get_overlay_dir", return_value=overlay_path ) - obj = K8sOptions.parse_obj( - deploy_environment=runway_context.env, obj={}, path=tmp_path - ) + obj = K8sOptions.parse_obj(deploy_environment=runway_context.env, obj={}, path=tmp_path) assert obj.overlay_path == overlay_path mock_get_overlay_dir.assert_called_once_with( path=tmp_path / "overlays", @@ -390,9 +354,7 @@ def test_overlay_path_found( region=runway_context.env.aws_region, ) - def test_overlay_path_provided( - self, runway_context: MockRunwayContext, tmp_path: Path - ) -> None: + def test_overlay_path_provided(self, runway_context: MockRunwayContext, tmp_path: Path) -> None: """Test overlay_path provided.""" overlay_path = tmp_path / "overlays" / "test" obj = K8sOptions.parse_obj( @@ -405,9 +367,7 @@ def test_overlay_path_provided( def test_parse_obj(self, runway_context: MockRunwayContext, tmp_path: Path) -> None: """Test parse_obj.""" config = {"kubectl_version": "0.13.0"} - obj = K8sOptions.parse_obj( - deploy_environment=runway_context.env, obj=config, path=tmp_path - ) + obj = K8sOptions.parse_obj(deploy_environment=runway_context.env, obj=config, path=tmp_path) assert isinstance(obj.data, RunwayK8sModuleOptionsDataModel) assert obj.data.kubectl_version == config["kubectl_version"] assert not obj.data.overlay_path diff --git a/tests/unit/module/test_serverless.py b/tests/unit/module/test_serverless.py index 6a3ab4670..0afc21f90 100644 --- a/tests/unit/module/test_serverless.py +++ b/tests/unit/module/test_serverless.py @@ -1,15 +1,15 @@ """Test runway.module.serverless.""" -# pyright: basic, reportFunctionMemberAccess=none +# pyright: reportFunctionMemberAccess=none from __future__ import annotations import logging from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast +from typing import TYPE_CHECKING, Any, 
cast +from unittest.mock import ANY, MagicMock, Mock, call import pytest import yaml -from mock import ANY, MagicMock, Mock, call from pydantic import ValidationError from runway.config.models.runway.options.serverless import ( @@ -23,7 +23,6 @@ ) if TYPE_CHECKING: - from pytest import LogCaptureFixture from pytest_mock import MockerFixture from pytest_subprocess.fake_process import FakeProcess @@ -40,9 +39,7 @@ class TestServerless: def test___init__(self, runway_context: MockRunwayContext, tmp_path: Path) -> None: """Test __init__ and the attributes set in __init__.""" - obj = Serverless( - runway_context, module_root=tmp_path, options={"skip_npm_ci": True} - ) + obj = Serverless(runway_context, module_root=tmp_path, options={"skip_npm_ci": True}) assert isinstance(obj.options, ServerlessOptions) assert obj.region == runway_context.env.aws_region assert obj.stage == runway_context.env.name @@ -56,7 +53,7 @@ def test___init__(self, runway_context: MockRunwayContext, tmp_path: Path) -> No def test__deploy_package( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: MockRunwayContext, tempfile_temporary_directory: MagicMock, @@ -65,9 +62,7 @@ def test__deploy_package( """Test _deploy_package.""" caplog.set_level(logging.INFO, logger=MODULE) sls_deploy = mocker.patch.object(Serverless, "sls_deploy") - assert not Serverless( # pylint: disable=protected-access - runway_context, module_root=tmp_path - )._deploy_package() + assert not Serverless(runway_context, module_root=tmp_path)._deploy_package() tempfile_temporary_directory.assert_not_called() sls_deploy.assert_called_once_with() assert f"{tmp_path.name}:deploy (in progress)" in caplog.messages @@ -75,7 +70,7 @@ def test__deploy_package( def test__deploy_package_promotezip( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: MockRunwayContext, tempfile_temporary_directory: MagicMock, @@ -84,25 +79,17 @@ def test__deploy_package_promotezip( """Test _deploy_package.""" caplog.set_level(logging.INFO, logger=MODULE) artifact = Mock(package_path=tmp_path) - artifact_class = mocker.patch( - f"{MODULE}.ServerlessArtifact", return_value=artifact - ) + artifact_class = mocker.patch(f"{MODULE}.ServerlessArtifact", return_value=artifact) sls_deploy = mocker.patch.object(Serverless, "sls_deploy") - sls_package = mocker.patch.object( - Serverless, "sls_package", return_value=str(tmp_path) - ) - sls_print = mocker.patch.object( - Serverless, "sls_print", return_value="print output" - ) + sls_package = mocker.patch.object(Serverless, "sls_package", return_value=str(tmp_path)) + sls_print = mocker.patch.object(Serverless, "sls_print", return_value="print output") obj = Serverless( runway_context, module_root=tmp_path, options={"promotezip": {"bucketname": "test-bucket"}}, ) - assert not obj._deploy_package() # pylint: disable=protected-access - tempfile_temporary_directory.assert_called_once_with( - dir=runway_context.work_dir - ) + assert not obj._deploy_package() + tempfile_temporary_directory.assert_called_once_with(dir=runway_context.work_dir) sls_print.assert_called_once() artifact_class.assert_called_once_with( runway_context, @@ -284,13 +271,12 @@ def test_env_file(self, runway_context: MockRunwayContext, tmp_path: Path) -> No def test_extend_serverless_yml( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path, ) -> None: """Test 
extend_serverless_yml.""" - # pylint: disable=no-member mock_merge = mocker.patch("runway.module.serverless.merge_dicts") caplog.set_level(logging.DEBUG, logger="runway") mock_func = MagicMock() @@ -312,14 +298,10 @@ def test_extend_serverless_yml( tmp_file = obj.options.update_args.call_args[0][1] # 'no way to check the prefix since it will be a uuid' assert tmp_file.endswith(".tmp.serverless.yml") - assert not ( - tmp_path / tmp_file - ).exists(), 'should always be deleted after calling "func"' + assert not (tmp_path / tmp_file).exists(), 'should always be deleted after calling "func"' caplog.clear() - mocker.patch( - "pathlib.Path.unlink", MagicMock(side_effect=OSError("test OSError")) - ) + mocker.patch("pathlib.Path.unlink", MagicMock(side_effect=OSError("test OSError"))) assert not obj.extend_serverless_yml(mock_func) assert ( f"{tmp_path.name}:encountered an error when trying to delete the " @@ -373,7 +355,7 @@ def test_gen_cmd( def test_init( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, runway_context: MockRunwayContext, tmp_path: Path, ) -> None: @@ -388,7 +370,7 @@ def test_init( def test_plan( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, runway_context: MockRunwayContext, tmp_path: Path, ) -> None: @@ -396,13 +378,11 @@ def test_plan( caplog.set_level(logging.INFO, logger="runway") obj = Serverless(runway_context, module_root=tmp_path) assert not obj.plan() - assert [ - f"{tmp_path.name}:plan not currently supported for Serverless" - ] == caplog.messages + assert [f"{tmp_path.name}:plan not currently supported for Serverless"] == caplog.messages def test_skip( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path, @@ -410,7 +390,7 @@ def test_skip( """Test skip.""" caplog.set_level(logging.INFO, logger="runway") obj = Serverless(runway_context, module_root=tmp_path) - mocker.patch.object(obj, "package_json_missing", lambda: True) + mocker.patch.object(obj, "package_json_missing", return_value=True) mocker.patch.object(obj, "env_file", False) assert obj.skip @@ -420,7 +400,7 @@ def test_skip( ] == caplog.messages caplog.clear() - mocker.patch.object(obj, "package_json_missing", lambda: False) + mocker.patch.object(obj, "package_json_missing", return_value=False) assert obj.skip assert [ f"{tmp_path.name}:skipped; config file for this stage/region not found" @@ -445,7 +425,7 @@ def test_skip( def test_sls_deploy( self, mocker: MockerFixture, - package: Optional[str], + package: str | None, runway_context: MockRunwayContext, skip_install: bool, tmp_path: Path, @@ -475,7 +455,7 @@ def test_sls_deploy( def test_sls_package( self, mocker: MockerFixture, - output_path: Optional[AnyPathConstrained], + output_path: AnyPathConstrained | None, runway_context: MockRunwayContext, skip_install: bool, tmp_path: Path, @@ -494,9 +474,7 @@ def test_sls_package( else: npm_install.assert_called_once_with() if output_path: - gen_cmd.assert_called_once_with( - "package", args_list=["--package", str(output_path)] - ) + gen_cmd.assert_called_once_with("package", args_list=["--package", str(output_path)]) else: gen_cmd.assert_called_once_with("package", args_list=[]) run_module_command.assert_called_once_with( @@ -508,7 +486,7 @@ def test_sls_package( ) def test_sls_print( self, - item_path: Optional[str], + item_path: str | None, mocker: MockerFixture, runway_context: MockRunwayContext, skip_install: bool, @@ -517,9 +495,7 @@ def test_sls_print( """Test 
sls_print.""" expected_dict = {"status": "success"} mock_check_output = MagicMock(return_value=yaml.safe_dump(expected_dict)) - gen_cmd = mocker.patch.object( - Serverless, "gen_cmd", MagicMock(return_value=["print"]) - ) + gen_cmd = mocker.patch.object(Serverless, "gen_cmd", MagicMock(return_value=["print"])) npm_install = mocker.patch.object(Serverless, "npm_install", MagicMock()) mocker.patch("subprocess.check_output", mock_check_output) assert ( @@ -553,9 +529,7 @@ def test_sls_remove( ) -> None: """Test sls_remove.""" fake_process.register_subprocess("remove", stdout="success") - gen_cmd = mocker.patch.object( - Serverless, "gen_cmd", MagicMock(return_value=["remove"]) - ) + gen_cmd = mocker.patch.object(Serverless, "gen_cmd", MagicMock(return_value=["remove"])) npm_install = mocker.patch.object(Serverless, "npm_install", MagicMock()) assert not Serverless(runway_context, module_root=tmp_path).sls_remove( skip_install=skip_install @@ -648,14 +622,12 @@ def test_source_hash( self, mocker: MockerFixture, runway_context: MockRunwayContext, - service: Union[Dict[str, Any], str], + service: dict[str, Any] | str, service_name: str, tmp_path: Path, ) -> None: """Test source_hash.""" - get_hash_of_files = mocker.patch( - f"{MODULE}.get_hash_of_files", Mock(return_value="hash") - ) + get_hash_of_files = mocker.patch(f"{MODULE}.get_hash_of_files", Mock(return_value="hash")) assert ServerlessArtifact( runway_context, { @@ -680,7 +652,7 @@ def test_source_hash_individually( self, mocker: MockerFixture, runway_context: MockRunwayContext, - service: Union[Dict[str, Any], str], + service: dict[str, Any] | str, tmp_path: Path, ) -> None: """Test source_hash.""" @@ -711,9 +683,7 @@ def test_sync_with_s3_download( self, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path ) -> None: """Test sync_with_s3.""" - does_s3_object_exist = mocker.patch( - f"{MODULE}.does_s3_object_exist", return_value=True - ) + does_s3_object_exist = mocker.patch(f"{MODULE}.does_s3_object_exist", return_value=True) download = mocker.patch(f"{MODULE}.download") session = Mock() package_path = tmp_path / "package" @@ -744,9 +714,7 @@ def test_sync_with_s3_upload( self, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path ) -> None: """Test sync_with_s3.""" - does_s3_object_exist = mocker.patch( - f"{MODULE}.does_s3_object_exist", return_value=False - ) + does_s3_object_exist = mocker.patch(f"{MODULE}.does_s3_object_exist", return_value=False) download = mocker.patch(f"{MODULE}.download") session = Mock() package_path = tmp_path / "package" @@ -779,9 +747,7 @@ def test_sync_with_s3_upload_not_exist( self, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path ) -> None: """Test sync_with_s3.""" - does_s3_object_exist = mocker.patch( - f"{MODULE}.does_s3_object_exist", return_value=False - ) + does_s3_object_exist = mocker.patch(f"{MODULE}.does_s3_object_exist", return_value=False) download = mocker.patch(f"{MODULE}.download") session = Mock() package_path = tmp_path / "package" @@ -819,7 +785,7 @@ class TestServerlessOptions: (["-u"], ["-u"]), ], ) - def test_args(self, args: List[str], expected: List[str]) -> None: + def test_args(self, args: list[str], expected: list[str]) -> None: """Test args.""" obj = ServerlessOptions.parse_obj({"args": args}) assert obj.args == expected @@ -862,21 +828,21 @@ def test_args(self, args: List[str], expected: List[str]) -> None: ), ], ) - def test_parse(self, config: Dict[str, Any]) -> None: + def test_parse(self, config: dict[str, 
Any]) -> None: """Test parse.""" obj = ServerlessOptions.parse_obj(config) assert obj.args == config.get("args", []) assert obj.extend_serverless_yml == config.get( - "extend_serverless_yml", cast(Dict[str, Any], {}) + "extend_serverless_yml", cast(dict[str, Any], {}) ) if config.get("promotezip"): assert obj.promotezip else: assert not obj.promotezip - assert obj.promotezip.bucketname == config.get( - "promotezip", cast(Dict[str, Any], {}) - ).get("bucketname") + assert obj.promotezip.bucketname == config.get("promotezip", cast(dict[str, Any], {})).get( + "bucketname" + ) assert obj.skip_npm_ci == config.get("skip_npm_ci", False) def test_parse_invalid_promotezip(self) -> None: diff --git a/tests/unit/module/test_terraform.py b/tests/unit/module/test_terraform.py index 517149db9..60dc18c0f 100644 --- a/tests/unit/module/test_terraform.py +++ b/tests/unit/module/test_terraform.py @@ -1,16 +1,15 @@ """Test runway.module.terraform.""" -# pylint: disable=too-many-statements,too-many-lines -# pyright: basic, reportFunctionMemberAccess=none +# pyright: reportFunctionMemberAccess=none from __future__ import annotations import json import logging import subprocess -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, Union +from typing import TYPE_CHECKING, Any +from unittest.mock import MagicMock, Mock import pytest -from mock import MagicMock, Mock from runway._logging import LogLevels from runway.module.terraform import ( @@ -25,7 +24,6 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture, MonkeyPatch from pytest_mock import MockerFixture from ..factories import MockRunwayContext @@ -60,7 +58,7 @@ def test_update_env_vars_with_tf_var_values() -> None: assert result == expected -class TestTerraform: # pylint: disable=too-many-public-methods +class TestTerraform: """Test runway.module.terraform.Terraform.""" def test___init__(self, runway_context: MockRunwayContext, tmp_path: Path) -> None: @@ -90,7 +88,7 @@ def test___init___options_workspace( def test_auto_tfvars( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path, @@ -122,7 +120,7 @@ def test_auto_tfvars( def test_auto_tfvars_unsupported_version( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path, @@ -144,7 +142,7 @@ def test_auto_tfvars_unsupported_version( def test_cleanup_dot_terraform( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, runway_context: MockRunwayContext, tmp_path: Path, ) -> None: @@ -180,10 +178,7 @@ def test_current_workspace( mock_terraform_workspace_show = mocker.patch.object( Terraform, "terraform_workspace_show", return_value="default" ) - assert ( - Terraform(runway_context, module_root=tmp_path).current_workspace - == "default" - ) + assert Terraform(runway_context, module_root=tmp_path).current_workspace == "default" mock_terraform_workspace_show.assert_called_once_with() @pytest.mark.parametrize( @@ -197,8 +192,8 @@ def test_current_workspace( ) def test_env_file( self, - filename: Union[List[str], str], - expected: Optional[str], + filename: list[str] | str, + expected: str | None, runway_context: MockRunwayContext, tmp_path: Path, ) -> None: @@ -216,10 +211,10 @@ def test_env_file( assert not obj.env_file @pytest.mark.parametrize("action", ["deploy", "destroy", "init", "plan"]) - def test_execute( + def test_execute( # noqa: PLR0915 self, action: str, - caplog: 
LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path, @@ -232,9 +227,7 @@ def test_execute( mocker.patch.object(Terraform, "handle_parameters", MagicMock()) mocker.patch.object(Terraform, "terraform_init", MagicMock()) mocker.patch.object(Terraform, "current_workspace", "test") - mocker.patch.object( - Terraform, "terraform_workspace_list", MagicMock(return_value="* test") - ) + mocker.patch.object(Terraform, "terraform_workspace_list", MagicMock(return_value="* test")) mocker.patch.object(Terraform, "terraform_workspace_select", MagicMock()) mocker.patch.object(Terraform, "terraform_workspace_new", MagicMock()) mocker.patch.object(Terraform, "terraform_get", MagicMock()) @@ -248,8 +241,6 @@ def test_execute( ) command = "apply" if action == "deploy" else action - # pylint: disable=no-member - # module is skipped obj = Terraform(runway_context, module_root=tmp_path) assert not obj[action]() obj.handle_backend.assert_called_once_with() @@ -291,9 +282,7 @@ def test_execute( assert "re-running init after workspace change..." in logs # module is run; create workspace - mocker.patch.object( - Terraform, "terraform_workspace_list", MagicMock(return_value="") - ) + mocker.patch.object(Terraform, "terraform_workspace_list", MagicMock(return_value="")) assert not obj[action]() obj.terraform_workspace_new.assert_called_once_with("test") @@ -311,9 +300,9 @@ def test_execute( ) def test_gen_command( self, - command: Union[List[str], str], - args_list: Optional[List[str]], - expected: List[str], + command: list[str] | str, + args_list: list[str] | None, + expected: list[str], mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path, @@ -332,7 +321,7 @@ def test_gen_command( def test_handle_backend_no_handler( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path, @@ -340,7 +329,7 @@ def test_handle_backend_no_handler( """Test handle_backend with no handler.""" caplog.set_level(LogLevels.DEBUG, logger=MODULE) mock_get_full_configuration = MagicMock(return_value={}) - backend: Dict[str, Union[Dict[str, Any], str]] = { + backend: dict[str, dict[str, Any] | str] = { "type": "unsupported", "config": {}, } @@ -360,7 +349,7 @@ def test_handle_backend_no_handler( def test_handle_backend_no_type( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path, @@ -374,8 +363,8 @@ def test_handle_backend_no_type( def test_handle_backend_remote_name( self, - caplog: LogCaptureFixture, - monkeypatch: MonkeyPatch, + caplog: pytest.LogCaptureFixture, + monkeypatch: pytest.MonkeyPatch, runway_context: MockRunwayContext, tmp_path: Path, ) -> None: @@ -401,8 +390,8 @@ def test_handle_backend_remote_name( def test_handle_backend_remote_prefix( self, - caplog: LogCaptureFixture, - monkeypatch: MonkeyPatch, + caplog: pytest.LogCaptureFixture, + monkeypatch: pytest.MonkeyPatch, runway_context: MockRunwayContext, tmp_path: Path, ) -> None: @@ -429,8 +418,8 @@ def test_handle_backend_remote_prefix( def test_handle_backend_remote_undetermined( self, - caplog: LogCaptureFixture, - monkeypatch: MonkeyPatch, + caplog: pytest.LogCaptureFixture, + monkeypatch: pytest.MonkeyPatch, runway_context: MockRunwayContext, tmp_path: Path, ) -> None: @@ -438,7 +427,7 @@ def test_handle_backend_remote_undetermined( caplog.set_level(LogLevels.WARNING, logger=MODULE) 
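# NOTE: the fixture annotations swapped in throughout this file rely on
# pytest >= 6.2, which exposes LogCaptureFixture and MonkeyPatch on the
# public ``pytest`` namespace; that is what makes the TYPE_CHECKING-only
# ``from pytest import ...`` imports removable. A minimal sketch (the test
# body is illustrative, not from the diff):
import logging

import pytest


def test_sketch(caplog: pytest.LogCaptureFixture, monkeypatch: pytest.MonkeyPatch) -> None:
    monkeypatch.delenv("TF_WORKSPACE", raising=False)
    caplog.set_level(logging.WARNING)
    logging.getLogger(__name__).warning("workspace undetermined")
    assert "workspace undetermined" in caplog.messages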
monkeypatch.delenv("TF_WORKSPACE", raising=False) mock_get_full_configuration = MagicMock(return_value={}) - backend: Dict[str, Union[Dict[str, Any], str]] = { + backend: dict[str, dict[str, Any] | str] = { "type": "remote", "config": {}, } @@ -453,9 +442,7 @@ def test_handle_backend_remote_undetermined( assert not obj.handle_backend() mock_get_full_configuration.assert_called_once_with() - assert '"workspaces" not defined in backend config' in "\n".join( - caplog.messages - ) + assert '"workspaces" not defined in backend config' in "\n".join(caplog.messages) def test_handle_parameters( self, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path @@ -536,7 +523,7 @@ def test_tf_bin_global( def test_tf_bin_missing( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, mocker: MockerFixture, runway_context: MockRunwayContext, tmp_path: Path, @@ -552,9 +539,8 @@ def test_tf_bin_missing( assert obj.tf_bin assert excinfo.value.code == 1 mock_which.assert_called_once_with("terraform") - assert ( - "terraform not available and a version to install not specified" - in "\n".join(caplog.messages) + assert "terraform not available and a version to install not specified" in "\n".join( + caplog.messages ) def test_tf_bin_options( @@ -606,7 +592,7 @@ def test_terraform_apply( ) def test_terraform_destroy( self, - expected_options: List[str], + expected_options: list[str], expected_subcmd: str, mocker: MockerFixture, runway_context: MockRunwayContext, @@ -618,9 +604,7 @@ def test_terraform_destroy( Terraform, "gen_command", return_value=["mock_gen_command"] ) mocker.patch.object(Terraform, "version", version) - mock_run_command = mocker.patch( - f"{MODULE}.run_module_command", return_value=None - ) + mock_run_command = mocker.patch(f"{MODULE}.run_module_command", return_value=None) obj = Terraform(runway_context, module_root=tmp_path) mocker.patch.object(obj, "env_file", ["env_file"]) @@ -658,7 +642,7 @@ def test_terraform_init( Terraform, "gen_command", return_value=["mock_gen_command"] ) mock_run_command = mocker.patch(f"{MODULE}.run_module_command") - options: Dict[str, Union[Dict[str, Any], str]] = { + options: dict[str, dict[str, Any] | str] = { "args": {"init": ["init_arg"]}, "terraform_backend_config": {"bucket": "name"}, } @@ -773,9 +757,7 @@ def test_terraform_workspace_show( ) mock_subprocess = mocker.patch(f"{MODULE}.subprocess") check_output_result = MagicMock( - strip=MagicMock( - return_value=MagicMock(decode=MagicMock(return_value="decoded")) - ) + strip=MagicMock(return_value=MagicMock(decode=MagicMock(return_value="decoded"))) ) mock_subprocess.check_output.return_value = check_output_result @@ -835,9 +817,7 @@ def test_version_raise_value_error( tfenv.get_version_from_executable.return_value = None mocker.patch.object(Terraform, "tfenv", tfenv) mocker.patch.object(Terraform, "tf_bin", "/bin/terraform") - with pytest.raises( - ValueError, match="unable to retrieve version from /bin/terraform" - ): + with pytest.raises(ValueError, match="unable to retrieve version from /bin/terraform"): assert Terraform(runway_context, module_root=tmp_path).version @@ -930,7 +910,7 @@ def test_backend_config( ], ) def test_parse_obj( - self, config: Dict[str, Any], runway_context: MockRunwayContext, tmp_path: Path + self, config: dict[str, Any], runway_context: MockRunwayContext, tmp_path: Path ) -> None: """Test parse_obj.""" obj = TerraformOptions.parse_obj( @@ -992,25 +972,17 @@ def test_get_full_configuration( }, ["bucket=test-bucket", "dynamodb_table=test-table", 
"region=us-east-1"], ), - ( - { - "bucket": "test-bucket", - "dynamodb_table": "test-table", - "region": "us-east-1", - }, - ["bucket=test-bucket", "dynamodb_table=test-table", "region=us-east-1"], - ), ], ) def test_init_args( self, - expected_items: List[str], - input_data: Dict[str, str], + expected_items: list[str], + input_data: dict[str, str], runway_context: MockRunwayContext, tmp_path: Path, ) -> None: """Test init_args.""" - expected: List[str] = [] + expected: list[str] = [] for i in expected_items: expected.extend(["-backend-config", i]) assert ( @@ -1022,7 +994,7 @@ def test_init_args( def test_init_args_file( self, - caplog: LogCaptureFixture, + caplog: pytest.LogCaptureFixture, runway_context: MockRunwayContext, tmp_path: Path, ) -> None: @@ -1049,10 +1021,7 @@ def test_gen_backend_filenames(self) -> None: "backend.tfvars", ] - assert ( - TerraformBackendConfig.gen_backend_filenames("test", "us-east-1") - == expected - ) + assert TerraformBackendConfig.gen_backend_filenames("test", "us-east-1") == expected @pytest.mark.parametrize( "filename, expected", @@ -1069,7 +1038,7 @@ def test_gen_backend_filenames(self) -> None: ], ) def test_get_backend_file( - self, tmp_path: Path, filename: Union[List[str], str], expected: Optional[str] + self, tmp_path: Path, filename: list[str] | str, expected: str | None ) -> None: """Test get_backend_file.""" if isinstance(filename, list): @@ -1095,7 +1064,7 @@ def test_get_backend_file( ) def test_parse_obj( self, - config: Dict[str, str], + config: dict[str, str], expected_region: str, mocker: MockerFixture, runway_context: MockRunwayContext, @@ -1104,11 +1073,11 @@ def test_parse_obj( """Test parse_obj.""" def assert_get_backend_file_args( - _cls: Type[TerraformBackendConfig], + _cls: type[TerraformBackendConfig], path: Path, env_name: str, env_region: str, - ): + ) -> str: """Assert args passed to the method during parse.""" assert path == tmp_path assert env_name == "test" diff --git a/tests/unit/module/test_utils.py b/tests/unit/module/test_utils.py index 74972149e..91b7cbaaf 100644 --- a/tests/unit/module/test_utils.py +++ b/tests/unit/module/test_utils.py @@ -1,12 +1,9 @@ """Test runway.module.utils.""" -# pylint: disable=unused-argument -# pyright: basic from __future__ import annotations -from pathlib import Path from subprocess import CalledProcessError -from typing import TYPE_CHECKING, Any, List +from typing import TYPE_CHECKING, Any import pytest @@ -20,6 +17,8 @@ ) if TYPE_CHECKING: + from pathlib import Path + from pytest_mock import MockerFixture from pytest_subprocess import FakeProcess @@ -36,7 +35,7 @@ ], ) def test_format_npm_command_for_logging_darwin( - command: List[str], expected: str, platform_darwin: None + command: list[str], expected: str, platform_darwin: None # noqa: ARG001 ) -> None: """Test format_npm_command_for_logging on Darwin/macOS.""" assert format_npm_command_for_logging(command) == expected @@ -52,7 +51,7 @@ def test_format_npm_command_for_logging_darwin( ], ) def test_format_npm_command_for_logging_windows( - command: List[str], expected: str, platform_windows: None + command: list[str], expected: str, platform_windows: None # noqa: ARG001 ) -> None: """Test format_npm_command_for_logging on windows.""" assert format_npm_command_for_logging(command) == expected @@ -62,7 +61,7 @@ def test_format_npm_command_for_logging_windows( "command, opts", [("test", []), ("test", ["arg1"]), ("test", ["arg1", "arg2"])] ) def test_generate_node_command( - command: str, mocker: MockerFixture, opts: List[str], tmp_path: 
Path + command: str, mocker: MockerFixture, opts: list[str], tmp_path: Path ) -> None: """Test generate_node_command.""" mock_which = mocker.patch(f"{MODULE}.which", return_value=False) @@ -83,9 +82,9 @@ def test_generate_node_command( ) def test_generate_node_command_npx( command: str, - expected: List[str], + expected: list[str], mocker: MockerFixture, - opts: List[str], + opts: list[str], tmp_path: Path, ) -> None: """Test generate_node_command.""" @@ -94,9 +93,7 @@ def test_generate_node_command_npx( mock_which.assert_called_once_with(NPX_BIN) -def test_generate_node_command_npx_package( - mocker: MockerFixture, tmp_path: Path -) -> None: +def test_generate_node_command_npx_package(mocker: MockerFixture, tmp_path: Path) -> None: """Test generate_node_command.""" mock_which = mocker.patch(f"{MODULE}.which", return_value=True) assert generate_node_command( @@ -170,7 +167,7 @@ def test_use_npm_ci( (tmp_path / "package-lock.json").touch() if has_shrinkwrap: (tmp_path / "package-lock.json").touch() - cmd: List[Any] = [NPM_BIN, "ci", "-h"] + cmd: list[Any] = [NPM_BIN, "ci", "-h"] fake_process.register_subprocess(cmd, returncode=exit_code) assert use_npm_ci(tmp_path) is expected diff --git a/tests/unit/sources/test_git.py b/tests/unit/sources/test_git.py index 26adc0917..ae96aafbc 100644 --- a/tests/unit/sources/test_git.py +++ b/tests/unit/sources/test_git.py @@ -1,6 +1,5 @@ """Tests for the Source type object.""" -# pyright: basic from __future__ import annotations import logging diff --git a/tests/unit/sources/test_source.py b/tests/unit/sources/test_source.py index 0bfc9cce8..46b20504d 100644 --- a/tests/unit/sources/test_source.py +++ b/tests/unit/sources/test_source.py @@ -1,6 +1,5 @@ """Tests for the Source type object.""" -# pyright: basic from __future__ import annotations import logging diff --git a/tests/unit/test__json_encoder.py b/tests/unit/test__json_encoder.py new file mode 100644 index 000000000..dd479f578 --- /dev/null +++ b/tests/unit/test__json_encoder.py @@ -0,0 +1,40 @@ +"""Test runway.utils._json_encoder.""" + +from __future__ import annotations + +import datetime +from decimal import Decimal +from pathlib import Path +from typing import Any + +import pytest +from packaging.specifiers import SpecifierSet + +from runway.config.models.runway import RunwayAssumeRoleDefinitionModel +from runway.utils import JsonEncoder + + +class TestJsonEncoder: + """Test JsonEncoder.""" + + @pytest.mark.parametrize( + "provided, expected", + [ + (("foo", "bar"), list), + (Decimal("1.1"), float), + (Path.cwd() / ".runway", str), + (RunwayAssumeRoleDefinitionModel(), dict), + (SpecifierSet("==1.0"), str), + (datetime.datetime.now(), str), + ({"foo"}, list), + ], + ) + def test_supported_types(self, provided: Any, expected: type) -> None: + """Test encoding of supported data types.""" + assert isinstance(JsonEncoder().default(provided), expected) + + @pytest.mark.parametrize("provided", [(None)]) + def test_unsupported_types(self, provided: Any) -> None: + """Test encoding of unsupported data types.""" + with pytest.raises(TypeError): + assert not JsonEncoder().default(provided) diff --git a/tests/unit/test_compat.py b/tests/unit/test_compat.py index 8edd79d48..b9c3cd5c0 100644 --- a/tests/unit/test_compat.py +++ b/tests/unit/test_compat.py @@ -15,9 +15,7 @@ MODULE = "runway.compat" -py37 = pytest.mark.skipif( - sys.version_info >= (3, 8), reason="requires python3.8 or higher" -) +py37 = pytest.mark.skipif(sys.version_info >= (3, 8), reason="requires python3.8 or higher") @py37 diff --git 
a/tests/unit/test_mixins.py b/tests/unit/test_mixins.py index 5703c3ed1..1ea59e3ea 100644 --- a/tests/unit/test_mixins.py +++ b/tests/unit/test_mixins.py @@ -1,13 +1,12 @@ """Test runway.mixins.""" -# pylint: disable=protected-access,unused-argument from __future__ import annotations import subprocess -from typing import TYPE_CHECKING, Any, Dict, List, Optional +from typing import TYPE_CHECKING, Any +from unittest.mock import Mock import pytest -from mock import Mock from runway.compat import cached_property from runway.mixins import CliInterfaceMixin, DelCachedPropMixin @@ -35,17 +34,13 @@ def __init__(self, context: CfnginContext, cwd: Path) -> None: @pytest.mark.parametrize("env", [None, {"foo": "bar"}]) def test__run_command( - self, env: Optional[Dict[str, str]], mocker: MockerFixture, tmp_path: Path + self, env: dict[str, str] | None, mocker: MockerFixture, tmp_path: Path ) -> None: """Test _run_command.""" ctx_env = {"foo": "bar", "bar": "foo"} - mock_subprocess = mocker.patch( - f"{MODULE}.subprocess.check_output", return_value="success" - ) + mock_subprocess = mocker.patch(f"{MODULE}.subprocess.check_output", return_value="success") assert ( - self.Kls(Mock(env=Mock(vars=ctx_env)), tmp_path)._run_command( - "test", env=env - ) + self.Kls(Mock(env=Mock(vars=ctx_env)), tmp_path)._run_command("test", env=env) == mock_subprocess.return_value ) mock_subprocess.assert_called_once_with( @@ -57,17 +52,11 @@ def test__run_command( text=True, ) - def test__run_command_no_suppress_output( - self, mocker: MockerFixture, tmp_path: Path - ) -> None: + def test__run_command_no_suppress_output(self, mocker: MockerFixture, tmp_path: Path) -> None: """Test _run_command.""" env = {"foo": "bar"} - mock_list2cmdline = mocker.patch.object( - self.Kls, "list2cmdline", return_value="success" - ) - mock_subprocess = mocker.patch( - f"{MODULE}.subprocess.check_call", return_value=0 - ) + mock_list2cmdline = mocker.patch.object(self.Kls, "list2cmdline", return_value="success") + mock_subprocess = mocker.patch(f"{MODULE}.subprocess.check_call", return_value=0) assert not self.Kls(Mock(env=Mock(vars=env)), tmp_path)._run_command( ["foo", "bar"], suppress_output=False ) @@ -87,9 +76,7 @@ def test__run_command_no_suppress_output( ("--", "foo-bar", "--foo-bar"), ], ) - def test_convert_to_cli_arg( - self, expected: str, prefix: Optional[str], provided: str - ) -> None: + def test_convert_to_cli_arg(self, expected: str, prefix: str | None, provided: str) -> None: """Test convert_to_cli_arg.""" if prefix: assert self.Kls.convert_to_cli_arg(provided, prefix=prefix) == expected @@ -117,9 +104,9 @@ def test_found_in_path(self, mocker: MockerFixture, return_value: bool) -> None: ) def test_generate_command( self, - expected: List[str], + expected: list[str], mocker: MockerFixture, - provided: Dict[str, Any], + provided: dict[str, Any], ) -> None: """Test generate_command.""" exe = mocker.patch.object(self.Kls, "EXECUTABLE", "test.exe", create=True) @@ -130,7 +117,7 @@ def test_generate_command( ] def test_list2cmdline_darwin( - self, mocker: MockerFixture, platform_darwin: None + self, mocker: MockerFixture, platform_darwin: None # noqa: ARG002 ) -> None: """Test list2cmdline on Darwin/macOS systems.""" mock_list2cmdline = mocker.patch(f"{MODULE}.subprocess.list2cmdline") @@ -140,7 +127,7 @@ def test_list2cmdline_darwin( mock_join.assert_called_once_with("foo") def test_list2cmdline_linus( - self, mocker: MockerFixture, platform_linux: None + self, mocker: MockerFixture, platform_linux: None # noqa: ARG002 ) -> None: 
"""Test list2cmdline on Linux systems.""" mock_list2cmdline = mocker.patch(f"{MODULE}.subprocess.list2cmdline") @@ -150,7 +137,7 @@ def test_list2cmdline_linus( mock_join.assert_called_once_with("foo") def test_list2cmdline_windows( - self, mocker: MockerFixture, platform_windows: None + self, mocker: MockerFixture, platform_windows: None # noqa: ARG002 ) -> None: """Test list2cmdline on Windows systems.""" mock_list2cmdline = mocker.patch( diff --git a/tests/unit/test_variables.py b/tests/unit/test_variables.py index 2d9e0f86f..695dc727e 100644 --- a/tests/unit/test_variables.py +++ b/tests/unit/test_variables.py @@ -1,17 +1,13 @@ """Tests for runway.variables.""" -# pylint: disable=expression-not-assigned,protected-access,unused-argument -# pylint: disable=too-many-lines -# pyright: basic from __future__ import annotations -from typing import TYPE_CHECKING, Any, ClassVar, List, Union +from typing import TYPE_CHECKING, Any, ClassVar +from unittest.mock import MagicMock, call import pytest -from mock import MagicMock, call from pydantic import BaseModel -from runway.context import CfnginContext, RunwayContext from runway.exceptions import ( FailedLookup, FailedVariableLookup, @@ -39,20 +35,24 @@ if TYPE_CHECKING: from pytest_mock import MockerFixture - from .factories import MockCFNginContext + from .factories import MockCfnginContext + + +class ExampleModel(BaseModel): + """Example model used for testing.""" + + test: Any = "val" class MockLookupHandler(LookupHandler): """Mock lookup handler.""" return_value: ClassVar[Any] = "resolved" - side_effect: ClassVar[Union[Any, List[Any]]] = None + side_effect: ClassVar[Any | list[Any]] = None @classmethod - def handle( # pylint: disable=arguments-differ + def handle( cls, - value: str, - context: Union[CfnginContext, RunwayContext], *__args: Any, **__kwargs: Any, ) -> Any: @@ -64,24 +64,21 @@ def handle( # pylint: disable=arguments-differ return cls._handle_side_effect(cls.side_effect) @classmethod - def _handle_side_effect(cls, side_effect: Any): + def _handle_side_effect(cls, side_effect: Any) -> Any: """Handle side_effect.""" if isinstance(side_effect, BaseException): raise side_effect return side_effect -@pytest.fixture(autouse=True, scope="function") +@pytest.fixture(autouse=True) def patch_lookups(mocker: MockerFixture) -> None: """Patch registered lookups.""" for registry in [CFNGIN_LOOKUP_HANDLERS, RUNWAY_LOOKUP_HANDLERS]: - # mocked = {k: MockLookupHandler for k in registry} - # mocked["test"] = MockLookupHandler - # mocker.patch.dict(registry, mocked) mocker.patch.dict(registry, {"test": MockLookupHandler}) -def test_resolve_variables(cfngin_context: MockCFNginContext) -> None: +def test_resolve_variables(cfngin_context: MockCfnginContext) -> None: """Test resolve_variables.""" variable = MagicMock() assert not resolve_variables([variable], cfngin_context) @@ -99,7 +96,7 @@ def test_dependencies(self, mocker: MockerFixture) -> None: ) assert Variable("Param", "val").dependencies == {"test"} - def test_get(self, mocker: MockerFixture) -> None: + def test_get(self) -> None: """Test get.""" obj = Variable("Para", {"key": "val"}) assert obj.get("missing") is None @@ -117,9 +114,7 @@ def test_init(self, variable_type: VariableTypeLiteralTypeDef) -> None: def test_multiple_lookup_dict(self, mocker: MockerFixture) -> None: """Test multiple lookup dict.""" - mocker.patch.object( - MockLookupHandler, "side_effect", ["resolved0", "resolved1"] - ) + mocker.patch.object(MockLookupHandler, "side_effect", ["resolved0", "resolved1"]) value = { 
"something": "${test query0}", "other": "${test query1}", @@ -131,9 +126,7 @@ def test_multiple_lookup_dict(self, mocker: MockerFixture) -> None: def test_multiple_lookup_list(self, mocker: MockerFixture) -> None: """Test multiple lookup list.""" - mocker.patch.object( - MockLookupHandler, "side_effect", ["resolved0", "resolved1"] - ) + mocker.patch.object(MockLookupHandler, "side_effect", ["resolved0", "resolved1"]) value = [ "something", "${test query0}", @@ -170,9 +163,7 @@ def test_multiple_lookup_string(self, mocker: MockerFixture) -> None: """Test multiple lookup string.""" var = Variable("Param1", "url://${test query0}@${test query1}") assert isinstance(var._value, VariableValueConcatenation) - mocker.patch.object( - MockLookupHandler, "side_effect", ["resolved0", "resolved1"] - ) + mocker.patch.object(MockLookupHandler, "side_effect", ["resolved0", "resolved1"]) var.resolve(MagicMock(), MagicMock()) assert var.resolved is True assert var.value == "url://resolved0@resolved1" @@ -214,9 +205,7 @@ def test_no_lookup_str(self) -> None: @pytest.mark.parametrize("resolved", [False, True]) def test_resolved(self, mocker: MockerFixture, resolved: bool) -> None: """Test resolved.""" - mocker.patch.object( - VariableValue, "parse_obj", return_value=MagicMock(resolved=resolved) - ) + mocker.patch.object(VariableValue, "parse_obj", return_value=MagicMock(resolved=resolved)) assert Variable("Param", "val").resolved is resolved def test_resolve_failed(self, mocker: MockerFixture) -> None: @@ -231,7 +220,7 @@ def test_resolve_failed(self, mocker: MockerFixture) -> None: assert excinfo.value.cause == lookup_error assert excinfo.value.variable == obj - def test_repr(self) -> None: + def test___repr__(self) -> None: """Test __repr__.""" assert repr(Variable("Param", "val")) == "Variable[Param=val]" @@ -254,31 +243,29 @@ def test_simple_lookup(self) -> None: assert var.resolved is True assert var.value == "resolved" - def test_value_unresolved(self, mocker: MockerFixture): + def test_value_unresolved(self, mocker: MockerFixture) -> None: """Test value UnresolvedVariable.""" - mocker.patch.object( - VariableValue, "parse_obj", return_value=MagicMock(value="value") - ) + mocker.patch.object(VariableValue, "parse_obj", return_value=MagicMock(value="value")) def test_value(self) -> None: """Test value.""" with pytest.raises(UnresolvedVariable): - Variable("Param", "${test query}").value + Variable("Param", "${test query}").value # noqa: B018 class TestVariableValue: """Test runway.variables.VariableValue.""" + def test___iter__(self) -> None: + """Test __iter__.""" + with pytest.raises(NotImplementedError): + iter(VariableValue()) + def test_dependencies(self) -> None: """Test dependencies.""" obj = VariableValue() assert obj.dependencies == set() - def test_iter(self) -> None: - """Test __iter__.""" - with pytest.raises(NotImplementedError): - iter(VariableValue()) - def test_parse_obj_dict_empty(self) -> None: """Test parse_obj dict empty.""" assert isinstance(VariableValue.parse_obj({}), VariableValueDict) @@ -308,9 +295,7 @@ def test_parse_obj_literal_str(self) -> None: def test_parse_obj_pydantic_model(self) -> None: """Test parse_obj pydantic model.""" - assert isinstance( - VariableValue.parse_obj(BaseModel()), VariableValuePydanticModel - ) + assert isinstance(VariableValue.parse_obj(ExampleModel()), VariableValuePydanticModel) def test_repr(self) -> None: """Test __repr__.""" @@ -320,9 +305,9 @@ def test_repr(self) -> None: def test_resolved(self) -> None: """Test resolved.""" with 
pytest.raises(NotImplementedError): - VariableValue().resolved # pylint: disable=expression-not-assigned + VariableValue().resolved # noqa: B018 - def test_resolve(self, cfngin_context: MockCFNginContext) -> None: + def test_resolve(self, cfngin_context: MockCfnginContext) -> None: """Test resolve.""" assert not VariableValue().resolve(context=cfngin_context) @@ -334,7 +319,7 @@ def test_simplified(self) -> None: def test_value(self) -> None: """Test value.""" with pytest.raises(NotImplementedError): - VariableValue().value # pylint: disable=expression-not-assigned + VariableValue().value # noqa: B018 class TestVariableValueConcatenation: @@ -392,14 +377,10 @@ def test_resolved(self) -> None: is False ) - def test_resolve( - self, cfngin_context: MockCFNginContext, mocker: MockerFixture - ) -> None: + def test_resolve(self, cfngin_context: MockCfnginContext, mocker: MockerFixture) -> None: """Test resolve.""" mock_provider = MagicMock() - mock_resolve = mocker.patch.object( - VariableValueLiteral, "resolve", return_value=None - ) + mock_resolve = mocker.patch.object(VariableValueLiteral, "resolve", return_value=None) obj = VariableValueConcatenation([VariableValueLiteral("val0")]) assert not obj.resolve( cfngin_context, @@ -440,42 +421,25 @@ def test_simplified_list(self) -> None: """Test simplified list.""" assert [ i.value - for i in VariableValueConcatenation( - [VariableValueList(["foo", "bar"])] - ).simplified + for i in VariableValueConcatenation([VariableValueList(["foo", "bar"])]).simplified ] == ["foo", "bar"] def test_simplified_literal_bool(self) -> None: """Test simplified literal bool.""" - assert ( - VariableValueConcatenation([VariableValueLiteral(True)]).simplified.value - is True - ) - assert ( - VariableValueConcatenation([VariableValueLiteral(False)]).simplified.value - is False - ) + assert VariableValueConcatenation([VariableValueLiteral(True)]).simplified.value is True + assert VariableValueConcatenation([VariableValueLiteral(False)]).simplified.value is False def test_simplified_literal_empty(self) -> None: """Test simplified literal empty.""" - assert ( - VariableValueConcatenation([VariableValueLiteral("")]).simplified.value - == "" - ) + assert VariableValueConcatenation([VariableValueLiteral("")]).simplified.value == "" def test_simplified_literal_int(self) -> None: """Test simplified literal int.""" - assert ( - VariableValueConcatenation([VariableValueLiteral(13)]).simplified.value - == 13 - ) + assert VariableValueConcatenation([VariableValueLiteral(13)]).simplified.value == 13 def test_simplified_literal_str(self) -> None: """Test simplified literal str.""" - assert ( - VariableValueConcatenation([VariableValueLiteral("foo")]).simplified.value - == "foo" - ) + assert VariableValueConcatenation([VariableValueLiteral("foo")]).simplified.value == "foo" assert ( VariableValueConcatenation( [VariableValueLiteral("foo"), VariableValueLiteral("bar")] @@ -483,29 +447,36 @@ def test_simplified_literal_str(self) -> None: == "foobar" ) - def test_value_multiple(self) -> None: - """Test multiple.""" - assert ( - VariableValueConcatenation( - [VariableValueLiteral("foo"), VariableValueLiteral("bar")] - ).value - == "foobar" - ) - assert ( - VariableValueConcatenation( - [VariableValueLiteral(13), VariableValueLiteral("/test")] # type: ignore - ).value - == "13/test" - ) - assert ( - VariableValueConcatenation( - [VariableValueLiteral(5), VariableValueLiteral(13)] - ).value - == "513" - ) + @pytest.mark.parametrize( + "variable, expected", + [ + ( + 
VariableValueConcatenation( + [VariableValueLiteral("foo"), VariableValueLiteral("bar")] + ), + "foobar", + ), + ( + VariableValueConcatenation( + [VariableValueLiteral(13), VariableValueLiteral("/test")] + ), + "13/test", + ), + ( + VariableValueConcatenation([VariableValueLiteral(5), VariableValueLiteral(13)]), + "513", + ), + ], + ) + def test_value_multiple(self, expected: str, variable: VariableValueConcatenation[Any]) -> None: + """Test value multiple.""" + assert variable.value == expected + + def test_value_multiple_raise_concatenation_error(self) -> None: + """Test value multiple raises InvalidLookupConcatenationError.""" with pytest.raises(InvalidLookupConcatenation): - VariableValueConcatenation( - [VariableValueLiteral(True), VariableValueLiteral("test")] # type: ignore + VariableValueConcatenation( # noqa: B018 + [VariableValueLiteral(True), VariableValueLiteral(VariableValueLiteral)] # type: ignore ).value def test_value_single(self) -> None: @@ -560,7 +531,7 @@ def test_len(self) -> None: def test_repr(self) -> None: """Test __repr__.""" obj = VariableValueDict({"key0": "val0", "key1": "val1"}) - assert repr(obj) == "Dict[key0=Literal[val0], key1=Literal[val1]]" + assert repr(obj) == "dict[key0=Literal[val0], key1=Literal[val1]]" @pytest.mark.parametrize("resolved", [False, True]) def test_resolved(self, mocker: MockerFixture, resolved: bool) -> None: @@ -570,9 +541,7 @@ def test_resolved(self, mocker: MockerFixture, resolved: bool) -> None: obj = VariableValueDict({"key": "val"}) assert obj.resolved is resolved - def test_resolve( - self, cfngin_context: MockCFNginContext, mocker: MockerFixture - ) -> None: + def test_resolve(self, cfngin_context: MockCfnginContext, mocker: MockerFixture) -> None: """Test resolve.""" mock_literal = MagicMock() mock_provider = MagicMock() @@ -634,8 +603,6 @@ def test_getitem(self) -> None: """Test __getitem__.""" obj = VariableValueList(["val0", "val1"]) assert obj[1].value == "val1" - # for some reason, the current version of pylint does not see this as iterable - # pylint: disable=not-an-iterable assert [i.value for i in obj[:2]] == ["val0", "val1"] def test_init(self, mocker: MockerFixture) -> None: @@ -667,7 +634,7 @@ def test_len(self) -> None: def test_repr(self) -> None: """Test __repr__.""" obj = VariableValueList(["val0", "val1"]) - assert repr(obj) == "List[Literal[val0], Literal[val1]]" + assert repr(obj) == "list[Literal[val0], Literal[val1]]" @pytest.mark.parametrize("resolved", [False, True]) def test_resolved(self, mocker: MockerFixture, resolved: bool) -> None: @@ -677,9 +644,7 @@ def test_resolved(self, mocker: MockerFixture, resolved: bool) -> None: obj = VariableValueList(["val0"]) assert obj.resolved is resolved - def test_resolve( - self, cfngin_context: MockCFNginContext, mocker: MockerFixture - ) -> None: + def test_resolve(self, cfngin_context: MockCfnginContext, mocker: MockerFixture) -> None: """Test resolve.""" mock_literal = MagicMock() mock_provider = MagicMock() @@ -726,31 +691,31 @@ class TestVariableValueLiteral: """Test runway.variables.VariableValueLiteral.""" @pytest.mark.parametrize("value", [False, True, 13, "test"]) - def test_init(self, value: Union[int, str]) -> None: + def test_init(self, value: int | str) -> None: """Test __init__.""" obj = VariableValueLiteral(value) # type: ignore assert obj._data == value @pytest.mark.parametrize("value", [False, True, 13, "test"]) - def test_iter(self, value: Union[int, str]) -> None: + def test_iter(self, value: int | str) -> None: """Test __iter__.""" obj = 
VariableValueLiteral(value) # type: ignore assert list(iter(obj)) == [obj] # type: ignore @pytest.mark.parametrize("value", [False, True, 13, "test"]) - def test_repr(self, value: Union[int, str]) -> None: + def test_repr(self, value: int | str) -> None: """Test __repr__.""" obj = VariableValueLiteral(value) # type: ignore assert repr(obj) == f"Literal[{value}]" # type: ignore @pytest.mark.parametrize("value", [False, True, 13, "test"]) - def test_resolved(self, value: Union[int, str]) -> None: + def test_resolved(self, value: int | str) -> None: """Test resolved.""" obj = VariableValueLiteral(value) # type: ignore assert obj.resolved @pytest.mark.parametrize("value", [False, True, 13, "test"]) - def test_value(self, value: Union[int, str]) -> None: + def test_value(self, value: int | str) -> None: """Test value.""" obj = VariableValueLiteral(value) # type: ignore assert obj.value == value @@ -765,20 +730,16 @@ def test_dependencies_no_attr(self) -> None: class FakeLookup: """Fake lookup.""" - obj = VariableValueLookup( - VariableValueLiteral("test"), "query", FakeLookup # type: ignore - ) + obj = VariableValueLookup(VariableValueLiteral("test"), "query", FakeLookup) # type: ignore assert obj.dependencies == set() def test_dependencies(self, mocker: MockerFixture) -> None: """Test dependencies.""" mocker.patch.object(MockLookupHandler, "dependencies", return_value={"test"}) - obj = VariableValueLookup( - VariableValueLiteral("test"), "query", MockLookupHandler - ) + obj = VariableValueLookup(VariableValueLiteral("test"), "query", MockLookupHandler) assert obj.dependencies == {"test"} - def test_init_convert_query(self) -> None: + def test___init___convert_query(self) -> None: """Test __init__ convert query.""" obj = VariableValueLookup( VariableValueLiteral("test"), "query", MockLookupHandler, "runway" @@ -786,15 +747,13 @@ def test_init_convert_query(self) -> None: assert isinstance(obj.lookup_query, VariableValueLiteral) assert obj.lookup_query.value == "query" - def test_init_find_handler_cfngin(self, mocker: MockerFixture) -> None: + def test___init___find_handler_cfngin(self, mocker: MockerFixture) -> None: """Test __init__ find handler cfngin.""" mocker.patch.dict(CFNGIN_LOOKUP_HANDLERS, {"test": "success"}) - obj = VariableValueLookup( - VariableValueLiteral("test"), VariableValueLiteral("query") - ) + obj = VariableValueLookup(VariableValueLiteral("test"), VariableValueLiteral("query")) assert obj.handler == "success" - def test_init_find_handler_runway(self, mocker: MockerFixture) -> None: + def test___init___find_handler_runway(self, mocker: MockerFixture) -> None: """Test __init__ find handler runway.""" mocker.patch.dict(RUNWAY_LOOKUP_HANDLERS, {"test": "success"}) obj = VariableValueLookup( @@ -804,16 +763,16 @@ def test_init_find_handler_runway(self, mocker: MockerFixture) -> None: ) assert obj.handler == "success" - def test_init_find_handler_value_error(self) -> None: + def test___init___find_handler_value_error(self) -> None: """Test __init__ find handler ValueError.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Variable type must be one of"): VariableValueLookup( VariableValueLiteral("test"), VariableValueLiteral("query"), variable_type="invalid", # type: ignore ) - def test_init_find_handler_unknown_lookup_type(self) -> None: + def test___init___find_handler_unknown_lookup_type(self) -> None: """Test __init__ find handler UnknownLookupType.""" with pytest.raises(UnknownLookupType): VariableValueLookup( @@ -821,7 +780,7 @@ def 
test_init_find_handler_unknown_lookup_type(self) -> None: VariableValueLiteral("query"), ) - def test_init(self) -> None: + def test___init__(self) -> None: """Test __init__.""" name = VariableValueLiteral("test") query = VariableValueLiteral("query") @@ -831,12 +790,12 @@ def test_init(self) -> None: assert obj.lookup_query == query assert obj.variable_type == "runway" - def test_iter(self) -> None: + def test___iter__(self) -> None: """Test __iter__.""" obj = VariableValueLookup(VariableValueLiteral("test"), "query") assert list(iter(obj)) == [obj] - def test_repr(self) -> None: + def test___repr__(self) -> None: """Test __repr__.""" obj = VariableValueLookup(VariableValueLiteral("test"), "query") assert repr(obj) == "Lookup[Literal[test] Literal[query]]" @@ -867,16 +826,10 @@ def test_resolve(self, mocker: MockerFixture) -> None: "variables": MagicMock(), "kwarg": "something", } - mock_handle = mocker.patch.object( - MockLookupHandler, "handle", return_value="resolved" - ) - mock_resolve = mocker.patch.object( - VariableValueLookup, "_resolve", return_value=None - ) + mock_handle = mocker.patch.object(MockLookupHandler, "handle", return_value="resolved") + mock_resolve = mocker.patch.object(VariableValueLookup, "_resolve", return_value=None) mock_resolve_query = mocker.patch.object(VariableValueLiteral, "resolve") - obj = VariableValueLookup( - VariableValueLiteral("test"), VariableValueLiteral("query") - ) + obj = VariableValueLookup(VariableValueLiteral("test"), VariableValueLiteral("query")) assert not obj.resolve(**kwargs) # type: ignore mock_resolve_query.assert_called_once_with(**kwargs) mock_handle.assert_called_once_with("query", **kwargs) @@ -887,19 +840,16 @@ def test_simplified(self) -> None: obj = VariableValueLookup(VariableValueLiteral("test"), "query") assert obj.simplified == obj - def test_str(self) -> None: + def test___str__(self) -> None: """Test __str__.""" - assert ( - str(VariableValueLookup(VariableValueLiteral("test"), "query")) - == "${test query}" - ) + assert str(VariableValueLookup(VariableValueLiteral("test"), "query")) == "${test query}" def test_value(self) -> None: """Test value.""" obj = VariableValueLookup(VariableValueLiteral("test"), "query") assert obj.resolved is False with pytest.raises(UnresolvedVariableValue): - obj.value # pylint: disable=pointless-statement + assert obj.value obj._resolve("success") assert obj.resolved is True assert obj.value == "success" @@ -922,9 +872,7 @@ def test___delitem__(self) -> None: def test___getitem__(self, mocker: MockerFixture) -> None: """Test __getitem__.""" - mocker.patch.object( - VariableValuePydanticModel, "parse_obj", return_value="parsed_val" - ) + mocker.patch.object(VariableValuePydanticModel, "parse_obj", return_value="parsed_val") obj = VariableValuePydanticModel(self.ModelClass()) assert obj["test"] == "parsed_val" @@ -956,9 +904,7 @@ def test___repr__(self) -> None: def test___setitem__(self, mocker: MockerFixture) -> None: """Test __setitem__.""" - mocker.patch.object( - VariableValuePydanticModel, "parse_obj", return_value="parsed_val" - ) + mocker.patch.object(VariableValuePydanticModel, "parse_obj", return_value="parsed_val") obj = VariableValuePydanticModel(self.ModelClass()) obj["test"] = "new" # type: ignore assert obj["test"] == "new" @@ -966,21 +912,15 @@ def test___setitem__(self, mocker: MockerFixture) -> None: def test_dependencies(self, mocker: MockerFixture) -> None: """Test dependencies.""" mock_literal = MagicMock(dependencies=set("foobar")) - mocker.patch.object( - 
VariableValuePydanticModel, "parse_obj", return_value=mock_literal - ) + mocker.patch.object(VariableValuePydanticModel, "parse_obj", return_value=mock_literal) obj = VariableValuePydanticModel(self.ModelClass()) assert obj.dependencies == mock_literal.dependencies - def test_resolve( - self, cfngin_context: MockCFNginContext, mocker: MockerFixture - ) -> None: + def test_resolve(self, cfngin_context: MockCfnginContext, mocker: MockerFixture) -> None: """Test resolve.""" mock_literal = MagicMock() mock_provider = MagicMock() - mocker.patch.object( - VariableValuePydanticModel, "parse_obj", return_value=mock_literal - ) + mocker.patch.object(VariableValuePydanticModel, "parse_obj", return_value=mock_literal) obj = VariableValuePydanticModel(self.ModelClass()) assert not obj.resolve( cfngin_context, @@ -999,26 +939,20 @@ def test_resolve( def test_resolved(self, mocker: MockerFixture, resolved: bool) -> None: """Test resolved.""" mock_literal = MagicMock(resolved=resolved) - mocker.patch.object( - VariableValuePydanticModel, "parse_obj", return_value=mock_literal - ) + mocker.patch.object(VariableValuePydanticModel, "parse_obj", return_value=mock_literal) obj = VariableValuePydanticModel(self.ModelClass()) assert obj.resolved is resolved def test_simplified(self, mocker: MockerFixture) -> None: """Test simplified.""" mock_literal = MagicMock(simplified="simplified") - mocker.patch.object( - VariableValuePydanticModel, "parse_obj", return_value=mock_literal - ) + mocker.patch.object(VariableValuePydanticModel, "parse_obj", return_value=mock_literal) obj = VariableValuePydanticModel(self.ModelClass()) assert obj.simplified == {"test": "simplified"} def test_value(self, mocker: MockerFixture) -> None: """Test value.""" mock_literal = MagicMock(value="value") - mocker.patch.object( - VariableValuePydanticModel, "parse_obj", return_value=mock_literal - ) + mocker.patch.object(VariableValuePydanticModel, "parse_obj", return_value=mock_literal) obj = VariableValuePydanticModel(self.ModelClass()) assert obj.value == self.ModelClass(test=mock_literal.value) diff --git a/tests/unit/utils/pydantic_validators/__init__.py b/tests/unit/utils/pydantic_validators/__init__.py new file mode 100644 index 000000000..19f40a399 --- /dev/null +++ b/tests/unit/utils/pydantic_validators/__init__.py @@ -0,0 +1 @@ +"""Test runway.utils.pydantic_validators.""" diff --git a/tests/unit/utils/pydantic_validators/test__lax_str.py b/tests/unit/utils/pydantic_validators/test__lax_str.py new file mode 100644 index 000000000..635d25926 --- /dev/null +++ b/tests/unit/utils/pydantic_validators/test__lax_str.py @@ -0,0 +1,51 @@ +"""Test runway.utils.pydantic_validators._lax_str.""" + +from __future__ import annotations + +from decimal import Decimal +from enum import Enum +from typing import Annotated + +import pytest +from pydantic import BaseModel, ValidationError + +from runway.utils.pydantic_validators._lax_str import LaxStr + + +class SomeEnum(str, Enum): + """Enum for testing.""" + + FOO = "foo" + + +class Model(BaseModel): + """Model used for testing.""" + + test: Annotated[str | None, LaxStr] + + +@pytest.mark.parametrize( + "provided, expected", + [ + ("foo", "foo"), + (5, "5"), + (1.0, "1.0"), + (Decimal(1.0), "1"), + (SomeEnum.FOO, "foo"), + (b"foo", "foo"), + (None, None), + ], +) +def test__handler(provided: object, expected: str) -> None: + """Test _handler.""" + assert Model.model_validate({"test": provided}).test == expected + + +@pytest.mark.parametrize( + "provided", + [{"foo": "bar"}, {"foo", "bar"}, ["foo", 
"bar"], ("foo", "bar")], +) +def test_raise_validation_error(provided: object) -> None: + """Test _handler unconverted.""" + with pytest.raises(ValidationError): + Model.model_validate({"test": provided}) diff --git a/tests/unit/utils/test_utils.py b/tests/unit/utils/test___init__.py similarity index 85% rename from tests/unit/utils/test_utils.py rename to tests/unit/utils/test___init__.py index aaa09fb0d..90563a9e1 100644 --- a/tests/unit/utils/test_utils.py +++ b/tests/unit/utils/test___init__.py @@ -1,24 +1,21 @@ """Test runway.utils.__init__.""" -# pyright: basic from __future__ import annotations -import datetime import hashlib import json import logging import os import string import sys +from contextlib import suppress from copy import deepcopy -from decimal import Decimal -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Any +from unittest.mock import MagicMock, patch import pytest -from mock import MagicMock, patch from runway.utils import ( - JsonEncoder, MutableMap, SafeHaven, argv, @@ -33,8 +30,8 @@ if TYPE_CHECKING: from pathlib import Path - from pytest import LogCaptureFixture, MonkeyPatch from pytest_mock import MockerFixture + from typing_extensions import TypeAlias MODULE = "runway.utils" VALUE = { @@ -46,23 +43,6 @@ } -class TestJsonEncoder: - """Test runway.utils.JsonEncoder.""" - - @pytest.mark.parametrize( - "provided, expected", [(datetime.datetime.now(), str), (Decimal("1.1"), float)] - ) - def test_supported_types(self, provided: Any, expected: type) -> None: - """Test encoding of supported data types.""" - assert isinstance(JsonEncoder().default(provided), expected) - - @pytest.mark.parametrize("provided", [(None)]) - def test_unsupported_types(self, provided: Any) -> None: - """Test encoding of unsupported data types.""" - with pytest.raises(TypeError): - assert not JsonEncoder().default(provided) - - class TestMutableMap: """Test for the custom MutableMap data type.""" @@ -121,21 +101,19 @@ def test_find_default(self) -> None: """Validate default value functionality.""" mute_map = MutableMap(**VALUE) - assert ( - mute_map.find("NOT_VALID", "default_val") == "default_val" - ), "default should be used" + assert mute_map.find("NOT_VALID", "default_val") == "default_val", "default should be used" assert ( mute_map.find("str_val", "default_val") == VALUE["str_val"] ), "default should be ignored" -TestParamsTypeDef = Optional[Union[Dict[str, str], List[str], str]] +TestParamsTypeDef: TypeAlias = "dict[str, str] | list[str] | str | None" class TestSafeHaven: """Test SafeHaven context manager.""" - TEST_PARAMS: List[TestParamsTypeDef] = [ + TEST_PARAMS: list[TestParamsTypeDef] = [ (None), ("string"), ({}), @@ -144,7 +122,7 @@ class TestSafeHaven: ] def test_context_manager_magic( - self, caplog: LogCaptureFixture, monkeypatch: MonkeyPatch + self, caplog: pytest.LogCaptureFixture, monkeypatch: pytest.MonkeyPatch ) -> None: """Test init and the attributes it sets.""" mock_reset_all = MagicMock() @@ -165,8 +143,8 @@ def test_context_manager_magic( def test_os_environ( self, provided: TestParamsTypeDef, - caplog: LogCaptureFixture, - monkeypatch: MonkeyPatch, + caplog: pytest.LogCaptureFixture, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test os.environ interactions.""" caplog.set_level(logging.DEBUG, "runway.SafeHaven") @@ -191,7 +169,7 @@ def test_os_environ( assert caplog.messages == expected_logs def test_reset_all( - self, caplog: LogCaptureFixture, monkeypatch: MonkeyPatch + self, caplog: 
pytest.LogCaptureFixture, monkeypatch: pytest.MonkeyPatch ) -> None: """Test reset_all.""" mock_method = MagicMock() @@ -218,8 +196,8 @@ def test_reset_all( def test_sys_argv( self, provided: TestParamsTypeDef, - caplog: LogCaptureFixture, - monkeypatch: MonkeyPatch, + caplog: pytest.LogCaptureFixture, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test sys.argv interactions.""" caplog.set_level(logging.DEBUG, "runway.SafeHaven") @@ -241,19 +219,16 @@ def test_sys_argv( assert caplog.messages == expected_logs def test_sys_modules( - self, caplog: LogCaptureFixture, monkeypatch: MonkeyPatch + self, caplog: pytest.LogCaptureFixture, monkeypatch: pytest.MonkeyPatch ) -> None: """Test sys.modules interactions.""" - caplog.set_level(logging.DEBUG, "runway.SafeHaven") + caplog.set_level(1, "runway.SafeHaven") monkeypatch.setattr(SafeHaven, "reset_all", MagicMock()) - orig_val = {} - for k, v in sys.modules.items(): - orig_val[k] = v + orig_val = dict(sys.modules) expected_logs = ["entering a safe haven...", "resetting sys.modules..."] with SafeHaven() as obj: - # pylint: disable=import-outside-toplevel from ..fixtures import mock_hooks # noqa: F401 assert sys.modules != orig_val @@ -262,7 +237,7 @@ def test_sys_modules( assert caplog.messages[:2] == expected_logs assert caplog.messages[-1] == "leaving the safe haven..." - def test_sys_modules_exclude(self, monkeypatch: MonkeyPatch) -> None: + def test_sys_modules_exclude(self, monkeypatch: pytest.MonkeyPatch) -> None: """Test sys.modules interactions with excluded module.""" monkeypatch.setattr(SafeHaven, "reset_all", MagicMock()) @@ -270,7 +245,6 @@ def test_sys_modules_exclude(self, monkeypatch: MonkeyPatch) -> None: assert module not in sys.modules with SafeHaven(sys_modules_exclude=[module]) as obj: - # pylint: disable=import-outside-toplevel from ..fixtures import mock_hooks # noqa: F401 assert module in sys.modules @@ -284,8 +258,8 @@ def test_sys_modules_exclude(self, monkeypatch: MonkeyPatch) -> None: def test_sys_path( self, provided: TestParamsTypeDef, - caplog: LogCaptureFixture, - monkeypatch: MonkeyPatch, + caplog: pytest.LogCaptureFixture, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test sys.path interactions.""" caplog.set_level(logging.DEBUG, "runway.SafeHaven") @@ -383,17 +357,15 @@ def test_load_object_from_string() -> None: assert load_object_from_string(obj_path, try_reload=True) == "us-west-2" -def test_load_object_from_string_reload_conditions(monkeypatch: MonkeyPatch) -> None: +def test_load_object_from_string_reload_conditions(monkeypatch: pytest.MonkeyPatch) -> None: """Test load_object_from_string reload conditions.""" mock_reload = MagicMock() monkeypatch.setattr("runway.utils.importlib.reload", mock_reload) builtin_test = "sys.version_info" mock_hook = "tests.unit.fixtures.mock_hooks.GLOBAL_VALUE" - try: + with suppress(Exception): del sys.modules["tests.unit.fixtures.mock_hooks"] - except: # noqa pylint: disable=bare-except - pass load_object_from_string(builtin_test, try_reload=False) mock_reload.assert_not_called() diff --git a/tests/unit/utils/test__file_hash.py b/tests/unit/utils/test__file_hash.py index a4dcbc154..e8bc2f4b7 100644 --- a/tests/unit/utils/test__file_hash.py +++ b/tests/unit/utils/test__file_hash.py @@ -1,6 +1,5 @@ """Test runway.utils._file_hash.""" -# pyright: basic from __future__ import annotations import hashlib diff --git a/typings/awacs/__init__.pyi b/typings/awacs/__init__.pyi index 40cd9c366..9f6b488b4 100644 --- a/typings/awacs/__init__.pyi +++ b/typings/awacs/__init__.pyi @@ -4,7 
+4,7 @@ from __future__ import annotations import json import re -from typing import Any, Dict, Optional, Union +from typing import Any __version__ = "1.0.1" valid_names = re.compile(r"^[a-zA-Z0-9]+$") @@ -14,15 +14,15 @@ class AWSObject(object): self, name: str, type: Any = ..., - dictname: Optional[str] = ..., - props: Dict[str, Any] = ..., + dictname: str | None = ..., + props: dict[str, Any] = ..., **kwargs: Any, ) -> None: ... def __getattr__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... def validate(self) -> None: ... - def JSONrepr(self) -> Dict[str, Any]: ... - def to_json(self, indent: Optional[int] = ..., sort_keys: bool = ...) -> str: ... + def JSONrepr(self) -> dict[str, Any]: ... + def to_json(self, indent: int | None = ..., sort_keys: bool = ...) -> str: ... def __eq__(self, other: Any) -> bool: ... def __ne__(self, other: Any) -> bool: ... def __hash__(self) -> int: ... @@ -37,8 +37,8 @@ class AWSProperty(AWSObject): def __init__(self, **kwargs: Any) -> None: ... class AWSHelperFn(object): - def getdata(self, data: object) -> Union[str, object]: ... - def to_json(self, indent: Optional[int] = ..., sort_keys: bool = ...) -> str: ... + def getdata(self, data: object) -> str | object: ... + def to_json(self, indent: int | None = ..., sort_keys: bool = ...) -> str: ... def __eq__(self, other: Any) -> bool: ... def __ne__(self, other: Any) -> bool: ... def __hash__(self) -> int: ... diff --git a/typings/awacs/acm_pca.pyi b/typings/awacs/acm_pca.pyi index 267cdcf4e..9766f1b1b 100644 --- a/typings/awacs/acm_pca.pyi +++ b/typings/awacs/acm_pca.pyi @@ -21,9 +21,7 @@ DeleteCertificateAuthority = Action("DeleteCertificateAuthority") DeletePermission = Action("DeletePermission") DeletePolicy = Action("DeletePolicy") DescribeCertificateAuthority = Action("DescribeCertificateAuthority") -DescribeCertificateAuthorityAuditReport = Action( - "DescribeCertificateAuthorityAuditReport" -) +DescribeCertificateAuthorityAuditReport = Action("DescribeCertificateAuthorityAuditReport") GetCertificate = Action("GetCertificate") GetCertificateAuthorityCertificate = Action("GetCertificateAuthorityCertificate") GetCertificateAuthorityCsr = Action("GetCertificateAuthorityCsr") diff --git a/typings/awacs/aws.pyi b/typings/awacs/aws.pyi index 059f5223b..2b6421e08 100644 --- a/typings/awacs/aws.pyi +++ b/typings/awacs/aws.pyi @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, List, Union +from typing import TYPE_CHECKING, Any from . import AWSHelperFn, AWSProperty @@ -48,7 +48,7 @@ class ARN(BaseARN): class ConditionElement(AWSHelperFn): def __init__( self, - data: Union[Dict[str, Any], str], + data: dict[str, Any] | str, value: Any = ..., ) -> None: """Create a ConditionElement @@ -68,7 +68,7 @@ class ConditionElement(AWSHelperFn): """ ... - def get_dict(self) -> Dict[str, Union[List[str], str]]: ... + def get_dict(self) -> dict[str, list[str] | str]: ... class Condition(AWSHelperFn): def __init__(self, conditions) -> None: ... @@ -79,26 +79,26 @@ class Principal(AWSHelperFn): def __init__( self, principal: str, - resources: Union[ - List[str], - List[troposphere.AWSHelperFn], - List[Union[str, troposphere.AWSHelperFn]], - str, - troposphere.AWSHelperFn, - ] = ..., + resources: ( + list[str] + | list[troposphere.AWSHelperFn] + | list[str | troposphere.AWSHelperFn] + | str + | troposphere.AWSHelperFn + ) = ..., ) -> None: ... def JSONrepr(self): ... 
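[Editor's note] The stub changes above, and in the rest of typings/awacs/ below, are one mechanical typing migration: typing.Optional/Union/Dict/List become PEP 604 unions and built-in generics. Stub files accept the new syntax on any interpreter; the runtime modules in this diff keep it legal on older Pythons either with `from __future__ import annotations` or by quoting value-position aliases (TestParamsTypeDef in tests/unit/utils/test___init__.py is a string precisely because a bare `|` union outside an annotation would be evaluated at runtime). A minimal before/after sketch with illustrative signatures only:

from __future__ import annotations  # lets `X | Y` appear in annotations before Python 3.10

from typing import Any

# Before: def to_json(self, indent: Optional[int] = ..., sort_keys: bool = ...) -> str: ...
def to_json(indent: int | None = None, sort_keys: bool = False) -> str:
    """Illustrative only; mirrors the signature style used in the stubs above."""
    return ""

# Before: def JSONrepr(self) -> Dict[str, Any]: ...
def json_repr() -> dict[str, Any]:
    """Built-in generics replace their typing.* aliases."""
    return {}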
class AWSPrincipal(Principal): def __init__( self, - principals: Union[ - List[str], - List[troposphere.AWSHelperFn], - List[Union[str, troposphere.AWSHelperFn]], - str, - troposphere.AWSHelperFn, - ], + principals: ( + list[str] + | list[troposphere.AWSHelperFn] + | list[str | troposphere.AWSHelperFn] + | str + | troposphere.AWSHelperFn + ), ) -> None: ... def effect(x): ... diff --git a/typings/awacs/aws_marketplace.pyi b/typings/awacs/aws_marketplace.pyi index 98a395cf3..4c390335d 100644 --- a/typings/awacs/aws_marketplace.pyi +++ b/typings/awacs/aws_marketplace.pyi @@ -15,9 +15,7 @@ class ARN(BaseARN): def __init__(self, resource=..., region=..., account=...) -> None: ... AcceptAgreementApprovalRequest = Action("AcceptAgreementApprovalRequest") -AssociateProductsWithPrivateMarketplace = Action( - "AssociateProductsWithPrivateMarketplace" -) +AssociateProductsWithPrivateMarketplace = Action("AssociateProductsWithPrivateMarketplace") BatchMeterUsage = Action("BatchMeterUsage") CancelAgreementRequest = Action("CancelAgreementRequest") CancelChangeSet = Action("CancelChangeSet") @@ -34,13 +32,9 @@ DescribePrivateMarketplaceProfile = Action("DescribePrivateMarketplaceProfile") DescribePrivateMarketplaceRequests = Action("DescribePrivateMarketplaceRequests") DescribePrivateMarketplaceSettings = Action("DescribePrivateMarketplaceSettings") DescribePrivateMarketplaceStatus = Action("DescribePrivateMarketplaceStatus") -DescribeProcurementSystemConfiguration = Action( - "DescribeProcurementSystemConfiguration" -) +DescribeProcurementSystemConfiguration = Action("DescribeProcurementSystemConfiguration") DescribeTask = Action("DescribeTask") -DisassociateProductsFromPrivateMarketplace = Action( - "DisassociateProductsFromPrivateMarketplace" -) +DisassociateProductsFromPrivateMarketplace = Action("DisassociateProductsFromPrivateMarketplace") GetAgreementApprovalRequest = Action("GetAgreementApprovalRequest") GetAgreementRequest = Action("GetAgreementRequest") GetAgreementTerms = Action("GetAgreementTerms") diff --git a/typings/awacs/chime.pyi b/typings/awacs/chime.pyi index 958bf2c23..3d10fe81b 100644 --- a/typings/awacs/chime.pyi +++ b/typings/awacs/chime.pyi @@ -19,15 +19,11 @@ ActivateUsers = Action("ActivateUsers") AddDomain = Action("AddDomain") AddOrUpdateGroups = Action("AddOrUpdateGroups") AssociatePhoneNumberWithUser = Action("AssociatePhoneNumberWithUser") -AssociatePhoneNumbersWithVoiceConnector = Action( - "AssociatePhoneNumbersWithVoiceConnector" -) +AssociatePhoneNumbersWithVoiceConnector = Action("AssociatePhoneNumbersWithVoiceConnector") AssociatePhoneNumbersWithVoiceConnectorGroup = Action( "AssociatePhoneNumbersWithVoiceConnectorGroup" ) -AssociateSigninDelegateGroupsWithAccount = Action( - "AssociateSigninDelegateGroupsWithAccount" -) +AssociateSigninDelegateGroupsWithAccount = Action("AssociateSigninDelegateGroupsWithAccount") AuthorizeDirectory = Action("AuthorizeDirectory") BatchCreateAttendee = Action("BatchCreateAttendee") BatchCreateRoomMembership = Action("BatchCreateRoomMembership") @@ -73,23 +69,15 @@ DeleteVoiceConnectorEmergencyCallingConfiguration = Action( DeleteVoiceConnectorGroup = Action("DeleteVoiceConnectorGroup") DeleteVoiceConnectorOrigination = Action("DeleteVoiceConnectorOrigination") DeleteVoiceConnectorProxy = Action("DeleteVoiceConnectorProxy") -DeleteVoiceConnectorStreamingConfiguration = Action( - "DeleteVoiceConnectorStreamingConfiguration" -) +DeleteVoiceConnectorStreamingConfiguration = Action("DeleteVoiceConnectorStreamingConfiguration") 
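[Editor's note] For readers skimming these generated stubs, the module-level Action constants are the things being typed; awacs composes IAM policy documents from them. A small usage sketch with real awacs names (the policy contents are hypothetical), relying only on the Action and to_json interfaces shown in typings/awacs/__init__.pyi above:

from awacs import s3
from awacs.aws import Allow, Policy, Statement

# Compose a minimal IAM policy from typed Action constants.
policy = Policy(
    Version="2012-10-17",
    Statement=[
        Statement(
            Effect=Allow,
            Action=[s3.GetObject],
            Resource=["arn:aws:s3:::example-bucket/*"],  # illustrative ARN
        )
    ],
)
print(policy.to_json())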
DeleteVoiceConnectorTermination = Action("DeleteVoiceConnectorTermination") -DeleteVoiceConnectorTerminationCredentials = Action( - "DeleteVoiceConnectorTerminationCredentials" -) +DeleteVoiceConnectorTerminationCredentials = Action("DeleteVoiceConnectorTerminationCredentials") DisassociatePhoneNumberFromUser = Action("DisassociatePhoneNumberFromUser") -DisassociatePhoneNumbersFromVoiceConnector = Action( - "DisassociatePhoneNumbersFromVoiceConnector" -) +DisassociatePhoneNumbersFromVoiceConnector = Action("DisassociatePhoneNumbersFromVoiceConnector") DisassociatePhoneNumbersFromVoiceConnectorGroup = Action( "DisassociatePhoneNumbersFromVoiceConnectorGroup" ) -DisassociateSigninDelegateGroupsFromAccount = Action( - "DisassociateSigninDelegateGroupsFromAccount" -) +DisassociateSigninDelegateGroupsFromAccount = Action("DisassociateSigninDelegateGroupsFromAccount") DisconnectDirectory = Action("DisconnectDirectory") GetAccount = Action("GetAccount") GetAccountResource = Action("GetAccountResource") @@ -122,9 +110,7 @@ GetVoiceConnectorGroup = Action("GetVoiceConnectorGroup") GetVoiceConnectorLoggingConfiguration = Action("GetVoiceConnectorLoggingConfiguration") GetVoiceConnectorOrigination = Action("GetVoiceConnectorOrigination") GetVoiceConnectorProxy = Action("GetVoiceConnectorProxy") -GetVoiceConnectorStreamingConfiguration = Action( - "GetVoiceConnectorStreamingConfiguration" -) +GetVoiceConnectorStreamingConfiguration = Action("GetVoiceConnectorStreamingConfiguration") GetVoiceConnectorTermination = Action("GetVoiceConnectorTermination") GetVoiceConnectorTerminationHealth = Action("GetVoiceConnectorTerminationHealth") InviteDelegate = Action("InviteDelegate") @@ -154,9 +140,7 @@ ListRooms = Action("ListRooms") ListTagsForResource = Action("ListTagsForResource") ListUsers = Action("ListUsers") ListVoiceConnectorGroups = Action("ListVoiceConnectorGroups") -ListVoiceConnectorTerminationCredentials = Action( - "ListVoiceConnectorTerminationCredentials" -) +ListVoiceConnectorTerminationCredentials = Action("ListVoiceConnectorTerminationCredentials") ListVoiceConnectors = Action("ListVoiceConnectors") LogoutUser = Action("LogoutUser") PutEventsConfiguration = Action("PutEventsConfiguration") @@ -167,13 +151,9 @@ PutVoiceConnectorEmergencyCallingConfiguration = Action( PutVoiceConnectorLoggingConfiguration = Action("PutVoiceConnectorLoggingConfiguration") PutVoiceConnectorOrigination = Action("PutVoiceConnectorOrigination") PutVoiceConnectorProxy = Action("PutVoiceConnectorProxy") -PutVoiceConnectorStreamingConfiguration = Action( - "PutVoiceConnectorStreamingConfiguration" -) +PutVoiceConnectorStreamingConfiguration = Action("PutVoiceConnectorStreamingConfiguration") PutVoiceConnectorTermination = Action("PutVoiceConnectorTermination") -PutVoiceConnectorTerminationCredentials = Action( - "PutVoiceConnectorTerminationCredentials" -) +PutVoiceConnectorTerminationCredentials = Action("PutVoiceConnectorTerminationCredentials") RedactConversationMessage = Action("RedactConversationMessage") RedactRoomMessage = Action("RedactRoomMessage") RegenerateSecurityToken = Action("RegenerateSecurityToken") diff --git a/typings/awacs/cloudfront.pyi b/typings/awacs/cloudfront.pyi index 413cf87f4..f3da6f29d 100644 --- a/typings/awacs/cloudfront.pyi +++ b/typings/awacs/cloudfront.pyi @@ -36,9 +36,7 @@ DeleteStreamingDistribution = Action("DeleteStreamingDistribution") GetCachePolicy = Action("GetCachePolicy") GetCachePolicyConfig = Action("GetCachePolicyConfig") GetCloudFrontOriginAccessIdentity = 
Action("GetCloudFrontOriginAccessIdentity") -GetCloudFrontOriginAccessIdentityConfig = Action( - "GetCloudFrontOriginAccessIdentityConfig" -) +GetCloudFrontOriginAccessIdentityConfig = Action("GetCloudFrontOriginAccessIdentityConfig") GetDistribution = Action("GetDistribution") GetDistributionConfig = Action("GetDistributionConfig") GetFieldLevelEncryption = Action("GetFieldLevelEncryption") @@ -57,9 +55,7 @@ ListCloudFrontOriginAccessIdentities = Action("ListCloudFrontOriginAccessIdentit ListDistributions = Action("ListDistributions") ListDistributionsByCachePolicyId = Action("ListDistributionsByCachePolicyId") ListDistributionsByLambdaFunction = Action("ListDistributionsByLambdaFunction") -ListDistributionsByOriginRequestPolicyId = Action( - "ListDistributionsByOriginRequestPolicyId" -) +ListDistributionsByOriginRequestPolicyId = Action("ListDistributionsByOriginRequestPolicyId") ListDistributionsByWebACLId = Action("ListDistributionsByWebACLId") ListFieldLevelEncryptionConfigs = Action("ListFieldLevelEncryptionConfigs") ListFieldLevelEncryptionProfiles = Action("ListFieldLevelEncryptionProfiles") diff --git a/typings/awacs/codecommit.pyi b/typings/awacs/codecommit.pyi index f2309c7da..87e8ef117 100644 --- a/typings/awacs/codecommit.pyi +++ b/typings/awacs/codecommit.pyi @@ -14,9 +14,7 @@ class Action(BaseAction): class ARN(BaseARN): def __init__(self, resource=..., region=..., account=...) -> None: ... -AssociateApprovalRuleTemplateWithRepository = Action( - "AssociateApprovalRuleTemplateWithRepository" -) +AssociateApprovalRuleTemplateWithRepository = Action("AssociateApprovalRuleTemplateWithRepository") BatchAssociateApprovalRuleTemplateWithRepositories = Action( "BatchAssociateApprovalRuleTemplateWithRepositories" ) @@ -81,9 +79,7 @@ ListAssociatedApprovalRuleTemplatesForRepository = Action( ListBranches = Action("ListBranches") ListPullRequests = Action("ListPullRequests") ListRepositories = Action("ListRepositories") -ListRepositoriesForApprovalRuleTemplate = Action( - "ListRepositoriesForApprovalRuleTemplate" -) +ListRepositoriesForApprovalRuleTemplate = Action("ListRepositoriesForApprovalRuleTemplate") ListTagsForResource = Action("ListTagsForResource") MergeBranchesByFastForward = Action("MergeBranchesByFastForward") MergeBranchesBySquash = Action("MergeBranchesBySquash") diff --git a/typings/awacs/config.pyi b/typings/awacs/config.pyi index 4ff9b853c..4cfd0ab2b 100644 --- a/typings/awacs/config.pyi +++ b/typings/awacs/config.pyi @@ -31,9 +31,7 @@ DeleteRemediationExceptions = Action("DeleteRemediationExceptions") DeleteResourceConfig = Action("DeleteResourceConfig") DeleteRetentionConfiguration = Action("DeleteRetentionConfiguration") DeliverConfigSnapshot = Action("DeliverConfigSnapshot") -DescribeAggregateComplianceByConfigRules = Action( - "DescribeAggregateComplianceByConfigRules" -) +DescribeAggregateComplianceByConfigRules = Action("DescribeAggregateComplianceByConfigRules") DescribeAggregationAuthorizations = Action("DescribeAggregationAuthorizations") DescribeComplianceByConfigRule = Action("DescribeComplianceByConfigRule") DescribeComplianceByResource = Action("DescribeComplianceByResource") @@ -50,25 +48,17 @@ DescribeConformancePackStatus = Action("DescribeConformancePackStatus") DescribeConformancePacks = Action("DescribeConformancePacks") DescribeDeliveryChannelStatus = Action("DescribeDeliveryChannelStatus") DescribeDeliveryChannels = Action("DescribeDeliveryChannels") -DescribeOrganizationConfigRuleStatuses = Action( - "DescribeOrganizationConfigRuleStatuses" -) 
+DescribeOrganizationConfigRuleStatuses = Action("DescribeOrganizationConfigRuleStatuses") DescribeOrganizationConfigRules = Action("DescribeOrganizationConfigRules") -DescribeOrganizationConformancePackStatuses = Action( - "DescribeOrganizationConformancePackStatuses" -) +DescribeOrganizationConformancePackStatuses = Action("DescribeOrganizationConformancePackStatuses") DescribeOrganizationConformancePacks = Action("DescribeOrganizationConformancePacks") DescribePendingAggregationRequests = Action("DescribePendingAggregationRequests") DescribeRemediationConfigurations = Action("DescribeRemediationConfigurations") DescribeRemediationExceptions = Action("DescribeRemediationExceptions") DescribeRemediationExecutionStatus = Action("DescribeRemediationExecutionStatus") DescribeRetentionConfigurations = Action("DescribeRetentionConfigurations") -GetAggregateComplianceDetailsByConfigRule = Action( - "GetAggregateComplianceDetailsByConfigRule" -) -GetAggregateConfigRuleComplianceSummary = Action( - "GetAggregateConfigRuleComplianceSummary" -) +GetAggregateComplianceDetailsByConfigRule = Action("GetAggregateComplianceDetailsByConfigRule") +GetAggregateConfigRuleComplianceSummary = Action("GetAggregateConfigRuleComplianceSummary") GetAggregateDiscoveredResourceCounts = Action("GetAggregateDiscoveredResourceCounts") GetAggregateResourceConfig = Action("GetAggregateResourceConfig") GetComplianceDetailsByConfigRule = Action("GetComplianceDetailsByConfigRule") @@ -78,9 +68,7 @@ GetComplianceSummaryByResourceType = Action("GetComplianceSummaryByResourceType" GetConformancePackComplianceDetails = Action("GetConformancePackComplianceDetails") GetConformancePackComplianceSummary = Action("GetConformancePackComplianceSummary") GetDiscoveredResourceCounts = Action("GetDiscoveredResourceCounts") -GetOrganizationConfigRuleDetailedStatus = Action( - "GetOrganizationConfigRuleDetailedStatus" -) +GetOrganizationConfigRuleDetailedStatus = Action("GetOrganizationConfigRuleDetailedStatus") GetOrganizationConformancePackDetailedStatus = Action( "GetOrganizationConformancePackDetailedStatus" ) diff --git a/typings/awacs/connect.pyi b/typings/awacs/connect.pyi index 70ec992d5..f4d529b45 100644 --- a/typings/awacs/connect.pyi +++ b/typings/awacs/connect.pyi @@ -59,9 +59,7 @@ UpdateContactAttributes = Action("UpdateContactAttributes") UpdateContactFlowContent = Action("UpdateContactFlowContent") UpdateContactFlowName = Action("UpdateContactFlowName") UpdateRoutingProfileConcurrency = Action("UpdateRoutingProfileConcurrency") -UpdateRoutingProfileDefaultOutboundQueue = Action( - "UpdateRoutingProfileDefaultOutboundQueue" -) +UpdateRoutingProfileDefaultOutboundQueue = Action("UpdateRoutingProfileDefaultOutboundQueue") UpdateRoutingProfileName = Action("UpdateRoutingProfileName") UpdateRoutingProfileQueues = Action("UpdateRoutingProfileQueues") UpdateUserHierarchy = Action("UpdateUserHierarchy") diff --git a/typings/awacs/deepracer.pyi b/typings/awacs/deepracer.pyi index 2e96c5cf3..4949c27e6 100644 --- a/typings/awacs/deepracer.pyi +++ b/typings/awacs/deepracer.pyi @@ -38,7 +38,5 @@ ListTrainingJobs = Action("ListTrainingJobs") SetAlias = Action("SetAlias") StartEvaluation = Action("StartEvaluation") StopEvaluation = Action("StopEvaluation") -StopTrainingReinforcementLearningModel = Action( - "StopTrainingReinforcementLearningModel" -) +StopTrainingReinforcementLearningModel = Action("StopTrainingReinforcementLearningModel") TestRewardFunction = Action("TestRewardFunction") diff --git a/typings/awacs/directconnect.pyi 
b/typings/awacs/directconnect.pyi index ee81ae998..74ccf62e8 100644 --- a/typings/awacs/directconnect.pyi +++ b/typings/awacs/directconnect.pyi @@ -57,12 +57,8 @@ DescribeConnectionsOnInterconnect = Action("DescribeConnectionsOnInterconnect") DescribeDirectConnectGatewayAssociationProposals = Action( "DescribeDirectConnectGatewayAssociationProposals" ) -DescribeDirectConnectGatewayAssociations = Action( - "DescribeDirectConnectGatewayAssociations" -) -DescribeDirectConnectGatewayAttachments = Action( - "DescribeDirectConnectGatewayAttachments" -) +DescribeDirectConnectGatewayAssociations = Action("DescribeDirectConnectGatewayAssociations") +DescribeDirectConnectGatewayAttachments = Action("DescribeDirectConnectGatewayAttachments") DescribeDirectConnectGateways = Action("DescribeDirectConnectGateways") DescribeHostedConnections = Action("DescribeHostedConnections") DescribeInterconnectLoa = Action("DescribeInterconnectLoa") diff --git a/typings/awacs/discovery.pyi b/typings/awacs/discovery.pyi index d84b085d6..8ad928255 100644 --- a/typings/awacs/discovery.pyi +++ b/typings/awacs/discovery.pyi @@ -14,9 +14,7 @@ class Action(BaseAction): class ARN(BaseARN): def __init__(self, resource=..., region=..., account=...) -> None: ... -AssociateConfigurationItemsToApplication = Action( - "AssociateConfigurationItemsToApplication" -) +AssociateConfigurationItemsToApplication = Action("AssociateConfigurationItemsToApplication") BatchDeleteImportData = Action("BatchDeleteImportData") CreateApplication = Action("CreateApplication") CreateTags = Action("CreateTags") diff --git a/typings/awacs/dms.pyi b/typings/awacs/dms.pyi index 4b3027aec..1cbcf4c0a 100644 --- a/typings/awacs/dms.pyi +++ b/typings/awacs/dms.pyi @@ -40,9 +40,7 @@ DescribeRefreshSchemasStatus = Action("DescribeRefreshSchemasStatus") DescribeReplicationInstanceTaskLogs = Action("DescribeReplicationInstanceTaskLogs") DescribeReplicationInstances = Action("DescribeReplicationInstances") DescribeReplicationSubnetGroups = Action("DescribeReplicationSubnetGroups") -DescribeReplicationTaskAssessmentResults = Action( - "DescribeReplicationTaskAssessmentResults" -) +DescribeReplicationTaskAssessmentResults = Action("DescribeReplicationTaskAssessmentResults") DescribeReplicationTasks = Action("DescribeReplicationTasks") DescribeSchemas = Action("DescribeSchemas") DescribeTableStatistics = Action("DescribeTableStatistics") diff --git a/typings/awacs/ec2.pyi b/typings/awacs/ec2.pyi index 4b1f50509..8050a6021 100644 --- a/typings/awacs/ec2.pyi +++ b/typings/awacs/ec2.pyi @@ -22,9 +22,7 @@ AcceptVpcPeeringConnection = Action("AcceptVpcPeeringConnection") AdvertiseByoipCidr = Action("AdvertiseByoipCidr") AllocateAddress = Action("AllocateAddress") AllocateHosts = Action("AllocateHosts") -ApplySecurityGroupsToClientVpnTargetNetwork = Action( - "ApplySecurityGroupsToClientVpnTargetNetwork" -) +ApplySecurityGroupsToClientVpnTargetNetwork = Action("ApplySecurityGroupsToClientVpnTargetNetwork") AssignIpv6Addresses = Action("AssignIpv6Addresses") AssignPrivateIpAddresses = Action("AssignPrivateIpAddresses") AssociateAddress = Action("AssociateAddress") @@ -33,9 +31,7 @@ AssociateDhcpOptions = Action("AssociateDhcpOptions") AssociateIamInstanceProfile = Action("AssociateIamInstanceProfile") AssociateRouteTable = Action("AssociateRouteTable") AssociateSubnetCidrBlock = Action("AssociateSubnetCidrBlock") -AssociateTransitGatewayMulticastDomain = Action( - "AssociateTransitGatewayMulticastDomain" -) +AssociateTransitGatewayMulticastDomain = 
Action("AssociateTransitGatewayMulticastDomain") AssociateTransitGatewayRouteTable = Action("AssociateTransitGatewayRouteTable") AssociateVpcCidrBlock = Action("AssociateVpcCidrBlock") AttachClassicLinkVpc = Action("AttachClassicLinkVpc") @@ -78,9 +74,7 @@ CreateKeyPair = Action("CreateKeyPair") CreateLaunchTemplate = Action("CreateLaunchTemplate") CreateLaunchTemplateVersion = Action("CreateLaunchTemplateVersion") CreateLocalGatewayRoute = Action("CreateLocalGatewayRoute") -CreateLocalGatewayRouteTableVpcAssociation = Action( - "CreateLocalGatewayRouteTableVpcAssociation" -) +CreateLocalGatewayRouteTableVpcAssociation = Action("CreateLocalGatewayRouteTableVpcAssociation") CreateManagedPrefixList = Action("CreateManagedPrefixList") CreateNatGateway = Action("CreateNatGateway") CreateNetworkAcl = Action("CreateNetworkAcl") @@ -104,18 +98,14 @@ CreateTrafficMirrorTarget = Action("CreateTrafficMirrorTarget") CreateTransitGateway = Action("CreateTransitGateway") CreateTransitGatewayMulticastDomain = Action("CreateTransitGatewayMulticastDomain") CreateTransitGatewayPeeringAttachment = Action("CreateTransitGatewayPeeringAttachment") -CreateTransitGatewayPrefixListReference = Action( - "CreateTransitGatewayPrefixListReference" -) +CreateTransitGatewayPrefixListReference = Action("CreateTransitGatewayPrefixListReference") CreateTransitGatewayRoute = Action("CreateTransitGatewayRoute") CreateTransitGatewayRouteTable = Action("CreateTransitGatewayRouteTable") CreateTransitGatewayVpcAttachment = Action("CreateTransitGatewayVpcAttachment") CreateVolume = Action("CreateVolume") CreateVpc = Action("CreateVpc") CreateVpcEndpoint = Action("CreateVpcEndpoint") -CreateVpcEndpointConnectionNotification = Action( - "CreateVpcEndpointConnectionNotification" -) +CreateVpcEndpointConnectionNotification = Action("CreateVpcEndpointConnectionNotification") CreateVpcEndpointServiceConfiguration = Action("CreateVpcEndpointServiceConfiguration") CreateVpcPeeringConnection = Action("CreateVpcPeeringConnection") CreateVpnConnection = Action("CreateVpnConnection") @@ -135,9 +125,7 @@ DeleteKeyPair = Action("DeleteKeyPair") DeleteLaunchTemplate = Action("DeleteLaunchTemplate") DeleteLaunchTemplateVersions = Action("DeleteLaunchTemplateVersions") DeleteLocalGatewayRoute = Action("DeleteLocalGatewayRoute") -DeleteLocalGatewayRouteTableVpcAssociation = Action( - "DeleteLocalGatewayRouteTableVpcAssociation" -) +DeleteLocalGatewayRouteTableVpcAssociation = Action("DeleteLocalGatewayRouteTableVpcAssociation") DeleteManagedPrefixList = Action("DeleteManagedPrefixList") DeleteNatGateway = Action("DeleteNatGateway") DeleteNetworkAcl = Action("DeleteNetworkAcl") @@ -159,20 +147,14 @@ DeleteTrafficMirrorTarget = Action("DeleteTrafficMirrorTarget") DeleteTransitGateway = Action("DeleteTransitGateway") DeleteTransitGatewayMulticastDomain = Action("DeleteTransitGatewayMulticastDomain") DeleteTransitGatewayPeeringAttachment = Action("DeleteTransitGatewayPeeringAttachment") -DeleteTransitGatewayPrefixListReference = Action( - "DeleteTransitGatewayPrefixListReference" -) +DeleteTransitGatewayPrefixListReference = Action("DeleteTransitGatewayPrefixListReference") DeleteTransitGatewayRoute = Action("DeleteTransitGatewayRoute") DeleteTransitGatewayRouteTable = Action("DeleteTransitGatewayRouteTable") DeleteTransitGatewayVpcAttachment = Action("DeleteTransitGatewayVpcAttachment") DeleteVolume = Action("DeleteVolume") DeleteVpc = Action("DeleteVpc") -DeleteVpcEndpointConnectionNotifications = Action( - 
"DeleteVpcEndpointConnectionNotifications" -) -DeleteVpcEndpointServiceConfigurations = Action( - "DeleteVpcEndpointServiceConfigurations" -) +DeleteVpcEndpointConnectionNotifications = Action("DeleteVpcEndpointConnectionNotifications") +DeleteVpcEndpointServiceConfigurations = Action("DeleteVpcEndpointServiceConfigurations") DeleteVpcEndpoints = Action("DeleteVpcEndpoints") DeleteVpcPeeringConnection = Action("DeleteVpcPeeringConnection") DeleteVpnConnection = Action("DeleteVpnConnection") @@ -221,9 +203,7 @@ DescribeFpgaImages = Action("DescribeFpgaImages") DescribeHostReservationOfferings = Action("DescribeHostReservationOfferings") DescribeHostReservations = Action("DescribeHostReservations") DescribeHosts = Action("DescribeHosts") -DescribeIamInstanceProfileAssociations = Action( - "DescribeIamInstanceProfileAssociations" -) +DescribeIamInstanceProfileAssociations = Action("DescribeIamInstanceProfileAssociations") DescribeIdFormat = Action("DescribeIdFormat") DescribeIdentityIdFormat = Action("DescribeIdentityIdFormat") DescribeImageAttribute = Action("DescribeImageAttribute") @@ -232,9 +212,7 @@ DescribeImportImageTasks = Action("DescribeImportImageTasks") DescribeImportSnapshotTasks = Action("DescribeImportSnapshotTasks") DescribeInstanceAttribute = Action("DescribeInstanceAttribute") DescribeInstanceCreditSpecifications = Action("DescribeInstanceCreditSpecifications") -DescribeInstanceEventNotificationAttributes = Action( - "DescribeInstanceEventNotificationAttributes" -) +DescribeInstanceEventNotificationAttributes = Action("DescribeInstanceEventNotificationAttributes") DescribeInstanceStatus = Action("DescribeInstanceStatus") DescribeInstanceTypeOfferings = Action("DescribeInstanceTypeOfferings") DescribeInstanceTypes = Action("DescribeInstanceTypes") @@ -250,9 +228,7 @@ DescribeLocalGatewayRouteTableVpcAssociations = Action( "DescribeLocalGatewayRouteTableVpcAssociations" ) DescribeLocalGatewayRouteTables = Action("DescribeLocalGatewayRouteTables") -DescribeLocalGatewayVirtualInterfaceGroups = Action( - "DescribeLocalGatewayVirtualInterfaceGroups" -) +DescribeLocalGatewayVirtualInterfaceGroups = Action("DescribeLocalGatewayVirtualInterfaceGroups") DescribeLocalGatewayVirtualInterfaces = Action("DescribeLocalGatewayVirtualInterfaces") DescribeLocalGateways = Action("DescribeLocalGateways") DescribeManagedPrefixLists = Action("DescribeManagedPrefixLists") @@ -269,9 +245,7 @@ DescribePublicIpv4Pools = Action("DescribePublicIpv4Pools") DescribeRegions = Action("DescribeRegions") DescribeReservedInstances = Action("DescribeReservedInstances") DescribeReservedInstancesListings = Action("DescribeReservedInstancesListings") -DescribeReservedInstancesModifications = Action( - "DescribeReservedInstancesModifications" -) +DescribeReservedInstancesModifications = Action("DescribeReservedInstancesModifications") DescribeReservedInstancesOfferings = Action("DescribeReservedInstancesOfferings") DescribeRouteTables = Action("DescribeRouteTables") DescribeScheduledInstanceAvailability = Action("DescribeScheduledInstanceAvailability") @@ -293,12 +267,8 @@ DescribeTrafficMirrorFilters = Action("DescribeTrafficMirrorFilters") DescribeTrafficMirrorSessions = Action("DescribeTrafficMirrorSessions") DescribeTrafficMirrorTargets = Action("DescribeTrafficMirrorTargets") DescribeTransitGatewayAttachments = Action("DescribeTransitGatewayAttachments") -DescribeTransitGatewayMulticastDomains = Action( - "DescribeTransitGatewayMulticastDomains" -) -DescribeTransitGatewayPeeringAttachments = Action( - 
"DescribeTransitGatewayPeeringAttachments" -) +DescribeTransitGatewayMulticastDomains = Action("DescribeTransitGatewayMulticastDomains") +DescribeTransitGatewayPeeringAttachments = Action("DescribeTransitGatewayPeeringAttachments") DescribeTransitGatewayRouteTables = Action("DescribeTransitGatewayRouteTables") DescribeTransitGatewayVpcAttachments = Action("DescribeTransitGatewayVpcAttachments") DescribeTransitGateways = Action("DescribeTransitGateways") @@ -309,13 +279,9 @@ DescribeVolumesModifications = Action("DescribeVolumesModifications") DescribeVpcAttribute = Action("DescribeVpcAttribute") DescribeVpcClassicLink = Action("DescribeVpcClassicLink") DescribeVpcClassicLinkDnsSupport = Action("DescribeVpcClassicLinkDnsSupport") -DescribeVpcEndpointConnectionNotifications = Action( - "DescribeVpcEndpointConnectionNotifications" -) +DescribeVpcEndpointConnectionNotifications = Action("DescribeVpcEndpointConnectionNotifications") DescribeVpcEndpointConnections = Action("DescribeVpcEndpointConnections") -DescribeVpcEndpointServiceConfigurations = Action( - "DescribeVpcEndpointServiceConfigurations" -) +DescribeVpcEndpointServiceConfigurations = Action("DescribeVpcEndpointServiceConfigurations") DescribeVpcEndpointServicePermissions = Action("DescribeVpcEndpointServicePermissions") DescribeVpcEndpointServices = Action("DescribeVpcEndpointServices") DescribeVpcEndpoints = Action("DescribeVpcEndpoints") @@ -330,9 +296,7 @@ DetachVolume = Action("DetachVolume") DetachVpnGateway = Action("DetachVpnGateway") DisableEbsEncryptionByDefault = Action("DisableEbsEncryptionByDefault") DisableFastSnapshotRestores = Action("DisableFastSnapshotRestores") -DisableTransitGatewayRouteTablePropagation = Action( - "DisableTransitGatewayRouteTablePropagation" -) +DisableTransitGatewayRouteTablePropagation = Action("DisableTransitGatewayRouteTablePropagation") DisableVgwRoutePropagation = Action("DisableVgwRoutePropagation") DisableVpcClassicLink = Action("DisableVpcClassicLink") DisableVpcClassicLinkDnsSupport = Action("DisableVpcClassicLinkDnsSupport") @@ -341,16 +305,12 @@ DisassociateClientVpnTargetNetwork = Action("DisassociateClientVpnTargetNetwork" DisassociateIamInstanceProfile = Action("DisassociateIamInstanceProfile") DisassociateRouteTable = Action("DisassociateRouteTable") DisassociateSubnetCidrBlock = Action("DisassociateSubnetCidrBlock") -DisassociateTransitGatewayMulticastDomain = Action( - "DisassociateTransitGatewayMulticastDomain" -) +DisassociateTransitGatewayMulticastDomain = Action("DisassociateTransitGatewayMulticastDomain") DisassociateTransitGatewayRouteTable = Action("DisassociateTransitGatewayRouteTable") DisassociateVpcCidrBlock = Action("DisassociateVpcCidrBlock") EnableEbsEncryptionByDefault = Action("EnableEbsEncryptionByDefault") EnableFastSnapshotRestores = Action("EnableFastSnapshotRestores") -EnableTransitGatewayRouteTablePropagation = Action( - "EnableTransitGatewayRouteTablePropagation" -) +EnableTransitGatewayRouteTablePropagation = Action("EnableTransitGatewayRouteTablePropagation") EnableVgwRoutePropagation = Action("EnableVgwRoutePropagation") EnableVolumeIO = Action("EnableVolumeIO") EnableVpcClassicLink = Action("EnableVpcClassicLink") @@ -374,19 +334,13 @@ GetManagedPrefixListAssociations = Action("GetManagedPrefixListAssociations") GetManagedPrefixListEntries = Action("GetManagedPrefixListEntries") GetPasswordData = Action("GetPasswordData") GetReservedInstancesExchangeQuote = Action("GetReservedInstancesExchangeQuote") -GetTransitGatewayAttachmentPropagations = Action( 
- "GetTransitGatewayAttachmentPropagations" -) +GetTransitGatewayAttachmentPropagations = Action("GetTransitGatewayAttachmentPropagations") GetTransitGatewayMulticastDomainAssociations = Action( "GetTransitGatewayMulticastDomainAssociations" ) GetTransitGatewayPrefixListReferences = Action("GetTransitGatewayPrefixListReferences") -GetTransitGatewayRouteTableAssociations = Action( - "GetTransitGatewayRouteTableAssociations" -) -GetTransitGatewayRouteTablePropagations = Action( - "GetTransitGatewayRouteTablePropagations" -) +GetTransitGatewayRouteTableAssociations = Action("GetTransitGatewayRouteTableAssociations") +GetTransitGatewayRouteTablePropagations = Action("GetTransitGatewayRouteTablePropagations") ImportClientVpnClientCertificateRevocationList = Action( "ImportClientVpnClientCertificateRevocationList" ) @@ -406,9 +360,7 @@ ModifyIdFormat = Action("ModifyIdFormat") ModifyIdentityIdFormat = Action("ModifyIdentityIdFormat") ModifyImageAttribute = Action("ModifyImageAttribute") ModifyInstanceAttribute = Action("ModifyInstanceAttribute") -ModifyInstanceCapacityReservationAttributes = Action( - "ModifyInstanceCapacityReservationAttributes" -) +ModifyInstanceCapacityReservationAttributes = Action("ModifyInstanceCapacityReservationAttributes") ModifyInstanceCreditSpecification = Action("ModifyInstanceCreditSpecification") ModifyInstanceEventStartTime = Action("ModifyInstanceEventStartTime") ModifyInstanceMetadataOptions = Action("ModifyInstanceMetadataOptions") @@ -420,23 +372,17 @@ ModifyReservedInstances = Action("ModifyReservedInstances") ModifySnapshotAttribute = Action("ModifySnapshotAttribute") ModifySpotFleetRequest = Action("ModifySpotFleetRequest") ModifySubnetAttribute = Action("ModifySubnetAttribute") -ModifyTrafficMirrorFilterNetworkServices = Action( - "ModifyTrafficMirrorFilterNetworkServices" -) +ModifyTrafficMirrorFilterNetworkServices = Action("ModifyTrafficMirrorFilterNetworkServices") ModifyTrafficMirrorFilterRule = Action("ModifyTrafficMirrorFilterRule") ModifyTrafficMirrorSession = Action("ModifyTrafficMirrorSession") ModifyTransitGateway = Action("ModifyTransitGateway") -ModifyTransitGatewayPrefixListReference = Action( - "ModifyTransitGatewayPrefixListReference" -) +ModifyTransitGatewayPrefixListReference = Action("ModifyTransitGatewayPrefixListReference") ModifyTransitGatewayVpcAttachment = Action("ModifyTransitGatewayVpcAttachment") ModifyVolume = Action("ModifyVolume") ModifyVolumeAttribute = Action("ModifyVolumeAttribute") ModifyVpcAttribute = Action("ModifyVpcAttribute") ModifyVpcEndpoint = Action("ModifyVpcEndpoint") -ModifyVpcEndpointConnectionNotification = Action( - "ModifyVpcEndpointConnectionNotification" -) +ModifyVpcEndpointConnectionNotification = Action("ModifyVpcEndpointConnectionNotification") ModifyVpcEndpointServiceConfiguration = Action("ModifyVpcEndpointServiceConfiguration") ModifyVpcEndpointServicePermissions = Action("ModifyVpcEndpointServicePermissions") ModifyVpcPeeringConnectionOptions = Action("ModifyVpcPeeringConnectionOptions") @@ -452,15 +398,9 @@ PurchaseReservedInstancesOffering = Action("PurchaseReservedInstancesOffering") PurchaseScheduledInstances = Action("PurchaseScheduledInstances") RebootInstances = Action("RebootInstances") RegisterImage = Action("RegisterImage") -RegisterInstanceEventNotificationAttributes = Action( - "RegisterInstanceEventNotificationAttributes" -) -RegisterTransitGatewayMulticastGroupMembers = Action( - "RegisterTransitGatewayMulticastGroupMembers" -) -RegisterTransitGatewayMulticastGroupSources = Action( - 
"RegisterTransitGatewayMulticastGroupSources" -) +RegisterInstanceEventNotificationAttributes = Action("RegisterInstanceEventNotificationAttributes") +RegisterTransitGatewayMulticastGroupMembers = Action("RegisterTransitGatewayMulticastGroupMembers") +RegisterTransitGatewayMulticastGroupSources = Action("RegisterTransitGatewayMulticastGroupSources") RejectTransitGatewayPeeringAttachment = Action("RejectTransitGatewayPeeringAttachment") RejectTransitGatewayVpcAttachment = Action("RejectTransitGatewayVpcAttachment") RejectVpcEndpointConnections = Action("RejectVpcEndpointConnections") @@ -503,10 +443,6 @@ TerminateInstances = Action("TerminateInstances") UnassignIpv6Addresses = Action("UnassignIpv6Addresses") UnassignPrivateIpAddresses = Action("UnassignPrivateIpAddresses") UnmonitorInstances = Action("UnmonitorInstances") -UpdateSecurityGroupRuleDescriptionsEgress = Action( - "UpdateSecurityGroupRuleDescriptionsEgress" -) -UpdateSecurityGroupRuleDescriptionsIngress = Action( - "UpdateSecurityGroupRuleDescriptionsIngress" -) +UpdateSecurityGroupRuleDescriptionsEgress = Action("UpdateSecurityGroupRuleDescriptionsEgress") +UpdateSecurityGroupRuleDescriptionsIngress = Action("UpdateSecurityGroupRuleDescriptionsIngress") WithdrawByoipCidr = Action("WithdrawByoipCidr") diff --git a/typings/awacs/elasticache.pyi b/typings/awacs/elasticache.pyi index 04fbfadcf..bbe8753dd 100644 --- a/typings/awacs/elasticache.pyi +++ b/typings/awacs/elasticache.pyi @@ -29,9 +29,7 @@ CreateReplicationGroup = Action("CreateReplicationGroup") CreateSnapshot = Action("CreateSnapshot") CreateUser = Action("CreateUser") CreateUserGroup = Action("CreateUserGroup") -DecreaseNodeGroupsInGlobalReplicationGroup = Action( - "DecreaseNodeGroupsInGlobalReplicationGroup" -) +DecreaseNodeGroupsInGlobalReplicationGroup = Action("DecreaseNodeGroupsInGlobalReplicationGroup") DecreaseReplicaCount = Action("DecreaseReplicaCount") DeleteCacheCluster = Action("DeleteCacheCluster") DeleteCacheParameterGroup = Action("DeleteCacheParameterGroup") @@ -61,9 +59,7 @@ DescribeUserGroups = Action("DescribeUserGroups") DescribeUsers = Action("DescribeUsers") DisassociateGlobalReplicationGroup = Action("DisassociateGlobalReplicationGroup") FailoverGlobalReplicationGroup = Action("FailoverGlobalReplicationGroup") -IncreaseNodeGroupsInGlobalReplicationGroup = Action( - "IncreaseNodeGroupsInGlobalReplicationGroup" -) +IncreaseNodeGroupsInGlobalReplicationGroup = Action("IncreaseNodeGroupsInGlobalReplicationGroup") IncreaseReplicaCount = Action("IncreaseReplicaCount") ListAllowedNodeTypeModifications = Action("ListAllowedNodeTypeModifications") ListTagsForResource = Action("ListTagsForResource") @@ -72,15 +68,11 @@ ModifyCacheParameterGroup = Action("ModifyCacheParameterGroup") ModifyCacheSubnetGroup = Action("ModifyCacheSubnetGroup") ModifyGlobalReplicationGroup = Action("ModifyGlobalReplicationGroup") ModifyReplicationGroup = Action("ModifyReplicationGroup") -ModifyReplicationGroupShardConfiguration = Action( - "ModifyReplicationGroupShardConfiguration" -) +ModifyReplicationGroupShardConfiguration = Action("ModifyReplicationGroupShardConfiguration") ModifyUser = Action("ModifyUser") ModifyUserGroup = Action("ModifyUserGroup") PurchaseReservedCacheNodesOffering = Action("PurchaseReservedCacheNodesOffering") -RebalanceSlotsInGlobalReplicationGroup = Action( - "RebalanceSlotsInGlobalReplicationGroup" -) +RebalanceSlotsInGlobalReplicationGroup = Action("RebalanceSlotsInGlobalReplicationGroup") RebootCacheCluster = Action("RebootCacheCluster") 
 RemoveTagsFromResource = Action("RemoveTagsFromResource")
 ResetCacheParameterGroup = Action("ResetCacheParameterGroup")
diff --git a/typings/awacs/elasticbeanstalk.pyi b/typings/awacs/elasticbeanstalk.pyi
index 6f8098cc4..765a15a05 100644
--- a/typings/awacs/elasticbeanstalk.pyi
+++ b/typings/awacs/elasticbeanstalk.pyi
@@ -37,9 +37,7 @@ DescribeApplications = Action("DescribeApplications")
 DescribeConfigurationOptions = Action("DescribeConfigurationOptions")
 DescribeConfigurationSettings = Action("DescribeConfigurationSettings")
 DescribeEnvironmentHealth = Action("DescribeEnvironmentHealth")
-DescribeEnvironmentManagedActionHistory = Action(
-    "DescribeEnvironmentManagedActionHistory"
-)
+DescribeEnvironmentManagedActionHistory = Action("DescribeEnvironmentManagedActionHistory")
 DescribeEnvironmentManagedActions = Action("DescribeEnvironmentManagedActions")
 DescribeEnvironmentResources = Action("DescribeEnvironmentResources")
 DescribeEnvironments = Action("DescribeEnvironments")
diff --git a/typings/awacs/elasticloadbalancing.pyi b/typings/awacs/elasticloadbalancing.pyi
index c543d76cf..7919d78ef 100644
--- a/typings/awacs/elasticloadbalancing.pyi
+++ b/typings/awacs/elasticloadbalancing.pyi
@@ -50,12 +50,8 @@ DescribeTargetGroupAttributes = Action("DescribeTargetGroupAttributes")
 DescribeTargetGroups = Action("DescribeTargetGroups")
 DescribeTargetHealth = Action("DescribeTargetHealth")
 DetachLoadBalancerFromSubnets = Action("DetachLoadBalancerFromSubnets")
-DisableAvailabilityZonesForLoadBalancer = Action(
-    "DisableAvailabilityZonesForLoadBalancer"
-)
-EnableAvailabilityZonesForLoadBalancer = Action(
-    "EnableAvailabilityZonesForLoadBalancer"
-)
+DisableAvailabilityZonesForLoadBalancer = Action("DisableAvailabilityZonesForLoadBalancer")
+EnableAvailabilityZonesForLoadBalancer = Action("EnableAvailabilityZonesForLoadBalancer")
 ModifyListener = Action("ModifyListener")
 ModifyLoadBalancerAttributes = Action("ModifyLoadBalancerAttributes")
 ModifyRule = Action("ModifyRule")
@@ -67,9 +63,7 @@ RemoveListenerCertificates = Action("RemoveListenerCertificates")
 RemoveTags = Action("RemoveTags")
 SetIpAddressType = Action("SetIpAddressType")
 SetLoadBalancerListenerSSLCertificate = Action("SetLoadBalancerListenerSSLCertificate")
-SetLoadBalancerPoliciesForBackendServer = Action(
-    "SetLoadBalancerPoliciesForBackendServer"
-)
+SetLoadBalancerPoliciesForBackendServer = Action("SetLoadBalancerPoliciesForBackendServer")
 SetLoadBalancerPoliciesOfListener = Action("SetLoadBalancerPoliciesOfListener")
 SetRulePriorities = Action("SetRulePriorities")
 SetSecurityGroups = Action("SetSecurityGroups")
diff --git a/typings/awacs/es.pyi b/typings/awacs/es.pyi
index e2c689fda..fd9f819fc 100644
--- a/typings/awacs/es.pyi
+++ b/typings/awacs/es.pyi
@@ -14,29 +14,19 @@ class Action(BaseAction):
 class ARN(BaseARN):
     def __init__(self, resource=..., region=..., account=...) -> None: ...
 
-AcceptInboundCrossClusterSearchConnection = Action(
-    "AcceptInboundCrossClusterSearchConnection"
-)
+AcceptInboundCrossClusterSearchConnection = Action("AcceptInboundCrossClusterSearchConnection")
 AddTags = Action("AddTags")
 CreateElasticsearchDomain = Action("CreateElasticsearchDomain")
 CreateElasticsearchServiceRole = Action("CreateElasticsearchServiceRole")
-CreateOutboundCrossClusterSearchConnection = Action(
-    "CreateOutboundCrossClusterSearchConnection"
-)
+CreateOutboundCrossClusterSearchConnection = Action("CreateOutboundCrossClusterSearchConnection")
 DeleteElasticsearchDomain = Action("DeleteElasticsearchDomain")
 DeleteElasticsearchServiceRole = Action("DeleteElasticsearchServiceRole")
-DeleteInboundCrossClusterSearchConnection = Action(
-    "DeleteInboundCrossClusterSearchConnection"
-)
-DeleteOutboundCrossClusterSearchConnection = Action(
-    "DeleteOutboundCrossClusterSearchConnection"
-)
+DeleteInboundCrossClusterSearchConnection = Action("DeleteInboundCrossClusterSearchConnection")
+DeleteOutboundCrossClusterSearchConnection = Action("DeleteOutboundCrossClusterSearchConnection")
 DescribeElasticsearchDomain = Action("DescribeElasticsearchDomain")
 DescribeElasticsearchDomainConfig = Action("DescribeElasticsearchDomainConfig")
 DescribeElasticsearchDomains = Action("DescribeElasticsearchDomains")
-DescribeElasticsearchInstanceTypeLimits = Action(
-    "DescribeElasticsearchInstanceTypeLimits"
-)
+DescribeElasticsearchInstanceTypeLimits = Action("DescribeElasticsearchInstanceTypeLimits")
 DescribeInboundCrossClusterSearchConnections = Action(
     "DescribeInboundCrossClusterSearchConnections"
 )
@@ -46,9 +36,7 @@ DescribeOutboundCrossClusterSearchConnections = Action(
 DescribeReservedElasticsearchInstanceOfferings = Action(
     "DescribeReservedElasticsearchInstanceOfferings"
 )
-DescribeReservedElasticsearchInstances = Action(
-    "DescribeReservedElasticsearchInstances"
-)
+DescribeReservedElasticsearchInstances = Action("DescribeReservedElasticsearchInstances")
 ESCrossClusterGet = Action("ESCrossClusterGet")
 ESHttpDelete = Action("ESHttpDelete")
 ESHttpGet = Action("ESHttpGet")
@@ -68,9 +56,7 @@ PurchaseReservedElasticsearchInstance = Action("PurchaseReservedElasticsearchIns
 PurchaseReservedElasticsearchInstanceOffering = Action(
     "PurchaseReservedElasticsearchInstanceOffering"
 )
-RejectInboundCrossClusterSearchConnection = Action(
-    "RejectInboundCrossClusterSearchConnection"
-)
+RejectInboundCrossClusterSearchConnection = Action("RejectInboundCrossClusterSearchConnection")
 RemoveTags = Action("RemoveTags")
 UpdateElasticsearchDomainConfig = Action("UpdateElasticsearchDomainConfig")
 UpgradeElasticsearchDomain = Action("UpgradeElasticsearchDomain")
diff --git a/typings/awacs/health.pyi b/typings/awacs/health.pyi
index f326ccfba..53c7b8b33 100644
--- a/typings/awacs/health.pyi
+++ b/typings/awacs/health.pyi
@@ -14,13 +14,9 @@ class Action(BaseAction):
 class ARN(BaseARN):
     def __init__(self, resource=..., region=..., account=...) -> None: ...
 
-DescribeAffectedAccountsForOrganization = Action(
-    "DescribeAffectedAccountsForOrganization"
-)
+DescribeAffectedAccountsForOrganization = Action("DescribeAffectedAccountsForOrganization")
 DescribeAffectedEntities = Action("DescribeAffectedEntities")
-DescribeAffectedEntitiesForOrganization = Action(
-    "DescribeAffectedEntitiesForOrganization"
-)
+DescribeAffectedEntitiesForOrganization = Action("DescribeAffectedEntitiesForOrganization")
 DescribeEntityAggregates = Action("DescribeEntityAggregates")
 DescribeEventAggregates = Action("DescribeEventAggregates")
 DescribeEventDetails = Action("DescribeEventDetails")
@@ -28,12 +24,6 @@ DescribeEventDetailsForOrganization = Action("DescribeEventDetailsForOrganizatio
 DescribeEventTypes = Action("DescribeEventTypes")
 DescribeEvents = Action("DescribeEvents")
 DescribeEventsForOrganization = Action("DescribeEventsForOrganization")
-DescribeHealthServiceStatusForOrganization = Action(
-    "DescribeHealthServiceStatusForOrganization"
-)
-DisableHealthServiceAccessForOrganization = Action(
-    "DisableHealthServiceAccessForOrganization"
-)
-EnableHealthServiceAccessForOrganization = Action(
-    "EnableHealthServiceAccessForOrganization"
-)
+DescribeHealthServiceStatusForOrganization = Action("DescribeHealthServiceStatusForOrganization")
+DisableHealthServiceAccessForOrganization = Action("DisableHealthServiceAccessForOrganization")
+EnableHealthServiceAccessForOrganization = Action("EnableHealthServiceAccessForOrganization")
diff --git a/typings/awacs/iam.pyi b/typings/awacs/iam.pyi
index 9c088782e..6453d1439 100644
--- a/typings/awacs/iam.pyi
+++ b/typings/awacs/iam.pyi
@@ -89,9 +89,7 @@ GetSAMLProvider = Action("GetSAMLProvider")
 GetSSHPublicKey = Action("GetSSHPublicKey")
 GetServerCertificate = Action("GetServerCertificate")
 GetServiceLastAccessedDetails = Action("GetServiceLastAccessedDetails")
-GetServiceLastAccessedDetailsWithEntities = Action(
-    "GetServiceLastAccessedDetailsWithEntities"
-)
+GetServiceLastAccessedDetailsWithEntities = Action("GetServiceLastAccessedDetailsWithEntities")
 GetServiceLinkedRoleDeletionStatus = Action("GetServiceLinkedRoleDeletionStatus")
 GetUser = Action("GetUser")
 GetUserPolicy = Action("GetUserPolicy")
@@ -129,9 +127,7 @@ PutRolePermissionsBoundary = Action("PutRolePermissionsBoundary")
 PutRolePolicy = Action("PutRolePolicy")
 PutUserPermissionsBoundary = Action("PutUserPermissionsBoundary")
 PutUserPolicy = Action("PutUserPolicy")
-RemoveClientIDFromOpenIDConnectProvider = Action(
-    "RemoveClientIDFromOpenIDConnectProvider"
-)
+RemoveClientIDFromOpenIDConnectProvider = Action("RemoveClientIDFromOpenIDConnectProvider")
 RemoveRoleFromInstanceProfile = Action("RemoveRoleFromInstanceProfile")
 RemoveUserFromGroup = Action("RemoveUserFromGroup")
 RequestSmsMfaRegistration = Action("RequestSmsMfaRegistration")
diff --git a/typings/awacs/iotsitewise.pyi b/typings/awacs/iotsitewise.pyi
index 72bf2063a..d268005f2 100644
--- a/typings/awacs/iotsitewise.pyi
+++ b/typings/awacs/iotsitewise.pyi
@@ -54,9 +54,7 @@ DescribeAssetTemplates = Action("DescribeAssetTemplates")
 DescribeAssets = Action("DescribeAssets")
 DescribeDashboard = Action("DescribeDashboard")
 DescribeGateway = Action("DescribeGateway")
-DescribeGatewayCapabilityConfiguration = Action(
-    "DescribeGatewayCapabilityConfiguration"
-)
+DescribeGatewayCapabilityConfiguration = Action("DescribeGatewayCapabilityConfiguration")
 DescribeGateways = Action("DescribeGateways")
 DescribeGroups = Action("DescribeGroups")
 DescribeLoggingOptions = Action("DescribeLoggingOptions")
diff --git a/typings/awacs/kinesisanalytics.pyi b/typings/awacs/kinesisanalytics.pyi
index c499c154d..5867d5cb7 100644
--- a/typings/awacs/kinesisanalytics.pyi
+++ b/typings/awacs/kinesisanalytics.pyi
@@ -16,18 +16,14 @@ class ARN(BaseARN):
 
 AddApplicationCloudWatchLoggingOption = Action("AddApplicationCloudWatchLoggingOption")
 AddApplicationInput = Action("AddApplicationInput")
-AddApplicationInputProcessingConfiguration = Action(
-    "AddApplicationInputProcessingConfiguration"
-)
+AddApplicationInputProcessingConfiguration = Action("AddApplicationInputProcessingConfiguration")
 AddApplicationOutput = Action("AddApplicationOutput")
 AddApplicationReferenceDataSource = Action("AddApplicationReferenceDataSource")
 AddApplicationVpcConfiguration = Action("AddApplicationVpcConfiguration")
 CreateApplication = Action("CreateApplication")
 CreateApplicationSnapshot = Action("CreateApplicationSnapshot")
 DeleteApplication = Action("DeleteApplication")
-DeleteApplicationCloudWatchLoggingOption = Action(
-    "DeleteApplicationCloudWatchLoggingOption"
-)
+DeleteApplicationCloudWatchLoggingOption = Action("DeleteApplicationCloudWatchLoggingOption")
 DeleteApplicationInputProcessingConfiguration = Action(
     "DeleteApplicationInputProcessingConfiguration"
 )
diff --git a/typings/awacs/license_manager.pyi b/typings/awacs/license_manager.pyi
index ae27c01a1..ea6bf0d57 100644
--- a/typings/awacs/license_manager.pyi
+++ b/typings/awacs/license_manager.pyi
@@ -18,9 +18,7 @@ CreateLicenseConfiguration = Action("CreateLicenseConfiguration")
 DeleteLicenseConfiguration = Action("DeleteLicenseConfiguration")
 GetLicenseConfiguration = Action("GetLicenseConfiguration")
 GetServiceSettings = Action("GetServiceSettings")
-ListAssociationsForLicenseConfiguration = Action(
-    "ListAssociationsForLicenseConfiguration"
-)
+ListAssociationsForLicenseConfiguration = Action("ListAssociationsForLicenseConfiguration")
 ListLicenseConfigurations = Action("ListLicenseConfigurations")
 ListLicenseSpecificationsForResource = Action("ListLicenseSpecificationsForResource")
 ListResourceInventory = Action("ListResourceInventory")
@@ -29,7 +27,5 @@ ListUsageForLicenseConfiguration = Action("ListUsageForLicenseConfiguration")
 TagResource = Action("TagResource")
 UntagResource = Action("UntagResource")
 UpdateLicenseConfiguration = Action("UpdateLicenseConfiguration")
-UpdateLicenseSpecificationsForResource = Action(
-    "UpdateLicenseSpecificationsForResource"
-)
+UpdateLicenseSpecificationsForResource = Action("UpdateLicenseSpecificationsForResource")
 UpdateServiceSettings = Action("UpdateServiceSettings")
diff --git a/typings/awacs/lightsail.pyi b/typings/awacs/lightsail.pyi
index 2d2a8b8f0..d05e95414 100644
--- a/typings/awacs/lightsail.pyi
+++ b/typings/awacs/lightsail.pyi
@@ -88,9 +88,7 @@ GetRelationalDatabaseBundles = Action("GetRelationalDatabaseBundles")
 GetRelationalDatabaseEvents = Action("GetRelationalDatabaseEvents")
 GetRelationalDatabaseLogEvents = Action("GetRelationalDatabaseLogEvents")
 GetRelationalDatabaseLogStreams = Action("GetRelationalDatabaseLogStreams")
-GetRelationalDatabaseMasterUserPassword = Action(
-    "GetRelationalDatabaseMasterUserPassword"
-)
+GetRelationalDatabaseMasterUserPassword = Action("GetRelationalDatabaseMasterUserPassword")
 GetRelationalDatabaseMetricData = Action("GetRelationalDatabaseMetricData")
 GetRelationalDatabaseParameters = Action("GetRelationalDatabaseParameters")
 GetRelationalDatabaseSnapshot = Action("GetRelationalDatabaseSnapshot")
diff --git a/typings/awacs/rds.pyi b/typings/awacs/rds.pyi
index 914e39782..f71af3b67 100644
--- a/typings/awacs/rds.pyi
+++ b/typings/awacs/rds.pyi
@@ -78,9 +78,7 @@ DescribeDBSecurityGroups = Action("DescribeDBSecurityGroups")
 DescribeDBSnapshotAttributes = Action("DescribeDBSnapshotAttributes")
 DescribeDBSnapshots = Action("DescribeDBSnapshots")
 DescribeDBSubnetGroups = Action("DescribeDBSubnetGroups")
-DescribeEngineDefaultClusterParameters = Action(
-    "DescribeEngineDefaultClusterParameters"
-)
+DescribeEngineDefaultClusterParameters = Action("DescribeEngineDefaultClusterParameters")
 DescribeEngineDefaultParameters = Action("DescribeEngineDefaultParameters")
 DescribeEventCategories = Action("DescribeEventCategories")
 DescribeEventSubscriptions = Action("DescribeEventSubscriptions")
@@ -122,9 +120,7 @@ RegisterDBProxyTargets = Action("RegisterDBProxyTargets")
 RemoveFromGlobalCluster = Action("RemoveFromGlobalCluster")
 RemoveRoleFromDBCluster = Action("RemoveRoleFromDBCluster")
 RemoveRoleFromDBInstance = Action("RemoveRoleFromDBInstance")
-RemoveSourceIdentifierFromSubscription = Action(
-    "RemoveSourceIdentifierFromSubscription"
-)
+RemoveSourceIdentifierFromSubscription = Action("RemoveSourceIdentifierFromSubscription")
 RemoveTagsFromResource = Action("RemoveTagsFromResource")
 ResetDBClusterParameterGroup = Action("ResetDBClusterParameterGroup")
 ResetDBParameterGroup = Action("ResetDBParameterGroup")
diff --git a/typings/awacs/route53.pyi b/typings/awacs/route53.pyi
index b1ed79a0d..2df1bddfb 100644
--- a/typings/awacs/route53.pyi
+++ b/typings/awacs/route53.pyi
@@ -63,9 +63,7 @@ ListTagsForResource = Action("ListTagsForResource")
 ListTagsForResources = Action("ListTagsForResources")
 ListTrafficPolicies = Action("ListTrafficPolicies")
 ListTrafficPolicyInstances = Action("ListTrafficPolicyInstances")
-ListTrafficPolicyInstancesByHostedZone = Action(
-    "ListTrafficPolicyInstancesByHostedZone"
-)
+ListTrafficPolicyInstancesByHostedZone = Action("ListTrafficPolicyInstancesByHostedZone")
 ListTrafficPolicyInstancesByPolicy = Action("ListTrafficPolicyInstancesByPolicy")
 ListTrafficPolicyVersions = Action("ListTrafficPolicyVersions")
 ListVPCAssociationAuthorizations = Action("ListVPCAssociationAuthorizations")
diff --git a/typings/awacs/route53resolver.pyi b/typings/awacs/route53resolver.pyi
index d3cefff03..b32add6db 100644
--- a/typings/awacs/route53resolver.pyi
+++ b/typings/awacs/route53resolver.pyi
@@ -35,9 +35,7 @@ GetResolverRuleAssociation = Action("GetResolverRuleAssociation")
 GetResolverRulePolicy = Action("GetResolverRulePolicy")
 ListResolverEndpointIpAddresses = Action("ListResolverEndpointIpAddresses")
 ListResolverEndpoints = Action("ListResolverEndpoints")
-ListResolverQueryLogConfigAssociations = Action(
-    "ListResolverQueryLogConfigAssociations"
-)
+ListResolverQueryLogConfigAssociations = Action("ListResolverQueryLogConfigAssociations")
 ListResolverQueryLogConfigs = Action("ListResolverQueryLogConfigs")
 ListResolverRuleAssociations = Action("ListResolverRuleAssociations")
 ListResolverRules = Action("ListResolverRules")
diff --git a/typings/awacs/sagemaker.pyi b/typings/awacs/sagemaker.pyi
index 77ff503b5..9f8ef51c2 100644
--- a/typings/awacs/sagemaker.pyi
+++ b/typings/awacs/sagemaker.pyi
@@ -84,9 +84,7 @@ DescribeModel = Action("DescribeModel")
 DescribeModelPackage = Action("DescribeModelPackage")
 DescribeMonitoringSchedule = Action("DescribeMonitoringSchedule")
 DescribeNotebookInstance = Action("DescribeNotebookInstance")
-DescribeNotebookInstanceLifecycleConfig = Action(
-    "DescribeNotebookInstanceLifecycleConfig"
-)
+DescribeNotebookInstanceLifecycleConfig = Action("DescribeNotebookInstanceLifecycleConfig")
 DescribeProcessingJob = Action("DescribeProcessingJob")
 DescribeSubscribedWorkteam = Action("DescribeSubscribedWorkteam")
 DescribeTrainingJob = Action("DescribeTrainingJob")
@@ -125,9 +123,7 @@ ListProcessingJobs = Action("ListProcessingJobs")
 ListSubscribedWorkteams = Action("ListSubscribedWorkteams")
 ListTags = Action("ListTags")
 ListTrainingJobs = Action("ListTrainingJobs")
-ListTrainingJobsForHyperParameterTuningJob = Action(
-    "ListTrainingJobsForHyperParameterTuningJob"
-)
+ListTrainingJobsForHyperParameterTuningJob = Action("ListTrainingJobsForHyperParameterTuningJob")
 ListTransformJobs = Action("ListTransformJobs")
 ListTrialComponents = Action("ListTrialComponents")
 ListTrials = Action("ListTrials")
diff --git a/typings/awacs/servicecatalog.pyi b/typings/awacs/servicecatalog.pyi
index f4c2daee0..cacf4c1f6 100644
--- a/typings/awacs/servicecatalog.pyi
+++ b/typings/awacs/servicecatalog.pyi
@@ -58,9 +58,7 @@ DescribeProvisioningArtifact = Action("DescribeProvisioningArtifact")
 DescribeProvisioningParameters = Action("DescribeProvisioningParameters")
 DescribeRecord = Action("DescribeRecord")
 DescribeServiceAction = Action("DescribeServiceAction")
-DescribeServiceActionExecutionParameters = Action(
-    "DescribeServiceActionExecutionParameters"
-)
+DescribeServiceActionExecutionParameters = Action("DescribeServiceActionExecutionParameters")
 DescribeTagOption = Action("DescribeTagOption")
 DisableAWSOrganizationsAccess = Action("DisableAWSOrganizationsAccess")
 DisassociateBudgetFromResource = Action("DisassociateBudgetFromResource")
@@ -72,9 +70,7 @@ DisassociateServiceActionFromProvisioningArtifact = Action(
 DisassociateTagOptionFromResource = Action("DisassociateTagOptionFromResource")
 EnableAWSOrganizationsAccess = Action("EnableAWSOrganizationsAccess")
 ExecuteProvisionedProductPlan = Action("ExecuteProvisionedProductPlan")
-ExecuteProvisionedProductServiceAction = Action(
-    "ExecuteProvisionedProductServiceAction"
-)
+ExecuteProvisionedProductServiceAction = Action("ExecuteProvisionedProductServiceAction")
 GetAWSOrganizationsAccessStatus = Action("GetAWSOrganizationsAccessStatus")
 ListAcceptedPortfolioShares = Action("ListAcceptedPortfolioShares")
 ListBudgetsForResource = Action("ListBudgetsForResource")
@@ -87,18 +83,12 @@ ListPortfoliosForProduct = Action("ListPortfoliosForProduct")
 ListPrincipalsForPortfolio = Action("ListPrincipalsForPortfolio")
 ListProvisionedProductPlans = Action("ListProvisionedProductPlans")
 ListProvisioningArtifacts = Action("ListProvisioningArtifacts")
-ListProvisioningArtifactsForServiceAction = Action(
-    "ListProvisioningArtifactsForServiceAction"
-)
+ListProvisioningArtifactsForServiceAction = Action("ListProvisioningArtifactsForServiceAction")
 ListRecordHistory = Action("ListRecordHistory")
 ListResourcesForTagOption = Action("ListResourcesForTagOption")
 ListServiceActions = Action("ListServiceActions")
-ListServiceActionsForProvisioningArtifact = Action(
-    "ListServiceActionsForProvisioningArtifact"
-)
-ListStackInstancesForProvisionedProduct = Action(
-    "ListStackInstancesForProvisionedProduct"
-)
+ListServiceActionsForProvisioningArtifact = Action("ListServiceActionsForProvisioningArtifact")
+ListStackInstancesForProvisionedProduct = Action("ListStackInstancesForProvisionedProduct")
 ListTagOptions = Action("ListTagOptions")
 ProvisionProduct = Action("ProvisionProduct")
 RejectPortfolioShare = Action("RejectPortfolioShare")
diff --git a/typings/awacs/servicequotas.pyi b/typings/awacs/servicequotas.pyi
index b4919cc81..bbe455f59 100644
--- a/typings/awacs/servicequotas.pyi
+++ b/typings/awacs/servicequotas.pyi
@@ -23,22 +23,14 @@ GetAWSDefaultServiceQuota = Action("GetAWSDefaultServiceQuota")
 GetAssociationForServiceQuotaTemplate = Action("GetAssociationForServiceQuotaTemplate")
 GetRequestedServiceQuotaChange = Action("GetRequestedServiceQuotaChange")
 GetServiceQuota = Action("GetServiceQuota")
-GetServiceQuotaIncreaseRequestFromTemplate = Action(
-    "GetServiceQuotaIncreaseRequestFromTemplate"
-)
+GetServiceQuotaIncreaseRequestFromTemplate = Action("GetServiceQuotaIncreaseRequestFromTemplate")
 ListAWSDefaultServiceQuotas = Action("ListAWSDefaultServiceQuotas")
-ListRequestedServiceQuotaChangeHistory = Action(
-    "ListRequestedServiceQuotaChangeHistory"
-)
+ListRequestedServiceQuotaChangeHistory = Action("ListRequestedServiceQuotaChangeHistory")
 ListRequestedServiceQuotaChangeHistoryByQuota = Action(
     "ListRequestedServiceQuotaChangeHistoryByQuota"
 )
-ListServiceQuotaIncreaseRequestsInTemplate = Action(
-    "ListServiceQuotaIncreaseRequestsInTemplate"
-)
+ListServiceQuotaIncreaseRequestsInTemplate = Action("ListServiceQuotaIncreaseRequestsInTemplate")
 ListServiceQuotas = Action("ListServiceQuotas")
 ListServices = Action("ListServices")
-PutServiceQuotaIncreaseRequestIntoTemplate = Action(
-    "PutServiceQuotaIncreaseRequestIntoTemplate"
-)
+PutServiceQuotaIncreaseRequestIntoTemplate = Action("PutServiceQuotaIncreaseRequestIntoTemplate")
 RequestServiceQuotaIncrease = Action("RequestServiceQuotaIncrease")
diff --git a/typings/awacs/ses.pyi b/typings/awacs/ses.pyi
index 3c785dfc0..c450bec15 100644
--- a/typings/awacs/ses.pyi
+++ b/typings/awacs/ses.pyi
@@ -16,9 +16,7 @@ class ARN(BaseARN):
 
 CloneReceiptRuleSet = Action("CloneReceiptRuleSet")
 CreateConfigurationSet = Action("CreateConfigurationSet")
-CreateConfigurationSetEventDestination = Action(
-    "CreateConfigurationSetEventDestination"
-)
+CreateConfigurationSetEventDestination = Action("CreateConfigurationSetEventDestination")
 CreateConfigurationSetTrackingOptions = Action("CreateConfigurationSetTrackingOptions")
 CreateCustomVerificationEmailTemplate = Action("CreateCustomVerificationEmailTemplate")
 CreateDedicatedIpPool = Action("CreateDedicatedIpPool")
@@ -29,9 +27,7 @@ CreateReceiptRule = Action("CreateReceiptRule")
 CreateReceiptRuleSet = Action("CreateReceiptRuleSet")
 CreateTemplate = Action("CreateTemplate")
 DeleteConfigurationSet = Action("DeleteConfigurationSet")
-DeleteConfigurationSetEventDestination = Action(
-    "DeleteConfigurationSetEventDestination"
-)
+DeleteConfigurationSetEventDestination = Action("DeleteConfigurationSetEventDestination")
 DeleteConfigurationSetTrackingOptions = Action("DeleteConfigurationSetTrackingOptions")
 DeleteCustomVerificationEmailTemplate = Action("DeleteCustomVerificationEmailTemplate")
 DeleteDedicatedIpPool = Action("DeleteDedicatedIpPool")
@@ -102,9 +98,7 @@ SendTemplatedEmail = Action("SendTemplatedEmail")
 SetActiveReceiptRuleSet = Action("SetActiveReceiptRuleSet")
 SetIdentityDkimEnabled = Action("SetIdentityDkimEnabled")
 SetIdentityFeedbackForwardingEnabled = Action("SetIdentityFeedbackForwardingEnabled")
-SetIdentityHeadersInNotificationsEnabled = Action(
-    "SetIdentityHeadersInNotificationsEnabled"
-)
+SetIdentityHeadersInNotificationsEnabled = Action("SetIdentityHeadersInNotificationsEnabled")
 SetIdentityMailFromDomain = Action("SetIdentityMailFromDomain")
 SetIdentityNotificationTopic = Action("SetIdentityNotificationTopic")
 SetReceiptRulePosition = Action("SetReceiptRulePosition")
@@ -112,9 +106,7 @@ TagResource = Action("TagResource")
 TestRenderTemplate = Action("TestRenderTemplate")
 UntagResource = Action("UntagResource")
 UpdateAccountSendingEnabled = Action("UpdateAccountSendingEnabled")
-UpdateConfigurationSetEventDestination = Action(
-    "UpdateConfigurationSetEventDestination"
-)
+UpdateConfigurationSetEventDestination = Action("UpdateConfigurationSetEventDestination")
 UpdateConfigurationSetReputationMetricsEnabled = Action(
     "UpdateConfigurationSetReputationMetricsEnabled"
 )
diff --git a/typings/awacs/sms_voice.pyi b/typings/awacs/sms_voice.pyi
index ad1b21f8e..aefe486a6 100644
--- a/typings/awacs/sms_voice.pyi
+++ b/typings/awacs/sms_voice.pyi
@@ -15,16 +15,10 @@ class ARN(BaseARN):
     def __init__(self, resource=..., region=..., account=...) -> None: ...
 
 CreateConfigurationSet = Action("CreateConfigurationSet")
-CreateConfigurationSetEventDestination = Action(
-    "CreateConfigurationSetEventDestination"
-)
+CreateConfigurationSetEventDestination = Action("CreateConfigurationSetEventDestination")
 DeleteConfigurationSet = Action("DeleteConfigurationSet")
-DeleteConfigurationSetEventDestination = Action(
-    "DeleteConfigurationSetEventDestination"
-)
+DeleteConfigurationSetEventDestination = Action("DeleteConfigurationSetEventDestination")
 GetConfigurationSetEventDestinations = Action("GetConfigurationSetEventDestinations")
 ListConfigurationSets = Action("ListConfigurationSets")
 SendVoiceMessage = Action("SendVoiceMessage")
-UpdateConfigurationSetEventDestination = Action(
-    "UpdateConfigurationSetEventDestination"
-)
+UpdateConfigurationSetEventDestination = Action("UpdateConfigurationSetEventDestination")
diff --git a/typings/awacs/ssm.pyi b/typings/awacs/ssm.pyi
index 2d379ac19..f28299ff6 100644
--- a/typings/awacs/ssm.pyi
+++ b/typings/awacs/ssm.pyi
@@ -49,24 +49,18 @@ DescribeDocument = Action("DescribeDocument")
 DescribeDocumentParameters = Action("DescribeDocumentParameters")
 DescribeDocumentPermission = Action("DescribeDocumentPermission")
 DescribeEffectiveInstanceAssociations = Action("DescribeEffectiveInstanceAssociations")
-DescribeEffectivePatchesForPatchBaseline = Action(
-    "DescribeEffectivePatchesForPatchBaseline"
-)
+DescribeEffectivePatchesForPatchBaseline = Action("DescribeEffectivePatchesForPatchBaseline")
 DescribeInstanceAssociationsStatus = Action("DescribeInstanceAssociationsStatus")
 DescribeInstanceInformation = Action("DescribeInstanceInformation")
 DescribeInstancePatchStates = Action("DescribeInstancePatchStates")
-DescribeInstancePatchStatesForPatchGroup = Action(
-    "DescribeInstancePatchStatesForPatchGroup"
-)
+DescribeInstancePatchStatesForPatchGroup = Action("DescribeInstancePatchStatesForPatchGroup")
 DescribeInstancePatches = Action("DescribeInstancePatches")
 DescribeInstanceProperties = Action("DescribeInstanceProperties")
 DescribeInventoryDeletions = Action("DescribeInventoryDeletions")
 DescribeMaintenanceWindowExecutionTaskInvocations = Action(
     "DescribeMaintenanceWindowExecutionTaskInvocations"
 )
-DescribeMaintenanceWindowExecutionTasks = Action(
-    "DescribeMaintenanceWindowExecutionTasks"
-)
+DescribeMaintenanceWindowExecutionTasks = Action("DescribeMaintenanceWindowExecutionTasks")
 DescribeMaintenanceWindowExecutions = Action("DescribeMaintenanceWindowExecutions")
 DescribeMaintenanceWindowSchedule = Action("DescribeMaintenanceWindowSchedule")
 DescribeMaintenanceWindowTargets = Action("DescribeMaintenanceWindowTargets")
@@ -92,9 +86,7 @@ GetInventorySchema = Action("GetInventorySchema")
 GetMaintenanceWindow = Action("GetMaintenanceWindow")
 GetMaintenanceWindowExecution = Action("GetMaintenanceWindowExecution")
 GetMaintenanceWindowExecutionTask = Action("GetMaintenanceWindowExecutionTask")
-GetMaintenanceWindowExecutionTaskInvocation = Action(
-    "GetMaintenanceWindowExecutionTaskInvocation"
-)
+GetMaintenanceWindowExecutionTaskInvocation = Action("GetMaintenanceWindowExecutionTaskInvocation")
 GetMaintenanceWindowTask = Action("GetMaintenanceWindowTask")
 GetManifest = Action("GetManifest")
 GetOpsItem = Action("GetOpsItem")
diff --git a/typings/awacs/sso.pyi b/typings/awacs/sso.pyi
index 72bf2f685..21cd346ea 100644
--- a/typings/awacs/sso.pyi
+++ b/typings/awacs/sso.pyi
@@ -38,17 +38,11 @@ DeletePermissionSet = Action("DeletePermissionSet")
 DeletePermissionsPolicy = Action("DeletePermissionsPolicy")
 DeleteProfile = Action("DeleteProfile")
 DeleteUser = Action("DeleteUser")
-DescribeAccountAssignmentCreationStatus = Action(
-    "DescribeAccountAssignmentCreationStatus"
-)
-DescribeAccountAssignmentDeletionStatus = Action(
-    "DescribeAccountAssignmentDeletionStatus"
-)
+DescribeAccountAssignmentCreationStatus = Action("DescribeAccountAssignmentCreationStatus")
+DescribeAccountAssignmentDeletionStatus = Action("DescribeAccountAssignmentDeletionStatus")
 DescribeGroups = Action("DescribeGroups")
 DescribePermissionSet = Action("DescribePermissionSet")
-DescribePermissionSetProvisioningStatus = Action(
-    "DescribePermissionSetProvisioningStatus"
-)
+DescribePermissionSetProvisioningStatus = Action("DescribePermissionSetProvisioningStatus")
 DescribePermissionsPolicies = Action("DescribePermissionsPolicies")
 DescribeRegisteredRegions = Action("DescribeRegisteredRegions")
 DescribeUsers = Action("DescribeUsers")
@@ -77,9 +71,7 @@ ImportApplicationInstanceServiceProviderMetadata = Action(
 ListAccountAssignmentCreationStatus = Action("ListAccountAssignmentCreationStatus")
 ListAccountAssignmentDeletionStatus = Action("ListAccountAssignmentDeletionStatus")
 ListAccountAssignments = Action("ListAccountAssignments")
-ListAccountsForProvisionedPermissionSet = Action(
-    "ListAccountsForProvisionedPermissionSet"
-)
+ListAccountsForProvisionedPermissionSet = Action("ListAccountsForProvisionedPermissionSet")
 ListApplicationInstanceCertificates = Action("ListApplicationInstanceCertificates")
 ListApplicationInstances = Action("ListApplicationInstances")
 ListApplicationTemplates = Action("ListApplicationTemplates")
@@ -91,9 +83,7 @@ ListManagedPoliciesInPermissionSet = Action("ListManagedPoliciesInPermissionSet"
 ListMembersInGroup = Action("ListMembersInGroup")
 ListPermissionSetProvisioningStatus = Action("ListPermissionSetProvisioningStatus")
 ListPermissionSets = Action("ListPermissionSets")
-ListPermissionSetsProvisionedToAccount = Action(
-    "ListPermissionSetsProvisionedToAccount"
-)
+ListPermissionSetsProvisionedToAccount = Action("ListPermissionSetsProvisionedToAccount")
 ListProfileAssociations = Action("ListProfileAssociations")
 ListProfiles = Action("ListProfiles")
 ListTagsForResource = Action("ListTagsForResource")
@@ -108,9 +98,7 @@ SetTemporaryPassword = Action("SetTemporaryPassword")
 StartSSO = Action("StartSSO")
 TagResource = Action("TagResource")
 UntagResource = Action("UntagResource")
-UpdateApplicationInstanceActiveCertificate = Action(
-    "UpdateApplicationInstanceActiveCertificate"
-)
+UpdateApplicationInstanceActiveCertificate = Action("UpdateApplicationInstanceActiveCertificate")
 UpdateApplicationInstanceDisplayData = Action("UpdateApplicationInstanceDisplayData")
 UpdateApplicationInstanceResponseConfiguration = Action(
     "UpdateApplicationInstanceResponseConfiguration"
@@ -127,9 +115,7 @@ UpdateApplicationInstanceServiceProviderConfiguration = Action(
 UpdateApplicationInstanceStatus = Action("UpdateApplicationInstanceStatus")
 UpdateDirectoryAssociation = Action("UpdateDirectoryAssociation")
 UpdateGroup = Action("UpdateGroup")
-UpdateManagedApplicationInstanceStatus = Action(
-    "UpdateManagedApplicationInstanceStatus"
-)
+UpdateManagedApplicationInstanceStatus = Action("UpdateManagedApplicationInstanceStatus")
 UpdatePermissionSet = Action("UpdatePermissionSet")
 UpdateProfile = Action("UpdateProfile")
 UpdateSSOConfiguration = Action("UpdateSSOConfiguration")
diff --git a/typings/awacs/sso_directory.pyi b/typings/awacs/sso_directory.pyi
index df113247b..830a4a338 100644
--- a/typings/awacs/sso_directory.pyi
+++ b/typings/awacs/sso_directory.pyi
@@ -18,16 +18,12 @@ AddMemberToGroup = Action("AddMemberToGroup")
 CompleteVirtualMfaDeviceRegistration = Action("CompleteVirtualMfaDeviceRegistration")
 CreateAlias = Action("CreateAlias")
 CreateBearerToken = Action("CreateBearerToken")
-CreateExternalIdPConfigurationForDirectory = Action(
-    "CreateExternalIdPConfigurationForDirectory"
-)
+CreateExternalIdPConfigurationForDirectory = Action("CreateExternalIdPConfigurationForDirectory")
 CreateGroup = Action("CreateGroup")
 CreateProvisioningTenant = Action("CreateProvisioningTenant")
 CreateUser = Action("CreateUser")
 DeleteBearerToken = Action("DeleteBearerToken")
-DeleteExternalIdPConfigurationForDirectory = Action(
-    "DeleteExternalIdPConfigurationForDirectory"
-)
+DeleteExternalIdPConfigurationForDirectory = Action("DeleteExternalIdPConfigurationForDirectory")
 DeleteGroup = Action("DeleteGroup")
 DeleteMfaDeviceForUser = Action("DeleteMfaDeviceForUser")
 DeleteProvisioningTenant = Action("DeleteProvisioningTenant")
@@ -35,19 +31,13 @@ DeleteUser = Action("DeleteUser")
 DescribeDirectory = Action("DescribeDirectory")
 DescribeGroups = Action("DescribeGroups")
 DescribeUsers = Action("DescribeUsers")
-DisableExternalIdPConfigurationForDirectory = Action(
-    "DisableExternalIdPConfigurationForDirectory"
-)
+DisableExternalIdPConfigurationForDirectory = Action("DisableExternalIdPConfigurationForDirectory")
 DisableUser = Action("DisableUser")
-EnableExternalIdPConfigurationForDirectory = Action(
-    "EnableExternalIdPConfigurationForDirectory"
-)
+EnableExternalIdPConfigurationForDirectory = Action("EnableExternalIdPConfigurationForDirectory")
 EnableUser = Action("EnableUser")
 GetAWSSPConfigurationForDirectory = Action("GetAWSSPConfigurationForDirectory")
 ListBearerTokens = Action("ListBearerTokens")
-ListExternalIdPConfigurationsForDirectory = Action(
-    "ListExternalIdPConfigurationsForDirectory"
-)
+ListExternalIdPConfigurationsForDirectory = Action("ListExternalIdPConfigurationsForDirectory")
 ListGroupsForUser = Action("ListGroupsForUser")
 ListMembersInGroup = Action("ListMembersInGroup")
 ListMfaDevicesForUser = Action("ListMfaDevicesForUser")
@@ -56,9 +46,7 @@ RemoveMemberFromGroup = Action("RemoveMemberFromGroup")
 SearchGroups = Action("SearchGroups")
 SearchUsers = Action("SearchUsers")
 StartVirtualMfaDeviceRegistration = Action("StartVirtualMfaDeviceRegistration")
-UpdateExternalIdPConfigurationForDirectory = Action(
-    "UpdateExternalIdPConfigurationForDirectory"
-)
+UpdateExternalIdPConfigurationForDirectory = Action("UpdateExternalIdPConfigurationForDirectory")
 UpdateGroup = Action("UpdateGroup")
 UpdatePassword = Action("UpdatePassword")
 UpdateUser = Action("UpdateUser")
diff --git a/typings/awacs/support.pyi b/typings/awacs/support.pyi
index ae31fb669..d1dba3123 100644
--- a/typings/awacs/support.pyi
+++ b/typings/awacs/support.pyi
@@ -25,9 +25,7 @@ DescribeIssueTypes = Action("DescribeIssueTypes")
 DescribeServices = Action("DescribeServices")
 DescribeSeverityLevels = Action("DescribeSeverityLevels")
 DescribeSupportLevel = Action("DescribeSupportLevel")
-DescribeTrustedAdvisorCheckRefreshStatuses = Action(
-    "DescribeTrustedAdvisorCheckRefreshStatuses"
-)
+DescribeTrustedAdvisorCheckRefreshStatuses = Action("DescribeTrustedAdvisorCheckRefreshStatuses")
 DescribeTrustedAdvisorCheckResult = Action("DescribeTrustedAdvisorCheckResult")
 DescribeTrustedAdvisorCheckSummaries = Action("DescribeTrustedAdvisorCheckSummaries")
 DescribeTrustedAdvisorChecks = Action("DescribeTrustedAdvisorChecks")
diff --git a/typings/awacs/swf.pyi b/typings/awacs/swf.pyi
index 73ed5b8f6..e5641b1b6 100644
--- a/typings/awacs/swf.pyi
+++ b/typings/awacs/swf.pyi
@@ -45,9 +45,7 @@ RegisterActivityType = Action("RegisterActivityType")
 RegisterDomain = Action("RegisterDomain")
 RegisterWorkflowType = Action("RegisterWorkflowType")
 RequestCancelActivityTask = Action("RequestCancelActivityTask")
-RequestCancelExternalWorkflowExecution = Action(
-    "RequestCancelExternalWorkflowExecution"
-)
+RequestCancelExternalWorkflowExecution = Action("RequestCancelExternalWorkflowExecution")
 RequestCancelWorkflowExecution = Action("RequestCancelWorkflowExecution")
 RespondActivityTaskCanceled = Action("RespondActivityTaskCanceled")
 RespondActivityTaskCompleted = Action("RespondActivityTaskCompleted")
diff --git a/typings/awacs/transcribe.pyi b/typings/awacs/transcribe.pyi
index 317d910f7..a420f51af 100644
--- a/typings/awacs/transcribe.pyi
+++ b/typings/awacs/transcribe.pyi
@@ -37,9 +37,7 @@ ListTranscriptionJobs = Action("ListTranscriptionJobs")
 ListVocabularies = Action("ListVocabularies")
 ListVocabularyFilters = Action("ListVocabularyFilters")
 StartMedicalStreamTranscription = Action("StartMedicalStreamTranscription")
-StartMedicalStreamTranscriptionWebSocket = Action(
-    "StartMedicalStreamTranscriptionWebSocket"
-)
+StartMedicalStreamTranscriptionWebSocket = Action("StartMedicalStreamTranscriptionWebSocket")
 StartMedicalTranscriptionJob = Action("StartMedicalTranscriptionJob")
 StartStreamTranscription = Action("StartStreamTranscription")
 StartStreamTranscriptionWebSocket = Action("StartStreamTranscriptionWebSocket")
diff --git a/typings/awacs/worklink.pyi b/typings/awacs/worklink.pyi
index 9932c4a73..8cf4f1b03 100644
--- a/typings/awacs/worklink.pyi
+++ b/typings/awacs/worklink.pyi
@@ -28,12 +28,8 @@ DescribeFleetMetadata = Action("DescribeFleetMetadata")
 DescribeIdentityProviderConfiguration = Action("DescribeIdentityProviderConfiguration")
 DescribeWebsiteCertificateAuthority = Action("DescribeWebsiteCertificateAuthority")
 DisassociateDomain = Action("DisassociateDomain")
-DisassociateWebsiteAuthorizationProvider = Action(
-    "DisassociateWebsiteAuthorizationProvider"
-)
-DisassociateWebsiteCertificateAuthority = Action(
-    "DisassociateWebsiteCertificateAuthority"
-)
+DisassociateWebsiteAuthorizationProvider = Action("DisassociateWebsiteAuthorizationProvider")
+DisassociateWebsiteCertificateAuthority = Action("DisassociateWebsiteCertificateAuthority")
 ListDevices = Action("ListDevices")
 ListDomains = Action("ListDomains")
 ListFleets = Action("ListFleets")
diff --git a/typings/botocore/client.pyi b/typings/botocore/client.pyi
index 7503a7226..69fa41c52 100644
--- a/typings/botocore/client.pyi
+++ b/typings/botocore/client.pyi
@@ -60,9 +60,7 @@ class ClientEndpointBridge(object):
         default_endpoint=...,
         service_signing_name=...,
     ) -> None: ...
-    def resolve(
-        self, service_name, region_name=..., endpoint_url=..., is_secure=...
-    ): ...
+    def resolve(self, service_name, region_name=..., endpoint_url=..., is_secure=...): ...
 
 class BaseClient(object):
     _PY_TO_OP_NAME = ...
diff --git a/typings/botocore/credentials.pyi b/typings/botocore/credentials.pyi
index 9b86c5c68..803f1238c 100644
--- a/typings/botocore/credentials.pyi
+++ b/typings/botocore/credentials.pyi
@@ -3,15 +3,14 @@ This type stub file was generated by pyright.
 """
 
 import logging
-from collections import namedtuple
-from typing import NamedTuple, Optional
+from typing import NamedTuple
 
 logger = logging.getLogger(__name__)
 
 class ReadOnlyCredentials(NamedTuple):
     access_key: str
     secret_key: str
-    token: Optional[str]
+    token: str | None
 
 def create_credential_resolver(session, cache=..., region_name=...):
     """Create a default credential resolver.
@@ -34,9 +33,7 @@ class ProfileProviderBuilder(object):
     the source profile chain created by the assume role provider.
     """
 
-    def __init__(
-        self, session, cache=..., region_name=..., sso_token_cache=...
-    ) -> None: ...
+    def __init__(self, session, cache=..., region_name=..., sso_token_cache=...) -> None: ...
     def providers(self, profile_name, disable_env_vars=...): ...
 
 def get_credentials(session): ...
diff --git a/typings/botocore/discovery.pyi b/typings/botocore/discovery.pyi
index 11b3ba516..ff08c689a 100644
--- a/typings/botocore/discovery.pyi
+++ b/typings/botocore/discovery.pyi
@@ -34,9 +34,7 @@ class EndpointDiscoveryModel(object):
     def gather_identifiers(self, operation, params): ...
 
 class EndpointDiscoveryManager(object):
-    def __init__(
-        self, client, cache=..., current_time=..., always_discover=...
-    ) -> None: ...
+    def __init__(self, client, cache=..., current_time=..., always_discover=...) -> None: ...
     def gather_identifiers(self, operation, params): ...
     def delete_endpoints(self, **kwargs): ...
     def describe_endpoint(self, **kwargs): ...
diff --git a/typings/botocore/docs/method.pyi b/typings/botocore/docs/method.pyi
index 2cc52c5e3..9553a0cce 100644
--- a/typings/botocore/docs/method.pyi
+++ b/typings/botocore/docs/method.pyi
@@ -15,9 +15,7 @@ def get_instance_public_methods(instance):
     """
     ...
 
-def document_model_driven_signature(
-    section, name, operation_model, include=..., exclude=...
-):
+def document_model_driven_signature(section, name, operation_model, include=..., exclude=...):
     """Documents the signature of a model-driven method
 
     :param section: The section to write the documentation to.
diff --git a/typings/botocore/docs/shape.pyi b/typings/botocore/docs/shape.pyi
index 53f95ecdd..963401f1c 100644
--- a/typings/botocore/docs/shape.pyi
+++ b/typings/botocore/docs/shape.pyi
@@ -4,9 +4,7 @@
 
 class ShapeDocumenter(object):
     EVENT_NAME = ...
-    def __init__(
-        self, service_name, operation_name, event_emitter, context=...
-    ) -> None: ...
+    def __init__(self, service_name, operation_name, event_emitter, context=...) -> None: ...
     def traverse_and_document_shape(
         self,
         section,
diff --git a/typings/botocore/handlers.pyi b/typings/botocore/handlers.pyi
index 6508e6258..990cb2abe 100644
--- a/typings/botocore/handlers.pyi
+++ b/typings/botocore/handlers.pyi
@@ -21,9 +21,7 @@ logger = logging.getLogger(__name__)
 REGISTER_FIRST = object()
 REGISTER_LAST = object()
 VALID_BUCKET = re.compile(r"^[a-zA-Z0-9.\-_]{1,255}$")
-_ACCESSPOINT_ARN = (
-    r"^arn:(aws).*:s3:[a-z\-0-9]+:[0-9]{12}:accesspoint[/:]" r"[a-zA-Z0-9\-]{1,63}$"
-)
+_ACCESSPOINT_ARN = r"^arn:(aws).*:s3:[a-z\-0-9]+:[0-9]{12}:accesspoint[/:]" r"[a-zA-Z0-9\-]{1,63}$"
 _OUTPOST_ARN = (
     r"^arn:(aws).*:s3-outposts:[a-z\-0-9]+:[0-9]{12}:outpost[/:]"
     r"[a-zA-Z0-9\-]{1,63}[/:]accesspoint[/:][a-zA-Z0-9\-]{1,63}$"
@@ -266,8 +264,7 @@ BUILTIN_HANDLERS = [
         "docs.*.glacier.*.complete-section",
         AutoPopulatedParam(
             "accountId",
-            'Note: this parameter is set to "-" by'
-            "default if no value is not specified.",
+            'Note: this parameter is set to "-" by' "default if no value is not specified.",
         ).document_auto_populated_param,
     ),
     (
diff --git a/typings/botocore/hooks.pyi b/typings/botocore/hooks.pyi
index 1e78a99da..b1ac1e00f 100644
--- a/typings/botocore/hooks.pyi
+++ b/typings/botocore/hooks.pyi
@@ -72,9 +72,7 @@ class BaseEventHooks(object):
         """
         ...
 
-    def register_first(
-        self, event_name, handler, unique_id=..., unique_id_uses_count=...
-    ):
+    def register_first(self, event_name, handler, unique_id=..., unique_id_uses_count=...):
         """Register an event handler to be called first for an event.
 
         All event handlers registered with ``register_first()`` will
@@ -84,9 +82,7 @@ class BaseEventHooks(object):
         """
         ...
 
-    def register_last(
-        self, event_name, handler, unique_id=..., unique_id_uses_count=...
-    ):
+    def register_last(self, event_name, handler, unique_id=..., unique_id_uses_count=...):
         """Register an event handler to be called last for an event.
 
         All event handlers registered with ``register_last()`` will be called
@@ -95,9 +91,7 @@ class BaseEventHooks(object):
         """
         ...
 
-    def unregister(
-        self, event_name, handler=..., unique_id=..., unique_id_uses_count=...
-    ):
+    def unregister(self, event_name, handler=..., unique_id=..., unique_id_uses_count=...):
         """Unregister an event handler for a given event.
 
         If no ``unique_id`` was given during registration, then the
@@ -137,27 +131,17 @@ class HierarchicalEmitter(BaseEventHooks):
         """
         ...
 
-    def unregister(
-        self, event_name, handler=..., unique_id=..., unique_id_uses_count=...
-    ): ...
+    def unregister(self, event_name, handler=..., unique_id=..., unique_id_uses_count=...): ...
     def __copy__(self): ...
 
 class EventAliaser(BaseEventHooks):
     def __init__(self, event_emitter, event_aliases=...) -> None: ...
     def emit(self, event_name, **kwargs): ...
     def emit_until_response(self, event_name, **kwargs): ...
-    def register(
-        self, event_name, handler, unique_id=..., unique_id_uses_count=...
-    ): ...
-    def register_first(
-        self, event_name, handler, unique_id=..., unique_id_uses_count=...
-    ): ...
-    def register_last(
-        self, event_name, handler, unique_id=..., unique_id_uses_count=...
-    ): ...
-    def unregister(
-        self, event_name, handler=..., unique_id=..., unique_id_uses_count=...
-    ): ...
+    def register(self, event_name, handler, unique_id=..., unique_id_uses_count=...): ...
+    def register_first(self, event_name, handler, unique_id=..., unique_id_uses_count=...): ...
+    def register_last(self, event_name, handler, unique_id=..., unique_id_uses_count=...): ...
+    def unregister(self, event_name, handler=..., unique_id=..., unique_id_uses_count=...): ...
     def __copy__(self): ...
 
 class _PrefixTrie(object):
diff --git a/typings/botocore/regions.pyi b/typings/botocore/regions.pyi
index 1f95f4311..b59e41395 100644
--- a/typings/botocore/regions.pyi
+++ b/typings/botocore/regions.pyi
@@ -53,9 +53,7 @@ class BaseEndpointResolver(object):
         """
         ...
 
-    def get_available_endpoints(
-        self, service_name, partition_name=..., allow_non_regional=...
-    ):
+    def get_available_endpoints(self, service_name, partition_name=..., allow_non_regional=...):
         """Lists the endpoint names of a particular partition.
 
         :type service_name: string
@@ -84,7 +82,5 @@ class EndpointResolver(BaseEndpointResolver):
         ...
 
     def get_available_partitions(self): ...
-    def get_available_endpoints(
-        self, service_name, partition_name=..., allow_non_regional=...
-    ): ...
+    def get_available_endpoints(self, service_name, partition_name=..., allow_non_regional=...): ...
     def construct_endpoint(self, service_name, region_name=..., partition_name=...): ...
diff --git a/typings/botocore/response.pyi b/typings/botocore/response.pyi
index cb8d3d14b..293090853 100644
--- a/typings/botocore/response.pyi
+++ b/typings/botocore/response.pyi
@@ -1,7 +1,7 @@
 """This type stub file was generated by pyright."""
 
 import logging
-from typing import BinaryIO, Generator, Optional
+from typing import BinaryIO, Generator
 
 logger = logging.getLogger(__name__)
 
@@ -27,7 +27,7 @@ class StreamingBody(object):
         """Set the timeout seconds on the socket."""
         ...
 
-    def read(self, amt: Optional[int] = ...) -> bytes:
+    def read(self, amt: int | None = ...) -> bytes:
         """Read at most amt bytes from the stream.
 
         If the amt argument is omitted, read all data.
diff --git a/typings/botocore/retries/throttling.pyi b/typings/botocore/retries/throttling.pyi
index d167740af..cf176be64 100644
--- a/typings/botocore/retries/throttling.pyi
+++ b/typings/botocore/retries/throttling.pyi
@@ -9,9 +9,7 @@ CubicParams = namedtuple("CubicParams", ["w_max", "k", "last_fail"])
 class CubicCalculator(object):
     _SCALE_CONSTANT = ...
     _BETA = ...
-    def __init__(
-        self, starting_max_rate, start_time, scale_constant=..., beta=...
-    ) -> None: ...
+    def __init__(self, starting_max_rate, start_time, scale_constant=..., beta=...) -> None: ...
     def success_received(self, timestamp): ...
     def error_received(self, current_rate, timestamp): ...
     def get_params_snapshot(self):
diff --git a/typings/botocore/session.pyi b/typings/botocore/session.pyi
index aed30a706..7fa3fac45 100644
--- a/typings/botocore/session.pyi
+++ b/typings/botocore/session.pyi
@@ -320,9 +320,7 @@ class Session(object):
         """
         ...
 
-    def unregister(
-        self, event_name, handler=..., unique_id=..., unique_id_uses_count=...
-    ):
+    def unregister(self, event_name, handler=..., unique_id=..., unique_id_uses_count=...):
         """Unregister a handler with an event.
 
         :type event_name: str
@@ -450,9 +448,7 @@ class Session(object):
         """
         ...
 
-    def get_available_regions(
-        self, service_name, partition_name=..., allow_non_regional=...
-    ):
+    def get_available_regions(self, service_name, partition_name=..., allow_non_regional=...):
         """Lists the region and endpoint names of a particular partition.
 
         :type service_name: string
diff --git a/typings/botocore/signers.pyi b/typings/botocore/signers.pyi
index 5e3b30912..65f12dbff 100644
--- a/typings/botocore/signers.pyi
+++ b/typings/botocore/signers.pyi
@@ -90,9 +90,7 @@ class RequestSigner(object):
         """
         ...
 
-    def get_auth_instance(
-        self, signing_name, region_name, signature_version=..., **kwargs
-    ):
+    def get_auth_instance(self, signing_name, region_name, signature_version=..., **kwargs):
         """
         Get an auth instance which can be used to sign a request
         using the given signature version.
@@ -199,9 +197,7 @@ class CloudFrontSigner(object):
         """
         ...
 
-    def build_policy(
-        self, resource, date_less_than, date_greater_than=..., ip_address=...
-    ):
+    def build_policy(self, resource, date_less_than, date_greater_than=..., ip_address=...):
         """A helper to build policy.
 
         :type resource: str
@@ -289,9 +285,7 @@ class S3PostPresigner(object):
         ...
 
 def add_generate_presigned_url(class_attributes, **kwargs): ...
-def generate_presigned_url(
-    self, ClientMethod, Params=..., ExpiresIn=..., HttpMethod=...
-):
+def generate_presigned_url(self, ClientMethod, Params=..., ExpiresIn=..., HttpMethod=...):
     """Generate a presigned url given a client, its method, and arguments
 
     :type ClientMethod: string
@@ -314,9 +308,7 @@ def generate_presigned_url(
     ...
 
 def add_generate_presigned_post(class_attributes, **kwargs): ...
-def generate_presigned_post(
-    self, Bucket, Key, Fields=..., Conditions=..., ExpiresIn=...
-):
+def generate_presigned_post(self, Bucket, Key, Fields=..., Conditions=..., ExpiresIn=...):
     """Builds the url and the form fields used for a presigned s3 post
 
     :type Bucket: string
diff --git a/typings/botocore/translate.pyi b/typings/botocore/translate.pyi
index eab388da5..e29d44509 100644
--- a/typings/botocore/translate.pyi
+++ b/typings/botocore/translate.pyi
@@ -2,9 +2,7 @@
 This type stub file was generated by pyright.
 """
 
-def build_retry_config(
-    endpoint_prefix, retry_model, definitions, client_retry_config=...
-): ...
+def build_retry_config(endpoint_prefix, retry_model, definitions, client_retry_config=...): ...
 def resolve_references(config, definitions):
     """Recursively replace $ref keys.
 
diff --git a/typings/botocore/utils.pyi b/typings/botocore/utils.pyi
index ca334b7a5..f41771a99 100644
--- a/typings/botocore/utils.pyi
+++ b/typings/botocore/utils.pyi
@@ -405,9 +405,7 @@ def check_dns_name(bucket_name):
     """
     ...
 
-def fix_s3_host(
-    request, signature_version, region_name, default_endpoint_url=..., **kwargs
-):
+def fix_s3_host(request, signature_version, region_name, default_endpoint_url=..., **kwargs):
     """
     This handler looks at S3 requests just before they are signed.
     If there is a bucket name on the path (true for everything except
@@ -419,9 +417,7 @@ def fix_s3_host(
     """
     ...
 
-def switch_to_virtual_host_style(
-    request, signature_version, default_endpoint_url=..., **kwargs
-):
+def switch_to_virtual_host_style(request, signature_version, default_endpoint_url=..., **kwargs):
     """
     This is a handler to force virtual host style s3 addressing no matter
     the signature version (which is taken in consideration for the default
diff --git a/typings/botocore/vendored/six.pyi b/typings/botocore/vendored/six.pyi
index d6edc13ed..448890af8 100644
--- a/typings/botocore/vendored/six.pyi
+++ b/typings/botocore/vendored/six.pyi
@@ -85,18 +85,14 @@ class _MovedItems(_LazyModule):
 _moved_attributes = [
     MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
     MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
-    MovedAttribute(
-        "filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"
-    ),
+    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
     MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
     MovedAttribute("intern", "__builtin__", "sys"),
     MovedAttribute("map", "itertools", "builtins", "imap", "map"),
     MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
     MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
     MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
-    MovedAttribute(
-        "reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"
-    ),
+    MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
     MovedAttribute("reduce", "__builtin__", "functools"),
     MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
     MovedAttribute("StringIO", "StringIO", "io"),
@@ -105,9 +101,7 @@ _moved_attributes = [
     MovedAttribute("UserString", "UserString", "collections"),
     MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
     MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
-    MovedAttribute(
-        "zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"
-    ),
+    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
     MovedModule("builtins", "__builtin__"),
     MovedModule("configparser", "ConfigParser"),
     MovedModule("copyreg", "copy_reg"),
@@ -119,9 +113,7 @@ _moved_attributes = [
     MovedModule("html_parser", "HTMLParser", "html.parser"),
     MovedModule("http_client", "httplib", "http.client"),
     MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
-    MovedModule(
-        "email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"
-    ),
+    MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
     MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
     MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
     MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
diff --git a/typings/docker/__init__.pyi b/typings/docker/__init__.pyi
index 3cbfdd399..6e00a7009 100644
--- a/typings/docker/__init__.pyi
+++ b/typings/docker/__init__.pyi
@@ -1,6 +1,5 @@
 """This type stub file was generated by pyright."""
 
-# pylint: disable=C,E,W,R
 from __future__ import annotations
 
 from docker.api import APIClient
diff --git a/typings/docker/api/__init__.pyi b/typings/docker/api/__init__.pyi
index d92182f0e..a247d02f2 100644
--- a/typings/docker/api/__init__.pyi
+++ b/typings/docker/api/__init__.pyi
@@ -1,6 +1,5 @@
 """This type stub file was generated by pyright."""
 
-# pylint: disable=C,E,W,R
 from __future__ import annotations
 
 from docker.api.client import APIClient
a/typings/docker/api/build.pyi b/typings/docker/api/build.pyi index ef391cf6c..2afdee0d8 100644 --- a/typings/docker/api/build.pyi +++ b/typings/docker/api/build.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker import utils diff --git a/typings/docker/api/client.pyi b/typings/docker/api/client.pyi index 64843193b..adffdf13e 100644 --- a/typings/docker/api/client.pyi +++ b/typings/docker/api/client.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations import requests diff --git a/typings/docker/api/config.pyi b/typings/docker/api/config.pyi index 3855bbe0e..f9ddbe6dc 100644 --- a/typings/docker/api/config.pyi +++ b/typings/docker/api/config.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker import utils diff --git a/typings/docker/api/container.pyi b/typings/docker/api/container.pyi index b026e5313..67ad56d85 100644 --- a/typings/docker/api/container.pyi +++ b/typings/docker/api/container.pyi @@ -1,15 +1,12 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker import utils class ContainerApiMixin: @utils.check_resource("container") - def attach( - self, container, stdout=..., stderr=..., stream=..., logs=..., demux=... - ): ... + def attach(self, container, stdout=..., stderr=..., stream=..., logs=..., demux=...): ... @utils.check_resource("container") def attach_socket(self, container, params=..., ws=...): ... @utils.check_resource("container") diff --git a/typings/docker/api/daemon.pyi b/typings/docker/api/daemon.pyi index 063fb71f5..519e55434 100644 --- a/typings/docker/api/daemon.pyi +++ b/typings/docker/api/daemon.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker import utils diff --git a/typings/docker/api/exec_api.pyi b/typings/docker/api/exec_api.pyi index f81bae60a..cbbbc2575 100644 --- a/typings/docker/api/exec_api.pyi +++ b/typings/docker/api/exec_api.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker import utils @@ -24,6 +23,4 @@ class ExecApiMixin: def exec_inspect(self, exec_id): ... def exec_resize(self, exec_id, height=..., width=...): ... @utils.check_resource("exec_id") - def exec_start( - self, exec_id, detach=..., tty=..., stream=..., socket=..., demux=... - ): ... + def exec_start(self, exec_id, detach=..., tty=..., stream=..., socket=..., demux=...): ... diff --git a/typings/docker/api/image.pyi b/typings/docker/api/image.pyi index 1d91eb761..3b56a82aa 100644 --- a/typings/docker/api/image.pyi +++ b/typings/docker/api/image.pyi @@ -1,10 +1,9 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations import logging -from typing import Any, Dict, Iterator, List, Optional, Union +from typing import Any, Iterator from docker import utils @@ -20,60 +19,52 @@ class ImageApiMixin: name: str = ..., quiet: bool = ..., all: bool = ..., - filters: Optional[Dict[str, Any]] = ..., - ) -> Union[Dict[str, Any], List[Dict[str, Any]]]: ... + filters: dict[str, Any] | None = ..., + ) -> dict[str, Any] | list[dict[str, Any]]: ... 
def import_image( self, src=..., repository=..., tag=..., image=..., changes=..., stream_src=... ): ... def import_image_from_data(self, data, repository=..., tag=..., changes=...): ... - def import_image_from_file( - self, filename, repository=..., tag=..., changes=... - ): ... - def import_image_from_stream( - self, stream, repository=..., tag=..., changes=... - ): ... + def import_image_from_file(self, filename, repository=..., tag=..., changes=...): ... + def import_image_from_stream(self, stream, repository=..., tag=..., changes=...): ... def import_image_from_url(self, url, repository=..., tag=..., changes=...): ... def import_image_from_image(self, image, repository=..., tag=..., changes=...): ... @utils.check_resource("image") - def inspect_image(self, image: str) -> Dict[str, Any]: ... + def inspect_image(self, image: str) -> dict[str, Any]: ... @utils.minimum_version("1.30") @utils.check_resource("image") def inspect_distribution( - self, image: str, auth_config: Optional[Dict[str, Any]] = ... - ) -> Dict[str, Any]: ... - def load_image( - self, data: bytes, quiet: bool = ... - ) -> Iterator[Dict[str, Any]]: ... + self, image: str, auth_config: dict[str, Any] | None = ... + ) -> dict[str, Any]: ... + def load_image(self, data: bytes, quiet: bool = ...) -> Iterator[dict[str, Any]]: ... @utils.minimum_version("1.25") - def prune_images( - self, filters: Optional[Dict[str, Any]] = ... - ) -> Dict[str, Any]: ... + def prune_images(self, filters: dict[str, Any] | None = ...) -> dict[str, Any]: ... def pull( self, - repository: Optional[str], - tag: Optional[str] = ..., + repository: str | None, + tag: str | None = ..., stream: bool = ..., - auth_config: Optional[Dict[str, Any]] = ..., + auth_config: dict[str, Any] | None = ..., decode: bool = ..., - platform: Optional[str] = ..., + platform: str | None = ..., all_tags: bool = ..., - ) -> Union[Iterator[Dict[str, Any]], Iterator[bytes], Iterator[str], str]: ... + ) -> Iterator[dict[str, Any]] | Iterator[bytes] | Iterator[str] | str: ... def push( self, - repository: Optional[str], - tag: Optional[str] = ..., + repository: str | None, + tag: str | None = ..., stream: bool = ..., - auth_config: Optional[Dict[str, Any]] = ..., + auth_config: dict[str, Any] | None = ..., decode: bool = ..., - ) -> Union[Iterator[Dict[str, Any]], Iterator[bytes], Iterator[str], str]: ... + ) -> Iterator[dict[str, Any]] | Iterator[bytes] | Iterator[str] | str: ... @utils.check_resource("image") def remove_image( self, image: str, force: bool = ..., noprune: bool = ... - ) -> Dict[str, Any]: ... - def search(self, term: str, limit: Optional[int] = ...) -> List[Dict[str, Any]]: ... + ) -> dict[str, Any]: ... + def search(self, term: str, limit: int | None = ...) -> list[dict[str, Any]]: ... @utils.check_resource("image") def tag( - self, image: str, repository: Optional[str], tag: str = ..., force: bool = ... + self, image: str, repository: str | None, tag: str = ..., force: bool = ... ) -> bool: ... def is_file(src: str) -> bool: ... 
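The annotation rewrites in the image.pyi hunk above all follow one pattern: typing.Optional and typing.Union become PEP 604 unions (X | None, A | B), and the capitalized Dict/List aliases become PEP 585 built-in generics (dict, list). Because these are .pyi stubs and the modules carry "from __future__ import annotations", type checkers such as pyright accept the new spelling regardless of the interpreter version that runs the package. A minimal sketch of the pattern, using a hypothetical function rather than one taken from the stubs:

    from __future__ import annotations  # lets the new syntax parse on Python < 3.10

    from typing import Any

    # Old spelling (pre-PEP 604/585):
    #   def list_images(filters: Optional[Dict[str, Any]] = None) -> List[Dict[str, Any]]: ...
    # New spelling, as used throughout this diff:
    def list_images(filters: dict[str, Any] | None = None) -> list[dict[str, Any]]:
        """Hypothetical example; applies no filtering when filters is None."""
        return [] if filters is None else [filters]
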
diff --git a/typings/docker/api/network.pyi b/typings/docker/api/network.pyi index c03b90bd6..68f058aef 100644 --- a/typings/docker/api/network.pyi +++ b/typings/docker/api/network.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker.utils import check_resource, minimum_version diff --git a/typings/docker/api/plugin.pyi b/typings/docker/api/plugin.pyi index 520bb7cfc..3d01fc2dc 100644 --- a/typings/docker/api/plugin.pyi +++ b/typings/docker/api/plugin.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker import utils diff --git a/typings/docker/api/secret.pyi b/typings/docker/api/secret.pyi index 68b2f9dec..d281624a9 100644 --- a/typings/docker/api/secret.pyi +++ b/typings/docker/api/secret.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker import utils diff --git a/typings/docker/api/service.pyi b/typings/docker/api/service.pyi index 2187d00b7..0707a5b4b 100644 --- a/typings/docker/api/service.pyi +++ b/typings/docker/api/service.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker import utils diff --git a/typings/docker/api/swarm.pyi b/typings/docker/api/swarm.pyi index a4d4b7d54..46c939637 100644 --- a/typings/docker/api/swarm.pyi +++ b/typings/docker/api/swarm.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker import utils diff --git a/typings/docker/api/volume.pyi b/typings/docker/api/volume.pyi index a1bc8dd6f..2a5dc510b 100644 --- a/typings/docker/api/volume.pyi +++ b/typings/docker/api/volume.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker import utils diff --git a/typings/docker/auth.pyi b/typings/docker/auth.pyi index 9295443de..9e1482ece 100644 --- a/typings/docker/auth.pyi +++ b/typings/docker/auth.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations INDEX_NAME = ... 
diff --git a/typings/docker/client.pyi b/typings/docker/client.pyi index 2dc2b10e9..514c1223c 100644 --- a/typings/docker/client.pyi +++ b/typings/docker/client.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W from __future__ import annotations from typing import Any, Callable, Dict, Optional diff --git a/typings/docker/context/__init__.pyi b/typings/docker/context/__init__.pyi index ee928ab93..395a4de61 100644 --- a/typings/docker/context/__init__.pyi +++ b/typings/docker/context/__init__.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker.context.api import ContextAPI diff --git a/typings/docker/context/api.pyi b/typings/docker/context/api.pyi index fbac2caa2..d06c35588 100644 --- a/typings/docker/context/api.pyi +++ b/typings/docker/context/api.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations class ContextAPI: diff --git a/typings/docker/context/config.pyi b/typings/docker/context/config.pyi index a76d9b0e6..989bfc0f2 100644 --- a/typings/docker/context/config.pyi +++ b/typings/docker/context/config.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations METAFILE = ... diff --git a/typings/docker/context/context.pyi b/typings/docker/context/context.pyi index 6cc742cd4..e93eb7f66 100644 --- a/typings/docker/context/context.pyi +++ b/typings/docker/context/context.pyi @@ -1,12 +1,9 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations class Context: - def __init__( - self, name, orchestrator=..., host=..., endpoints=..., tls=... - ) -> None: ... + def __init__(self, name, orchestrator=..., host=..., endpoints=..., tls=...) -> None: ... def set_endpoint( self, name=..., host=..., tls_cfg=..., skip_tls_verify=..., def_namespace=... ): ... diff --git a/typings/docker/credentials/__init__.pyi b/typings/docker/credentials/__init__.pyi index bbda7eb7d..cdff60906 100644 --- a/typings/docker/credentials/__init__.pyi +++ b/typings/docker/credentials/__init__.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker.credentials.constants import * diff --git a/typings/docker/credentials/constants.pyi b/typings/docker/credentials/constants.pyi index d4cec0c3e..70fc204d1 100644 --- a/typings/docker/credentials/constants.pyi +++ b/typings/docker/credentials/constants.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations PROGRAM_PREFIX = ... diff --git a/typings/docker/credentials/errors.pyi b/typings/docker/credentials/errors.pyi index 46c7b9bbc..82b56aa7d 100644 --- a/typings/docker/credentials/errors.pyi +++ b/typings/docker/credentials/errors.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations class StoreError(RuntimeError): ... 
diff --git a/typings/docker/credentials/store.pyi b/typings/docker/credentials/store.pyi index 6db5ec8de..ecde83374 100644 --- a/typings/docker/credentials/store.pyi +++ b/typings/docker/credentials/store.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations class Store: diff --git a/typings/docker/credentials/utils.pyi b/typings/docker/credentials/utils.pyi index 8a2e80fe6..66ac821b7 100644 --- a/typings/docker/credentials/utils.pyi +++ b/typings/docker/credentials/utils.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations def find_executable(executable, path=...): ... diff --git a/typings/docker/errors.pyi b/typings/docker/errors.pyi index 9fc98295c..6aaa3f665 100644 --- a/typings/docker/errors.pyi +++ b/typings/docker/errors.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from typing import Optional diff --git a/typings/docker/models/__init__.pyi b/typings/docker/models/__init__.pyi index eb6b39d23..15de94a44 100644 --- a/typings/docker/models/__init__.pyi +++ b/typings/docker/models/__init__.pyi @@ -1,4 +1,3 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations diff --git a/typings/docker/models/configs.pyi b/typings/docker/models/configs.pyi index d4e9e86b5..6d1f3b1a3 100644 --- a/typings/docker/models/configs.pyi +++ b/typings/docker/models/configs.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker.models.resource import Collection, Model diff --git a/typings/docker/models/containers.pyi b/typings/docker/models/containers.pyi index 3a3042182..9934f2365 100644 --- a/typings/docker/models/containers.pyi +++ b/typings/docker/models/containers.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations import datetime diff --git a/typings/docker/models/images.pyi b/typings/docker/models/images.pyi index ce1ce5c1f..eb301da89 100644 --- a/typings/docker/models/images.pyi +++ b/typings/docker/models/images.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from typing import Any, BinaryIO, Dict, Iterator, List, Optional, Tuple, Union, overload @@ -17,9 +16,7 @@ class Image(Model): @property def tags(self) -> List[str]: ... def history(self) -> str: ... - def save( - self, chunk_size: int = ..., named: Union[bool, str] = ... - ) -> Iterator[bytes]: ... + def save(self, chunk_size: int = ..., named: Union[bool, str] = ...) -> Iterator[bytes]: ... def tag( self, repository: Optional[str], @@ -70,9 +67,7 @@ class ImageCollection(Collection): use_config_proxy: bool = ..., ) -> Tuple[Image, Iterator[Dict[str, str]]]: ... def get(self, name: str) -> Image: ... - def get_registry_data( - self, name: str, auth_config: Dict[str, Any] = ... - ) -> RegistryData: ... + def get_registry_data(self, name: str, auth_config: Dict[str, Any] = ...) -> RegistryData: ... def list( self, name: Optional[str] = ..., @@ -138,12 +133,8 @@ class ImageCollection(Collection): decode: bool, stream: bool = ..., ) -> Union[Iterator[str], str]: ... - def remove( - self, *args: Any, force: bool = ..., image: str, noprune: bool = ... - ) -> Any: ... 
- def search( - self, *args: Any, limit: Optional[int] = ..., term: str - ) -> List[Dict[str, Any]]: ... + def remove(self, *args: Any, force: bool = ..., image: str, noprune: bool = ...) -> Any: ... + def search(self, *args: Any, limit: Optional[int] = ..., term: str) -> List[Dict[str, Any]]: ... def prune(self, filters: Optional[Dict[str, Any]] = ...) -> Dict[str, Any]: ... def prune_builds(self, *args: Any, **kwargs: Any) -> Dict[str, Any]: ... diff --git a/typings/docker/models/networks.pyi b/typings/docker/models/networks.pyi index e50cb4dc8..ae47587a2 100644 --- a/typings/docker/models/networks.pyi +++ b/typings/docker/models/networks.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker.models.resource import Collection, Model diff --git a/typings/docker/models/nodes.pyi b/typings/docker/models/nodes.pyi index 42496d31b..6f1bcd7bc 100644 --- a/typings/docker/models/nodes.pyi +++ b/typings/docker/models/nodes.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker.models.resource import Collection, Model diff --git a/typings/docker/models/plugins.pyi b/typings/docker/models/plugins.pyi index 67684b2c2..14d8f68e1 100644 --- a/typings/docker/models/plugins.pyi +++ b/typings/docker/models/plugins.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker.models.resource import Collection, Model diff --git a/typings/docker/models/resource.pyi b/typings/docker/models/resource.pyi index 2f5c9138b..e7ebf79f4 100644 --- a/typings/docker/models/resource.pyi +++ b/typings/docker/models/resource.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from typing import Any, Dict, List, NoReturn, Optional, Type diff --git a/typings/docker/models/secrets.pyi b/typings/docker/models/secrets.pyi index 596ea4c9a..fcdac9cfd 100644 --- a/typings/docker/models/secrets.pyi +++ b/typings/docker/models/secrets.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker.models.resource import Collection, Model diff --git a/typings/docker/models/services.pyi b/typings/docker/models/services.pyi index b5632c1e9..5ef0adeeb 100644 --- a/typings/docker/models/services.pyi +++ b/typings/docker/models/services.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker.models.resource import Collection, Model diff --git a/typings/docker/models/swarm.pyi b/typings/docker/models/swarm.pyi index 8ba76a306..3fd681ce8 100644 --- a/typings/docker/models/swarm.pyi +++ b/typings/docker/models/swarm.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker.models.resource import Model diff --git a/typings/docker/models/volumes.pyi b/typings/docker/models/volumes.pyi index dab510db7..9f0a40ee0 100644 --- a/typings/docker/models/volumes.pyi +++ b/typings/docker/models/volumes.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker.models.resource import Collection, Model diff --git a/typings/docker/tls.pyi b/typings/docker/tls.pyi index 
616c310fd..3739809f0 100644 --- a/typings/docker/tls.pyi +++ b/typings/docker/tls.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations class TLSConfig: diff --git a/typings/docker/transport/__init__.pyi b/typings/docker/transport/__init__.pyi index dcc905229..a97a8401b 100644 --- a/typings/docker/transport/__init__.pyi +++ b/typings/docker/transport/__init__.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker.transport.ssladapter import SSLHTTPAdapter diff --git a/typings/docker/transport/basehttpadapter.pyi b/typings/docker/transport/basehttpadapter.pyi index adc87d0b0..8df9ac8aa 100644 --- a/typings/docker/transport/basehttpadapter.pyi +++ b/typings/docker/transport/basehttpadapter.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations import requests diff --git a/typings/docker/transport/npipeconn.pyi b/typings/docker/transport/npipeconn.pyi index 185766491..55d8bb929 100644 --- a/typings/docker/transport/npipeconn.pyi +++ b/typings/docker/transport/npipeconn.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations import http.client as httplib @@ -18,8 +17,6 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): class NpipeHTTPAdapter(BaseHTTPAdapter): __attrs__ = ... - def __init__( - self, base_url, timeout=..., pool_connections=..., max_pool_size=... - ) -> None: ... + def __init__(self, base_url, timeout=..., pool_connections=..., max_pool_size=...) -> None: ... def get_connection(self, url, proxies=...): ... def request_url(self, request, proxies): ... 
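As a brief aside, the npipe transport stubbed above (NpipeHTTPAdapter and its connection pool) backs Docker connections over Windows named pipes. A usage sketch, assuming docker-py on a Windows host with the daemon listening on its default pipe:

    # Sketch only: the pipe path below is the Docker Desktop default on Windows.
    import docker

    # An npipe:// base URL routes requests through the named-pipe adapter.
    client = docker.DockerClient(base_url="npipe:////./pipe/docker_engine")
    print(client.version())
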
diff --git a/typings/docker/transport/npipesocket.pyi b/typings/docker/transport/npipesocket.pyi index d82110f40..84ce94b02 100644 --- a/typings/docker/transport/npipesocket.pyi +++ b/typings/docker/transport/npipesocket.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations import io diff --git a/typings/docker/transport/sshconn.pyi b/typings/docker/transport/sshconn.pyi index 596d8277c..993d2487b 100644 --- a/typings/docker/transport/sshconn.pyi +++ b/typings/docker/transport/sshconn.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations import http.client as httplib diff --git a/typings/docker/transport/ssladapter.pyi b/typings/docker/transport/ssladapter.pyi index 76c4dad05..441f9f1db 100644 --- a/typings/docker/transport/ssladapter.pyi +++ b/typings/docker/transport/ssladapter.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations import sys diff --git a/typings/docker/transport/unixconn.pyi b/typings/docker/transport/unixconn.pyi index 837bb40b9..4eb81d1d6 100644 --- a/typings/docker/transport/unixconn.pyi +++ b/typings/docker/transport/unixconn.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations import http.client as httplib diff --git a/typings/docker/types/__init__.pyi b/typings/docker/types/__init__.pyi index 1dfa98021..9af6c6eff 100644 --- a/typings/docker/types/__init__.pyi +++ b/typings/docker/types/__init__.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker.types.containers import ( diff --git a/typings/docker/types/base.pyi b/typings/docker/types/base.pyi index d7e6fc757..dc27de019 100644 --- a/typings/docker/types/base.pyi +++ b/typings/docker/types/base.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations class DictType(dict): diff --git a/typings/docker/types/containers.pyi b/typings/docker/types/containers.pyi index 9113f86ea..a68a4e78f 100644 --- a/typings/docker/types/containers.pyi +++ b/typings/docker/types/containers.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from .base import DictType diff --git a/typings/docker/types/daemon.pyi b/typings/docker/types/daemon.pyi index 7a5adea2a..c566cdb0f 100644 --- a/typings/docker/types/daemon.pyi +++ b/typings/docker/types/daemon.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations class CancellableStream: diff --git a/typings/docker/types/healthcheck.pyi b/typings/docker/types/healthcheck.pyi index 0e29c2019..8018f2cb5 100644 --- a/typings/docker/types/healthcheck.pyi +++ b/typings/docker/types/healthcheck.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker.types.base import DictType diff --git a/typings/docker/types/networks.pyi b/typings/docker/types/networks.pyi index 26fa5b13f..727910590 100644 --- a/typings/docker/types/networks.pyi +++ b/typings/docker/types/networks.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from 
__future__ import annotations class EndpointConfig(dict): @@ -22,6 +21,4 @@ class IPAMConfig(dict): def __init__(self, driver=..., pool_configs=..., options=...) -> None: ... class IPAMPool(dict): - def __init__( - self, subnet=..., iprange=..., gateway=..., aux_addresses=... - ) -> None: ... + def __init__(self, subnet=..., iprange=..., gateway=..., aux_addresses=...) -> None: ... diff --git a/typings/docker/types/services.pyi b/typings/docker/types/services.pyi index c81717186..6fd90ff71 100644 --- a/typings/docker/types/services.pyi +++ b/typings/docker/types/services.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from typing import Any, Dict, List, Optional, Tuple, Union diff --git a/typings/docker/types/swarm.pyi b/typings/docker/types/swarm.pyi index 708472f31..422845de3 100644 --- a/typings/docker/types/swarm.pyi +++ b/typings/docker/types/swarm.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations class SwarmSpec(dict): diff --git a/typings/docker/utils/__init__.pyi b/typings/docker/utils/__init__.pyi index 2a1f785e4..b3f2702cd 100644 --- a/typings/docker/utils/__init__.pyi +++ b/typings/docker/utils/__init__.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from docker.utils.build import create_archive, exclude_paths, mkbuildcontext, tar diff --git a/typings/docker/utils/build.pyi b/typings/docker/utils/build.pyi index 4768f0ca8..538345774 100644 --- a/typings/docker/utils/build.pyi +++ b/typings/docker/utils/build.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations _SEP = ... diff --git a/typings/docker/utils/config.pyi b/typings/docker/utils/config.pyi index f0eeaf649..1d0861ab3 100644 --- a/typings/docker/utils/config.pyi +++ b/typings/docker/utils/config.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations DOCKER_CONFIG_FILENAME = ... diff --git a/typings/docker/utils/decorators.pyi b/typings/docker/utils/decorators.pyi index f7cc4d3c7..d08a82a77 100644 --- a/typings/docker/utils/decorators.pyi +++ b/typings/docker/utils/decorators.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations from typing import Callable, TypeVar diff --git a/typings/docker/utils/fnmatch.pyi b/typings/docker/utils/fnmatch.pyi index d62816661..f0be5f6ca 100644 --- a/typings/docker/utils/fnmatch.pyi +++ b/typings/docker/utils/fnmatch.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations _cache = ... diff --git a/typings/docker/utils/json_stream.pyi b/typings/docker/utils/json_stream.pyi index 5a848c187..ce02f34d7 100644 --- a/typings/docker/utils/json_stream.pyi +++ b/typings/docker/utils/json_stream.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations json_decoder = ... 
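For context on the IPAMPool signature collapsed in the networks.pyi hunk above: its keyword arguments mirror docker-py's network IPAM options. A usage sketch against the SDK, where the network name and addresses are placeholders and a reachable Docker daemon is assumed:

    # Sketch only: assumes docker-py is installed and a daemon is running.
    import docker
    from docker.types import IPAMConfig, IPAMPool

    client = docker.from_env()

    # IPAMPool carries per-pool addressing (subnet, iprange, gateway, aux_addresses);
    # IPAMConfig groups one or more pools for a network.
    pool = IPAMPool(subnet="192.168.52.0/24", gateway="192.168.52.254")
    network = client.networks.create(
        "example-net",  # placeholder name
        driver="bridge",
        ipam=IPAMConfig(pool_configs=[pool]),
    )
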
diff --git a/typings/docker/utils/proxy.pyi b/typings/docker/utils/proxy.pyi index 42afe9ec0..2bc6c4833 100644 --- a/typings/docker/utils/proxy.pyi +++ b/typings/docker/utils/proxy.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations class ProxyConfig(dict): diff --git a/typings/docker/utils/socket.pyi b/typings/docker/utils/socket.pyi index d39c96805..d324b5397 100644 --- a/typings/docker/utils/socket.pyi +++ b/typings/docker/utils/socket.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations STDOUT = ... diff --git a/typings/docker/utils/utils.pyi b/typings/docker/utils/utils.pyi index 434b7b9b2..4bdb0192d 100644 --- a/typings/docker/utils/utils.pyi +++ b/typings/docker/utils/utils.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations def create_ipam_pool(*args, **kwargs): ... diff --git a/typings/docker/version.pyi b/typings/docker/version.pyi index 3c105daa9..a0b476047 100644 --- a/typings/docker/version.pyi +++ b/typings/docker/version.pyi @@ -1,6 +1,5 @@ """This type stub file was generated by pyright.""" -# pylint: disable=C,E,W,R from __future__ import annotations version = ... diff --git a/typings/s3transfer/__init__.pyi b/typings/s3transfer/__init__.pyi index c44c8575b..992adef53 100644 --- a/typings/s3transfer/__init__.pyi +++ b/typings/s3transfer/__init__.pyi @@ -35,9 +35,7 @@ class ReadFileChunk: enable_callback=..., ) -> None: ... @classmethod - def from_filename( - cls, filename, start_byte, chunk_size, callback=..., enable_callback=... - ): ... + def from_filename(cls, filename, start_byte, chunk_size, callback=..., enable_callback=...): ... def read(self, amount=...): ... def enable_callback(self): ... def disable_callback(self): ... @@ -71,9 +69,7 @@ class ShutdownQueue(queue.Queue): class MultipartDownloader: def __init__(self, client, config, osutil, executor_cls=...) -> None: ... - def download_file( - self, bucket, key, filename, object_size, extra_args, callback=... - ): ... + def download_file(self, bucket, key, filename, object_size, extra_args, callback=...): ... class TransferConfig: def __init__( diff --git a/typings/s3transfer/bandwidth.pyi b/typings/s3transfer/bandwidth.pyi index f44ffab42..03d8f197b 100644 --- a/typings/s3transfer/bandwidth.pyi +++ b/typings/s3transfer/bandwidth.pyi @@ -11,9 +11,7 @@ class TimeUtils: class BandwidthLimiter: def __init__(self, leaky_bucket, time_utils=...) -> None: ... - def get_bandwith_limited_stream( - self, fileobj, transfer_coordinator, enabled=... - ): ... + def get_bandwith_limited_stream(self, fileobj, transfer_coordinator, enabled=...): ... class BandwidthLimitedStream: def __init__( diff --git a/typings/s3transfer/download.pyi b/typings/s3transfer/download.pyi index bf3465b6a..7bf41156d 100644 --- a/typings/s3transfer/download.pyi +++ b/typings/s3transfer/download.pyi @@ -35,9 +35,7 @@ class DownloadSeekableOutputManager(DownloadOutputManager): def get_final_io_task(self): ... class DownloadNonSeekableOutputManager(DownloadOutputManager): - def __init__( - self, osutil, transfer_coordinator, io_executor, defer_queue=... - ) -> None: ... + def __init__(self, osutil, transfer_coordinator, io_executor, defer_queue=...) -> None: ... @classmethod def is_compatible(cls, download_target, osutil): ... def get_download_task_tag(self): ... 
@@ -47,9 +45,7 @@ class DownloadNonSeekableOutputManager(DownloadOutputManager): def get_io_write_task(self, fileobj, data, offset): ... class DownloadSpecialFilenameOutputManager(DownloadNonSeekableOutputManager): - def __init__( - self, osutil, transfer_coordinator, io_executor, defer_queue=... - ) -> None: ... + def __init__(self, osutil, transfer_coordinator, io_executor, defer_queue=...) -> None: ... @classmethod def is_compatible(cls, download_target, osutil): ... def get_fileobj_for_io_writes(self, transfer_future): ... diff --git a/typings/s3transfer/futures.pyi b/typings/s3transfer/futures.pyi index 1f892433a..55e287a04 100644 --- a/typings/s3transfer/futures.pyi +++ b/typings/s3transfer/futures.pyi @@ -3,19 +3,7 @@ from __future__ import annotations from concurrent import futures -from typing import ( - TYPE_CHECKING, - Any, - Callable, - ClassVar, - Dict, - List, - NamedTuple, - Optional, - Set, - Type, - Union, -) +from typing import TYPE_CHECKING, Any, Callable, ClassVar, NamedTuple from typing_extensions import Literal @@ -42,15 +30,15 @@ class BaseTransferMeta: @property def call_args(self) -> CallArgs: ... @property - def transfer_id(self) -> Optional[str]: ... + def transfer_id(self) -> str | None: ... @property - def user_context(self) -> Dict[Any, Any]: ... + def user_context(self) -> dict[Any, Any]: ... class TransferFuture(BaseTransferFuture): def __init__( self, - meta: Optional[TransferMeta] = ..., - coordinator: Optional[TransferCoordinator] = ..., + meta: TransferMeta | None = ..., + coordinator: TransferCoordinator | None = ..., ) -> None: ... @property def meta(self) -> TransferMeta: ... @@ -62,36 +50,32 @@ class TransferFuture(BaseTransferFuture): class TransferMeta(BaseTransferMeta): """Holds metadata about the TransferFuture""" - def __init__( - self, call_args: Optional[CallArgs] = ..., transfer_id: Optional[str] = ... - ) -> None: ... + def __init__(self, call_args: CallArgs | None = ..., transfer_id: str | None = ...) -> None: ... @property def call_args(self) -> CallArgs: ... @property - def transfer_id(self) -> Optional[str]: ... + def transfer_id(self) -> str | None: ... @property - def size(self) -> Optional[int]: ... + def size(self) -> int | None: ... @property - def user_context(self) -> Dict[Any, Any]: ... + def user_context(self) -> dict[Any, Any]: ... def provide_transfer_size(self, size: int) -> None: ... class TransferCoordinator: """A helper class for managing TransferFuture""" - def __init__(self, transfer_id: Optional[str] = ...) -> None: ... + def __init__(self, transfer_id: str | None = ...) -> None: ... def __repr__(self) -> str: ... @property - def exception(self) -> Optional[Exception]: ... + def exception(self) -> Exception | None: ... @property - def associated_futures(self) -> Set[futures.Future[Any]]: ... + def associated_futures(self) -> set[futures.Future[Any]]: ... @property - def failure_cleanups(self) -> List[Callable[..., Any]]: ... + def failure_cleanups(self) -> list[Callable[..., Any]]: ... @property def status( self, - ) -> Literal[ - "not-started", "queued", "running", "cancelled", "failed", "success" - ]: ... + ) -> Literal["not-started", "queued", "running", "cancelled", "failed", "success"]: ... def set_result(self, result: Any) -> None: ... def set_exception(self, exception: Exception, override: bool = ...) -> None: ... def result(self) -> Any: ... @@ -99,7 +83,7 @@ class TransferCoordinator: def set_status_to_queued(self) -> None: ... def set_status_to_running(self) -> None: ... 
def submit( - self, executor: BoundedExecutor, task: Task, tag: Optional[TaskTag] = ... + self, executor: BoundedExecutor, task: Task, tag: TaskTag | None = ... ) -> futures.Future: ... def done(self) -> bool: ... def add_associated_future(self, future: futures.Future[Any]) -> None: ... @@ -118,11 +102,11 @@ class BoundedExecutor: self, max_size: int, max_num_threads: int, - tag_semaphores: Dict[str, Any] = ..., - executor_cls: Type[Union[BaseExecutor, futures.ThreadPoolExecutor]] = ..., + tag_semaphores: dict[str, Any] = ..., + executor_cls: type[BaseExecutor | futures.ThreadPoolExecutor] = ..., ) -> None: ... def submit( - self, task: Task, tag: Optional[TaskTag] = ..., block: bool = ... + self, task: Task, tag: TaskTag | None = ..., block: bool = ... ) -> ExecutorFuture: ... def shutdown(self, wait: bool = ...) -> None: ... @@ -133,7 +117,7 @@ class ExecutorFuture: def done(self) -> bool: ... class BaseExecutor: - def __init__(self, max_workers: Optional[int] = ...) -> None: ... + def __init__(self, max_workers: int | None = ...) -> None: ... def submit( self, fn: Callable[..., Any], *args: Any, **kwargs: Any ) -> NonThreadedExecutorFuture: ... @@ -148,10 +132,8 @@ class NonThreadedExecutor(BaseExecutor): class NonThreadedExecutorFuture: def __init__(self) -> None: ... def set_result(self, result: Any) -> None: ... - def set_exception_info( - self, exception: Exception, traceback: TracebackException - ) -> None: ... - def result(self, timeout: Optional[int] = ...) -> Any: ... + def set_exception_info(self, exception: Exception, traceback: TracebackException) -> None: ... + def result(self, timeout: int | None = ...) -> Any: ... def done(self) -> bool: ... def add_done_callback(self, fn: Callable[..., Any]) -> None: ... diff --git a/typings/s3transfer/manager.pyi b/typings/s3transfer/manager.pyi index 9f8b94802..cbc39f51e 100644 --- a/typings/s3transfer/manager.pyi +++ b/typings/s3transfer/manager.pyi @@ -2,20 +2,7 @@ from __future__ import annotations -from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - Dict, - Generic, - List, - Optional, - Set, - TextIO, - Type, - TypeVar, - Union, -) +from typing import TYPE_CHECKING, Any, ClassVar, Generic, Optional, TextIO, TypeVar from .futures import BaseExecutor, TransferFuture from .subscribers import BaseSubscriber @@ -59,18 +46,18 @@ class TransferConfig: ) -> None: ... class TransferManager: - ALLOWED_DOWNLOAD_ARGS: ClassVar[List[str]] = ... - ALLOWED_UPLOAD_ARGS: ClassVar[List[str]] = ... - ALLOWED_COPY_ARGS: ClassVar[List[str]] = ... - ALLOWED_DELETE_ARGS: ClassVar[List[str]] = ... + ALLOWED_DOWNLOAD_ARGS: ClassVar[list[str]] = ... + ALLOWED_UPLOAD_ARGS: ClassVar[list[str]] = ... + ALLOWED_COPY_ARGS: ClassVar[list[str]] = ... + ALLOWED_DELETE_ARGS: ClassVar[list[str]] = ... VALIDATE_SUPPORTED_BUCKET_VALUES: ClassVar[bool] = ... - _UNSUPPORTED_BUCKET_PATTERNS: ClassVar[Dict[str, Pattern[str]]] = ... + _UNSUPPORTED_BUCKET_PATTERNS: ClassVar[dict[str, Pattern[str]]] = ... def __init__( self, client: S3Client, config: Optional[TransferConfig] = ..., osutil: Optional[OSUtils] = ..., - executor_cls: Optional[Type[BaseExecutor]] = ..., + executor_cls: Optional[type[BaseExecutor]] = ..., ) -> None: ... @property def client(self) -> S3Client: ... @@ -78,40 +65,40 @@ class TransferManager: def config(self) -> TransferConfig: ... 
def upload( self, - fileobj: Union[str, TextIO], + fileobj: str | TextIO, bucket: str, key: str, - extra_args: Optional[Dict[str, Any]] = ..., - subscribers: Optional[List[BaseSubscriber]] = ..., + extra_args: Optional[dict[str, Any]] = ..., + subscribers: Optional[list[BaseSubscriber]] = ..., ) -> TransferFuture: ... def download( self, bucket: str, key: str, - fileobj: Union[str, TextIO], - extra_args: Optional[Dict[str, Any]] = ..., - subscribers: Optional[List[BaseSubscriber]] = ..., + fileobj: str | TextIO, + extra_args: Optional[dict[str, Any]] = ..., + subscribers: Optional[list[BaseSubscriber]] = ..., ) -> TransferFuture: ... def copy( self, - copy_source: Dict[str, str], + copy_source: dict[str, str], bucket: str, key: str, - extra_args: Optional[Dict[str, Any]] = ..., - subscribers: Optional[List[BaseSubscriber]] = ..., + extra_args: Optional[dict[str, Any]] = ..., + subscribers: Optional[list[BaseSubscriber]] = ..., source_client: S3Client = ..., ) -> TransferFuture: ... def delete( self, bucket: str, key: str, - extra_args: Optional[Dict[str, Any]] = ..., - subscribers: Optional[List[BaseSubscriber]] = ..., + extra_args: Optional[dict[str, Any]] = ..., + subscribers: Optional[list[BaseSubscriber]] = ..., ) -> TransferFuture: ... def __enter__(self) -> TransferConfig: ... def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_value: Optional[BaseException], *args: Any, ) -> None: ... @@ -122,8 +109,8 @@ _T = TypeVar("_T") class TransferCoordinatorController(Generic[_T]): def __init__(self) -> None: ... @property - def tracked_transfer_coordinators(self) -> Set[_T]: ... + def tracked_transfer_coordinators(self) -> set[_T]: ... def add_transfer_coordinator(self, transfer_coordinator: _T) -> None: ... def remove_transfer_coordinator(self, transfer_coordinator: _T) -> None: ... - def cancel(self, msg: str = ..., exc_type: Type[BaseException] = ...) -> None: ... + def cancel(self, msg: str = ..., exc_type: type[BaseException] = ...) -> None: ... def wait(self) -> None: ... diff --git a/typings/s3transfer/processpool.pyi b/typings/s3transfer/processpool.pyi index 917ae75c6..9a0f2d69f 100644 --- a/typings/s3transfer/processpool.pyi +++ b/typings/s3transfer/processpool.pyi @@ -45,9 +45,7 @@ class ProcessTransferConfig: class ProcessPoolDownloader: def __init__(self, client_kwargs=..., config=...) -> None: ... - def download_file( - self, bucket, key, filename, extra_args=..., expected_size=... - ): ... + def download_file(self, bucket, key, filename, extra_args=..., expected_size=...): ... def shutdown(self): ... def __enter__(self): ... def __exit__(self, exc_type, exc_value, *args): ... diff --git a/typings/s3transfer/utils.pyi b/typings/s3transfer/utils.pyi index b9f7a2e64..f7bfa1fa9 100644 --- a/typings/s3transfer/utils.pyi +++ b/typings/s3transfer/utils.pyi @@ -2,19 +2,7 @@ from __future__ import annotations -from typing import ( - TYPE_CHECKING, - Any, - Callable, - ClassVar, - Dict, - List, - Optional, - Tuple, - Type, - TypeVar, - Union, -) +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, TypeVar from typing_extensions import TypedDict @@ -22,22 +10,22 @@ if TYPE_CHECKING: from logging import Logger from pathlib import Path + from typing_extensions import TypeAlias + from .futures import TransferFuture _K = TypeVar("_K") _V = TypeVar("_V") -_AnyPath = Union[Path, str] +_AnyPath: TypeAlias = "Path | str" MAX_PARTS: int = ... MAX_SINGLE_UPLOAD_SIZE: int = 5 * 1024**3 MIN_UPLOAD_CHUNKSIZE: int = ... 
logger: Logger = ... -S3_RETRYABLE_DOWNLOAD_ERRORS: Tuple[Type[Exception], ...] = ... +S3_RETRYABLE_DOWNLOAD_ERRORS: tuple[type[Exception], ...] = ... def random_file_extension(num_digits: int = ...) -> str: ... -def signal_not_transferring( - request: Any, operation_name: str, **kwargs: Any -) -> None: ... +def signal_not_transferring(request: Any, operation_name: str, **kwargs: Any) -> None: ... def signal_transferring(request: Any, operation_name: str, **kwargs: Any) -> None: ... def calculate_num_parts(size: int, part_size: int) -> int: ... def calculate_range_parameter( @@ -45,20 +33,18 @@ def calculate_range_parameter( ) -> str: ... def get_callbacks( transfer_future: TransferFuture, callback_type: str -) -> List[Callable[..., Any]]: ... +) -> list[Callable[..., Any]]: ... def invoke_progress_callbacks( - callbacks: List[Callable[..., Any]], bytes_transferred: int + callbacks: list[Callable[..., Any]], bytes_transferred: int ) -> None: ... -def get_filtered_dict( - original_dict: Dict[_K, _V], whitelisted_keys: List[str] -) -> Dict[_K, _V]: ... +def get_filtered_dict(original_dict: dict[_K, _V], whitelisted_keys: list[str]) -> dict[_K, _V]: ... _CopySource = TypedDict("_CopySource", Bucket=str, Key=str) class CallArgs: bucket: str copy_source: _CopySource - extra_args: Dict[str, Any] + extra_args: dict[str, Any] fileobj: _AnyPath key: str def __init__(self, **kwargs: Any) -> None: ... @@ -84,30 +70,26 @@ class OSUtils: filename: _AnyPath, start_byte: int, size: int, - callbacks: List[Callable[..., Any]], + callbacks: list[Callable[..., Any]], ) -> ReadFileChunk: ... def open_file_chunk_reader_from_fileobj( self, fileobj: _AnyPath, chunk_size: int, full_file_size: int, - callbacks: List[Callable[..., Any]], - close_callbacks: Optional[List[Callable[..., Any]]] = ..., + callbacks: list[Callable[..., Any]], + close_callbacks: Optional[list[Callable[..., Any]]] = ..., ) -> ReadFileChunk: ... def open(self, filename: _AnyPath, mode: str) -> Any: ... def remove_file(self, filename: _AnyPath) -> None: ... - def rename_file( - self, current_filename: _AnyPath, new_filename: _AnyPath - ) -> None: ... + def rename_file(self, current_filename: _AnyPath, new_filename: _AnyPath) -> None: ... @classmethod def is_special_file(cls, filename: _AnyPath) -> bool: ... def get_temp_filename(self, filename: _AnyPath) -> str: ... def allocate(self, filename: _AnyPath, size: int) -> None: ... class DeferredOpenFile: - def __init__( - self, filename, start_byte=..., mode=..., open_function=... - ) -> None: ... + def __init__(self, filename, start_byte=..., mode=..., open_function=...) -> None: ... @property def name(self): ... def read(self, amount=...): ...