From 16f3e2811fd28e9e868650274ee753ce364fe380 Mon Sep 17 00:00:00 2001
From: Javy de Koning
Date: Sat, 6 Apr 2024 09:10:59 +0000
Subject: [PATCH] Linter fixes and enable Ruff

---
 .mega-linter.yml                              |   1 +
 .../scripts/start.sh                          |   2 +-
 .../scripts/validate.sh                       |   2 +-
 .../scripts/install-codedeploy.sh             |  33 ++-
 .../scripts/install-deps.sh                   |   2 +-
 .../sample-fargate-node-app/build/docker.sh   |   3 +-
 samples/sample-serverless-app/handler.py      |   1 -
 src/lambda_codebase/account/main.py           |   1 -
 .../cross_region_bucket/main.py               |   1 -
 .../determine_default_branch.py               |   1 -
 .../initial_commit/initial_commit.py          |   1 -
 .../adf-build/provisioner/src/__init__.py     |   4 -
 .../tests/test_adf_codepipeline_generate.py   |   4 +-
 .../test_adf_codepipeline_output_artifacts.py |   3 +-
 .../tests/test_pipeline_creation.py           |   1 -
 .../shared/helpers/package_transform.sh       |  26 ++-
 .../shared/helpers/terraform/adf_terraform.sh | 200 ++++++++----------
 .../adf-build/shared/python/target.py         |   2 +-
 .../python/tests/test_cloudformation.py       |   1 -
 .../shared/python/tests/test_codepipeline.py  |   1 -
 .../python/tests/test_deployment_map.py       |   4 +-
 .../shared/python/tests/test_organizations.py |   1 -
 .../python/tests/test_parameter_store.py      |   1 -
 .../shared/python/tests/test_pipeline.py      |   4 -
 .../python/tests/test_step_functions.py       |   1 -
 .../shared/python/tests/test_target.py        |   3 -
 .../initial_commit/initial_commit.py          |   1 -
 27 files changed, 133 insertions(+), 172 deletions(-)

diff --git a/.mega-linter.yml b/.mega-linter.yml
index 29fb905a3..240e0aec7 100644
--- a/.mega-linter.yml
+++ b/.mega-linter.yml
@@ -21,6 +21,7 @@ ENABLE_LINTERS:
   - MARKDOWN_MARKDOWN_LINK_CHECK
   - MARKDOWN_MARKDOWNLINT
   - MARKDOWN_MARKDOWN_TABLE_FORMATTER
+  - PYTHON_RUFF
   - SPELL_CSPELL
   - TERRAFORM_TFLINT
   - YAML_YAMLLINT
diff --git a/samples/sample-ec2-java-app-codedeploy/scripts/start.sh b/samples/sample-ec2-java-app-codedeploy/scripts/start.sh
index 649b805f9..34b0d4c56 100755
--- a/samples/sample-ec2-java-app-codedeploy/scripts/start.sh
+++ b/samples/sample-ec2-java-app-codedeploy/scripts/start.sh
@@ -2,4 +2,4 @@
 cd /home/ec2-user/server
 sudo /usr/bin/java -jar -Dserver.port=80 \
-  *.jar > /dev/null 2> /dev/null < /dev/null &
+  *.jar >/dev/null 2>/dev/null </dev/null &
diff --git a/samples/sample-ec2-with-codedeploy/scripts/install-codedeploy.sh b/samples/sample-ec2-with-codedeploy/scripts/install-codedeploy.sh
--- a/samples/sample-ec2-with-codedeploy/scripts/install-codedeploy.sh
+++ b/samples/sample-ec2-with-codedeploy/scripts/install-codedeploy.sh
@@ -2,7 +2,7 @@
-exec > >(tee /var/log/user-data.log|logger -t user-data -s 2>/dev/console) 2>&1
+exec > >(tee /var/log/user-data.log | logger -t user-data -s 2>/dev/console) 2>&1
 AUTOUPDATE=false

-function installdep(){
+function installdep() {
   if [ ${PLAT} = "ubuntu" ]; then
     apt-get -y update
     # Satisfying even Ubuntu older versions.
@@ -18,7 +18,7 @@
 fi
 }

-function platformize(){
+function platformize() {
   # Linux OS detection
   if hash lsb_release; then
     echo "Ubuntu server OS detected"
@@ -32,8 +32,7 @@
 fi
 }

-
-function execute(){
+function execute() {
   if [ ${PLAT} = "ubuntu" ]; then
     cd /tmp/
     wget https://aws-codedeploy-${REGION}.s3.${REGION}.amazonaws.com/latest/install
@@ -41,12 +40,12 @@

     if ./install auto; then
       echo "Installation completed"
-        if ! ${AUTOUPDATE}; then
-          echo "Disabling Auto Update"
-          sed -i '/@reboot/d' /etc/cron.d/codedeploy-agent-update
-          chattr +i /etc/cron.d/codedeploy-agent-update
-          rm -f /tmp/install
-        fi
+      if ! ${AUTOUPDATE}; then
+        echo "Disabling Auto Update"
+        sed -i '/@reboot/d' /etc/cron.d/codedeploy-agent-update
+        chattr +i /etc/cron.d/codedeploy-agent-update
+        rm -f /tmp/install
+      fi
       exit 0
     else
       echo "Installation script failed, please investigate"
@@ -61,12 +60,12 @@

     if ./install auto; then
       echo "Installation completed"
-        if ! ${AUTOUPDATE}; then
-          echo "Disabling auto update"
-          sed -i '/@reboot/d' /etc/cron.d/codedeploy-agent-update
-          chattr +i /etc/cron.d/codedeploy-agent-update
-          rm -f /tmp/install
-        fi
+      if ! ${AUTOUPDATE}; then
+        echo "Disabling auto update"
+        sed -i '/@reboot/d' /etc/cron.d/codedeploy-agent-update
+        chattr +i /etc/cron.d/codedeploy-agent-update
+        rm -f /tmp/install
+      fi
       exit 0
     else
       echo "Installation script failed, please investigate"
diff --git a/samples/sample-ec2-with-codedeploy/scripts/install-deps.sh b/samples/sample-ec2-with-codedeploy/scripts/install-deps.sh
index 874e4f050..8a97f0f83 100755
--- a/samples/sample-ec2-with-codedeploy/scripts/install-deps.sh
+++ b/samples/sample-ec2-with-codedeploy/scripts/install-deps.sh
@@ -34,7 +34,7 @@ echo "
 ProxyRequests Off
 ProxyPass / http://localhost:8080/
 ProxyPassReverse / http://localhost:8080/
-" >> sudo /etc/httpd/conf/httpd.conf
+" /etc/httpd/conf/httpd.conf >>sudo

 # start the httpd service now and stop it until userdata
 sudo service httpd start
diff --git a/samples/sample-fargate-node-app/build/docker.sh b/samples/sample-fargate-node-app/build/docker.sh
index 2e628a446..46948b66e 100755
--- a/samples/sample-fargate-node-app/build/docker.sh
+++ b/samples/sample-fargate-node-app/build/docker.sh
@@ -11,4 +11,5 @@ docker tag $REPOSITORY_URI:latest $REPOSITORY_URI:$IMAGE_TAG
 docker push $REPOSITORY_URI:latest
 docker push $REPOSITORY_URI:$IMAGE_TAG

-tmp=$(mktemp); jq --arg REPOSITORY_URI "$REPOSITORY_URI" --arg IMAGE_TAG "$IMAGE_TAG" '.Parameters.Image = $REPOSITORY_URI+":"+$IMAGE_TAG' params/global.json > "$tmp" && mv "$tmp" params/global.json
+tmp=$(mktemp)
+jq --arg REPOSITORY_URI "$REPOSITORY_URI" --arg IMAGE_TAG "$IMAGE_TAG" '.Parameters.Image = $REPOSITORY_URI+":"+$IMAGE_TAG' params/global.json >"$tmp" && mv "$tmp" params/global.json
diff --git a/samples/sample-serverless-app/handler.py b/samples/sample-serverless-app/handler.py
index 26fdb835f..90b2e4275 100644
--- a/samples/sample-serverless-app/handler.py
+++ b/samples/sample-serverless-app/handler.py
@@ -1,4 +1,3 @@
-import json

 def lambda_handler(event, context):
     print(event)
diff --git a/src/lambda_codebase/account/main.py b/src/lambda_codebase/account/main.py
index cc855d58a..0b1f63eb5 100644
--- a/src/lambda_codebase/account/main.py
+++ b/src/lambda_codebase/account/main.py
@@ -14,7 +14,6 @@
 import boto3
 from botocore.exceptions import ClientError
 from cfn_custom_resource import (  # pylint: disable=unused-import
-    lambda_handler,
     create,
     update,
     delete,
diff --git a/src/lambda_codebase/cross_region_bucket/main.py b/src/lambda_codebase/cross_region_bucket/main.py
index c02f45d8c..a21c155de 100644
--- a/src/lambda_codebase/cross_region_bucket/main.py
+++ b/src/lambda_codebase/cross_region_bucket/main.py
@@ -17,7 +17,6 @@
 # ^ https://www.logilab.org/ticket/2481
 import boto3
 from cfn_custom_resource import (  # pylint: disable=unused-import
-    lambda_handler,
     create,
     update,
     delete,
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/determine_default_branch/determine_default_branch.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/determine_default_branch/determine_default_branch.py
index 39faed35d..b0d1b899b 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/determine_default_branch/determine_default_branch.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/determine_default_branch/determine_default_branch.py
@@ -6,7 +6,6 @@
 from dataclasses import dataclass
 import boto3
 from cfn_custom_resource import (  # pylint: disable=unused-import
-    lambda_handler,
     create,
     update,
     delete,
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/initial_commit/initial_commit.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/initial_commit/initial_commit.py
index b272800f8..5c711381c 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/initial_commit/initial_commit.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/initial_commit/initial_commit.py
@@ -14,7 +14,6 @@
 import boto3
 import jinja2
 from cfn_custom_resource import (  # pylint: disable=unused-import
-    lambda_handler,
     create,
     update,
     delete,
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/provisioner/src/__init__.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/provisioner/src/__init__.py
index 8f1a9c905..f2d1c4ca4 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/provisioner/src/__init__.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/provisioner/src/__init__.py
@@ -4,7 +4,3 @@
 """__init__
 """
-
-from .configparser import read_config_files
-from .vpc import delete_default_vpc
-from .account import Account
-from .support import Support, SupportLevel
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/tests/test_adf_codepipeline_generate.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/tests/test_adf_codepipeline_generate.py
index c59eb3cb2..4bb607351 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/tests/test_adf_codepipeline_generate.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/tests/test_adf_codepipeline_generate.py
@@ -38,5 +38,5 @@ def test_generates_without_input_and_output_artifacts(input_mock, output_mock, a
         category='Build',
         provider='CodeBuild',
     )
-    assert not 'input_artifacts' in action.config
-    assert not 'output_artifacts' in action.config
+    assert 'input_artifacts' not in action.config
+    assert 'output_artifacts' not in action.config
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/tests/test_adf_codepipeline_output_artifacts.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/tests/test_adf_codepipeline_output_artifacts.py
index 2d1842e46..8541c1047 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/tests/test_adf_codepipeline_output_artifacts.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/tests/test_adf_codepipeline_output_artifacts.py
@@ -4,7 +4,6 @@
 # pylint: skip-file

 from mock import patch
-from copy import deepcopy
 from cdk_constructs.adf_codepipeline import Action
 from aws_cdk import ( aws_codepipeline )
 from adf_codepipeline_test_constants import BASE_MAP_PARAMS
@@ -20,7 +19,7 @@ def test_get_output_artifacts_no_base_output(base_output_name_mock, action_decl_
         category='Build',
         provider='CodeBuild',
     )
-    assert not 'output_artifacts' in action.config
+    assert 'output_artifacts' not in action.config


 @patch('cdk_constructs.adf_codepipeline._codepipeline.CfnPipeline.ActionDeclarationProperty')
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py
index 4c73f5921..a399db7ee 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py
@@ -4,7 +4,6 @@
 # pylint: skip-file

 import pytest
-import os
 from mock import patch
 from aws_cdk import App

diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/package_transform.sh b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/package_transform.sh
index c325be39e..044560146 100755
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/package_transform.sh
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/package_transform.sh
@@ -11,16 +11,15 @@ set -e
 SKIP_BUILD=0

 # Walk through the options passed to this script
-for i in "$@"
-do
+for i in "$@"; do
   case $i in
-    --no-build)
-    SKIP_BUILD=1
-    ;;
-    *)
-    echo "Unknown option: $i"
-    exit 1
-    ;;
+  --no-build)
+    SKIP_BUILD=1
+    ;;
+  *)
+    echo "Unknown option: $i"
+    exit 1
+    ;;
   esac
 done

@@ -34,16 +33,15 @@ fi

 # Get list of regions supported by this application
 echo "Determine which regions need to be prepared"
-app_regions=`aws ssm get-parameters --names /adf/deployment/$ADF_DEPLOYMENT_MAP_SOURCE/$ADF_PROJECT_NAME/regions --with-decryption --output=text --query='Parameters[0].Value'`
+app_regions=$(aws ssm get-parameters --names /adf/deployment/$ADF_DEPLOYMENT_MAP_SOURCE/$ADF_PROJECT_NAME/regions --with-decryption --output=text --query='Parameters[0].Value')
 # Convert json list to bash list (space delimited regions)
-regions="`echo $app_regions | sed -e 's/\[\([^]]*\)\]/\1/g' | sed 's/,/ /g' | sed "s/'//g"`"
+regions="$(echo $app_regions | sed -e 's/\[\([^]]*\)\]/\1/g' | sed 's/,/ /g' | sed "s/'//g")"

-for region in $regions
-do
+for region in $regions; do
   if [ $CONTAINS_TRANSFORM ]; then
     echo "Packaging templates for region $region"
     ssm_bucket_name="/adf/cross_region/s3_regional_bucket/$region"
-    bucket=`aws ssm get-parameters --names $ssm_bucket_name --with-decryption --output=text --query='Parameters[0].Value'`
+    bucket=$(aws ssm get-parameters --names $ssm_bucket_name --with-decryption --output=text --query='Parameters[0].Value')
     sam package --s3-bucket $bucket --output-template-file $CODEBUILD_SRC_DIR/template_$region.yml --region $region
   else
     # If package is not needed, just copy the file for each region
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/terraform/adf_terraform.sh b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/terraform/adf_terraform.sh
index d9635ab73..a6d073e9b 100755
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/terraform/adf_terraform.sh
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/terraform/adf_terraform.sh
@@ -5,123 +5,111 @@
 CURRENT=$(pwd)
 terraform --version
 echo "Terraform stage: $TF_STAGE"
-tfinit(){
-    # retrieve regional S3 bucket name from parameter store
-    S3_BUCKET_REGION_NAME=$(aws ssm get-parameter --name "/adf/cross_region/s3_regional_bucket/$AWS_REGION" --region "$AWS_DEFAULT_REGION" | jq .Parameter.Value | sed s/\"//g)
-    mkdir -p "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
-    cd "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}" || exit
-    cp -R "${CURRENT}"/tf/. "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
-    # if account related variables exist copy the folder in the work directory
-    if [ -d "${CURRENT}/tfvars/${TF_VAR_TARGET_ACCOUNT_ID}" ]; then
-        cp -R "${CURRENT}/tfvars/${TF_VAR_TARGET_ACCOUNT_ID}/." "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
-    fi
-    if [ -d "${CURRENT}/tfvars/${TF_VAR_TARGET_ACCOUNT_ID}/${AWS_REGION}" ]; then
-        cp -R "${CURRENT}/tfvars/${TF_VAR_TARGET_ACCOUNT_ID}/${AWS_REGION}"/. "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
-    fi
-    if [ -f "${CURRENT}/tfvars/global.auto.tfvars" ]; then
-        cp -R "${CURRENT}/tfvars/global.auto.tfvars" "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
-    fi
-    terraform init \
-        -backend-config "bucket=$S3_BUCKET_REGION_NAME" \
-        -backend-config "region=$AWS_REGION" \
-        -backend-config "key=$ADF_PROJECT_NAME/$ACCOUNT_ID.tfstate" \
-        -backend-config "dynamodb_table=adf-tflocktable"
+tfinit() {
+  # retrieve regional S3 bucket name from parameter store
+  S3_BUCKET_REGION_NAME=$(aws ssm get-parameter --name "/adf/cross_region/s3_regional_bucket/$AWS_REGION" --region "$AWS_DEFAULT_REGION" | jq .Parameter.Value | sed s/\"//g)
+  mkdir -p "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
+  cd "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}" || exit
+  cp -R "${CURRENT}"/tf/. "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
+  # if account related variables exist copy the folder in the work directory
+  if [ -d "${CURRENT}/tfvars/${TF_VAR_TARGET_ACCOUNT_ID}" ]; then
+    cp -R "${CURRENT}/tfvars/${TF_VAR_TARGET_ACCOUNT_ID}/." "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
+  fi
+  if [ -d "${CURRENT}/tfvars/${TF_VAR_TARGET_ACCOUNT_ID}/${AWS_REGION}" ]; then
+    cp -R "${CURRENT}/tfvars/${TF_VAR_TARGET_ACCOUNT_ID}/${AWS_REGION}"/. "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
+  fi
+  if [ -f "${CURRENT}/tfvars/global.auto.tfvars" ]; then
+    cp -R "${CURRENT}/tfvars/global.auto.tfvars" "${CURRENT}/tmp/${TF_VAR_TARGET_ACCOUNT_ID}-${AWS_REGION}"
+  fi
+  terraform init \
+    -backend-config "bucket=$S3_BUCKET_REGION_NAME" \
+    -backend-config "region=$AWS_REGION" \
+    -backend-config "key=$ADF_PROJECT_NAME/$ACCOUNT_ID.tfstate" \
+    -backend-config "dynamodb_table=adf-tflocktable"

-    echo "Bucket: $S3_BUCKET_REGION_NAME"
-    echo "Region: $AWS_REGION"
-    echo "Key: $ADF_PROJECT_NAME/$ACCOUNT_ID.tfstate"
-    echo "DynamoDB table: adf-tflocktable"
+  echo "Bucket: $S3_BUCKET_REGION_NAME"
+  echo "Region: $AWS_REGION"
+  echo "Key: $ADF_PROJECT_NAME/$ACCOUNT_ID.tfstate"
+  echo "DynamoDB table: adf-tflocktable"
 }
-tfplan(){
-    DATE=$(date +%Y-%m-%d)
-    TS=$(date +%Y%m%d%H%M%S)
-    bash "${CURRENT}/adf-build/helpers/sts.sh" "${TF_VAR_TARGET_ACCOUNT_ID}" "${TF_VAR_TARGET_ACCOUNT_ROLE}"
-    set -o pipefail
-    terraform plan -out "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}" 2>&1 | tee -a "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-${TS}.log"
-    set +o pipefail
-    # Save Terraform plan results to the S3 bucket
-    aws s3 cp "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-${TS}.log" "s3://${S3_BUCKET_REGION_NAME}/${ADF_PROJECT_NAME}/tf-plan/${DATE}/${TF_VAR_TARGET_ACCOUNT_ID}/${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-${TS}.log"
-    echo "Path to terraform plan s3://$S3_BUCKET_REGION_NAME/$ADF_PROJECT_NAME/tf-plan/$DATE/$TF_VAR_TARGET_ACCOUNT_ID/$ADF_PROJECT_NAME-$TF_VAR_TARGET_ACCOUNT_ID-$TS.log"
+tfplan() {
+  DATE=$(date +%Y-%m-%d)
+  TS=$(date +%Y%m%d%H%M%S)
+  bash "${CURRENT}/adf-build/helpers/sts.sh" "${TF_VAR_TARGET_ACCOUNT_ID}" "${TF_VAR_TARGET_ACCOUNT_ROLE}"
+  set -o pipefail
+  terraform plan -out "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}" 2>&1 | tee -a "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-${TS}.log"
+  set +o pipefail
+  # Save Terraform plan results to the S3 bucket
+  aws s3 cp "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-${TS}.log" "s3://${S3_BUCKET_REGION_NAME}/${ADF_PROJECT_NAME}/tf-plan/${DATE}/${TF_VAR_TARGET_ACCOUNT_ID}/${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-${TS}.log"
+  echo "Path to terraform plan s3://$S3_BUCKET_REGION_NAME/$ADF_PROJECT_NAME/tf-plan/$DATE/$TF_VAR_TARGET_ACCOUNT_ID/$ADF_PROJECT_NAME-$TF_VAR_TARGET_ACCOUNT_ID-$TS.log"
 }
-tfapply(){
-    terraform apply "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}"
+tfapply() {
+  terraform apply "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}"
 }
-tfplandestroy(){
-    terraform plan -destroy -out "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-destroy"
+tfplandestroy() {
+  terraform plan -destroy -out "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-destroy"
 }
-tfdestroy(){
-    terraform apply "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-destroy"
+tfdestroy() {
+  terraform apply "${ADF_PROJECT_NAME}-${TF_VAR_TARGET_ACCOUNT_ID}-destroy"
 }
-tfrun(){
-    export TF_VAR_TARGET_ACCOUNT_ID=$ACCOUNT_ID
-    echo "Running terraform $TF_STAGE on account $ACCOUNT_ID and region $REGION"
-    if [[ "$TF_STAGE" = "init" ]]
-    then
-        set -e
-        tfinit
-        set +e
-    elif [[ "$TF_STAGE" = "plan" ]]
-    then
-        set -e
-        tfinit
-        tfplan
-        set +e
-    elif [[ "$TF_STAGE" = "apply" ]]
-    then
-        set -e
-        tfinit
-        tfplan
-        tfapply
-        set +e
-    elif [[ "$TF_STAGE" = "destroy" ]]
-    then
-        set -e
-        tfinit
-        tfplandestroy
-        tfdestroy
-        set +e
-    else
-        echo "Invalid Terraform stage: TF_STAGE = $TF_STAGE"
-        exit 1
-    fi
+tfrun() {
+  export TF_VAR_TARGET_ACCOUNT_ID=$ACCOUNT_ID
+  echo "Running terraform $TF_STAGE on account $ACCOUNT_ID and region $REGION"
+  if [[ "$TF_STAGE" = "init" ]]; then
+    set -e
+    tfinit
+    set +e
+  elif [[ "$TF_STAGE" = "plan" ]]; then
+    set -e
+    tfinit
+    tfplan
+    set +e
+  elif [[ "$TF_STAGE" = "apply" ]]; then
+    set -e
+    tfinit
+    tfplan
+    tfapply
+    set +e
+  elif [[ "$TF_STAGE" = "destroy" ]]; then
+    set -e
+    tfinit
+    tfplandestroy
+    tfdestroy
+    set +e
+  else
+    echo "Invalid Terraform stage: TF_STAGE = $TF_STAGE"
+    exit 1
+  fi
 }

 # if REGIONS is not defined as pipeline parameters use default region
-if [[ -z "$REGIONS" ]]
-then
-    REGIONS=$AWS_DEFAULT_REGION
+if [[ -z "$REGIONS" ]]; then
+  REGIONS=$AWS_DEFAULT_REGION
 fi
 echo "List of target regions: $REGIONS"
-for REGION in $(echo "$REGIONS" | sed "s/,/ /g")
-do
-    AWS_REGION=$(echo -n "$REGION" | sed 's/^[ \t]*//;s/[ \t]*$//') # sed trims whitespaces
-    export TF_VAR_TARGET_REGION=$AWS_REGION
-    # if TARGET_ACCOUNTS and TARGET_OUS are not defined apply to all accounts
-    if [[ -z "$TARGET_ACCOUNTS" ]] && [[ -z "$TARGET_OUS" ]]
-    then
-        echo "Apply to all accounts"
-        for ACCOUNT_ID in $(jq '.[].AccountId' "${CURRENT}/accounts.json" | sed 's/"//g' )
-        do
-            tfrun
-        done
-    fi
+for REGION in $(echo "$REGIONS" | sed "s/,/ /g"); do
+  AWS_REGION=$(echo -n "$REGION" | sed 's/^[ \t]*//;s/[ \t]*$//') # sed trims whitespaces
+  export TF_VAR_TARGET_REGION=$AWS_REGION
+  # if TARGET_ACCOUNTS and TARGET_OUS are not defined apply to all accounts
+  if [[ -z "$TARGET_ACCOUNTS" ]] && [[ -z "$TARGET_OUS" ]]; then
+    echo "Apply to all accounts"
+    for ACCOUNT_ID in $(jq '.[].AccountId' "${CURRENT}/accounts.json" | sed 's/"//g'); do
+      tfrun
+    done
+  fi

-    if ! [[ -z "$TARGET_ACCOUNTS" ]]
-    then
-        # apply only on a subset of accounts (TARGET_ACCOUNTS)
-        echo "List of target account: $TARGET_ACCOUNTS"
-        for ACCOUNT_ID in $(echo "$TARGET_ACCOUNTS" | sed "s/,/ /g")
-        do
-            tfrun
-        done
-    fi
+  if ! [[ -z "$TARGET_ACCOUNTS" ]]; then
+    # apply only on a subset of accounts (TARGET_ACCOUNTS)
+    echo "List of target account: $TARGET_ACCOUNTS"
+    for ACCOUNT_ID in $(echo "$TARGET_ACCOUNTS" | sed "s/,/ /g"); do
+      tfrun
+    done
+  fi

-    if ! [[ -z "$TARGET_OUS" ]]
-    then
-        echo "List target OUs: $TARGET_OUS"
-        for ACCOUNT_ID in $(jq '.[].AccountId' "${CURRENT}/accounts_from_ous.json" | sed 's/"//g' )
-        do
-            tfrun
-        done
-    fi
+  if ! [[ -z "$TARGET_OUS" ]]; then
+    echo "List target OUs: $TARGET_OUS"
+    for ACCOUNT_ID in $(jq '.[].AccountId' "${CURRENT}/accounts_from_ous.json" | sed 's/"//g'); do
+      tfrun
+    done
+  fi
 done
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/target.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/target.py
index 303fc2bda..171c77306 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/target.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/target.py
@@ -181,7 +181,7 @@ def _create_response_object(self, responses):
         for response in responses:
             is_active_not_excluded = (
                 Target._account_is_active(response)
-                and not response.get('Id') in self.target_structure.exclude
+                and response.get('Id') not in self.target_structure.exclude
             )
             if is_active_not_excluded:
                 accounts_found += 1
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_cloudformation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_cloudformation.py
index 3a767c846..c06dc4efc 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_cloudformation.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_cloudformation.py
@@ -3,7 +3,6 @@
 # pylint: skip-file

-import os
 import boto3
 from pytest import fixture
 from stubs import stub_cloudformation
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_codepipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_codepipeline.py
index 91a2c2134..ec26b5f8a 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_codepipeline.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_codepipeline.py
@@ -9,7 +9,6 @@
 from stubs import stub_codepipeline
 from mock import Mock

-from paginator import paginator
 from codepipeline import CodePipeline

diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_deployment_map.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_deployment_map.py
index 5e62ce2ba..e87ff03d2 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_deployment_map.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_deployment_map.py
@@ -4,10 +4,8 @@
 # pylint: skip-file

 import os
-import boto3

-from errors import InvalidDeploymentMapError
-from pytest import fixture, raises
+from pytest import fixture
 from mock import Mock
 from ..pipeline import Pipeline
 from ..deployment_map import DeploymentMap
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_organizations.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_organizations.py
index 050102377..60d21dde5 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_organizations.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_organizations.py
@@ -4,7 +4,6 @@
 # pylint: skip-file

 from datetime import datetime, timezone
-import os
 import boto3
 from pytest import fixture

diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_parameter_store.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_parameter_store.py
index f8e3d277a..d0eb54bdd 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_parameter_store.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_parameter_store.py
@@ -3,7 +3,6 @@
 # pylint: skip-file

-import os
 import boto3
 from pytest import fixture, mark
 from stubs import stub_parameter_store
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_pipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_pipeline.py
index 8d99173a5..8d721d628 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_pipeline.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_pipeline.py
@@ -3,10 +3,6 @@
 # pylint: skip-file

-import os
-import sys
-import yaml
-import boto3
 from pytest import fixture
 from ..pipeline import Pipeline

diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_step_functions.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_step_functions.py
index f7c85ab66..5ad16a265 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_step_functions.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_step_functions.py
@@ -3,7 +3,6 @@
 # pylint: skip-file

-import os
 import boto3
 from pytest import fixture, raises
 from stubs import stub_step_functions
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_target.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_target.py
index c23c8c1e7..ffdc0760b 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_target.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_target.py
@@ -3,12 +3,9 @@
 # pylint: skip-file

-import os
-import boto3
 from errors import InvalidDeploymentMapError
 from pytest import fixture, raises
 from mock import Mock, patch,call
-from .stubs import stub_target
 from ..target import Target, TargetStructure
 from parameter_store import ParameterStore
diff --git a/src/lambda_codebase/initial_commit/initial_commit.py b/src/lambda_codebase/initial_commit/initial_commit.py
index 9cc2ad320..0f4adb6f0 100644
--- a/src/lambda_codebase/initial_commit/initial_commit.py
+++ b/src/lambda_codebase/initial_commit/initial_commit.py
@@ -14,7 +14,6 @@
 import boto3
 import jinja2
 from cfn_custom_resource import (  # pylint: disable=unused-import
-    lambda_handler,
     create,
     update,
     delete,