From d3500394a8035cc94221dee4de0a48e1bccc42b7 Mon Sep 17 00:00:00 2001 From: tony griffin <54268925+tony-griffin@users.noreply.github.com> Date: Thu, 8 Aug 2024 12:29:45 +0100 Subject: [PATCH] feat: DBTP-1137 trigger prod pipeline from non-prod pipeline (#195) Co-authored-by: Chiara Mapelli Co-authored-by: Chiara Mapelli Co-authored-by: Will Gibson <8738245+WillGibson@users.noreply.github.com> --- .gitignore | 2 + environment-pipelines/buildspec-apply.yml | 13 +- ....yml => buildspec-install-build-tools.yml} | 16 +- environment-pipelines/buildspec-plan.yml | 19 +- environment-pipelines/buildspec-trigger.yml | 52 ++++ environment-pipelines/codebuild.tf | 41 ++- environment-pipelines/codepipeline.tf | 10 +- environment-pipelines/iam.tf | 60 ++++ environment-pipelines/locals.tf | 160 ++++++---- environment-pipelines/stage_config.yml | 7 + environment-pipelines/tests/unit.tftest.hcl | 294 +++++++++++++++++- environment-pipelines/variables.tf | 25 +- s3/main.tf | 2 - 13 files changed, 609 insertions(+), 92 deletions(-) rename environment-pipelines/{buildspec.yml => buildspec-install-build-tools.yml} (74%) create mode 100644 environment-pipelines/buildspec-trigger.yml diff --git a/.gitignore b/.gitignore index 72c39c68c..eb5eb4ddd 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,5 @@ *.envrc *.idea* *.DS_Store +*.env* +*venv* diff --git a/environment-pipelines/buildspec-apply.yml b/environment-pipelines/buildspec-apply.yml index 4892aadd5..2090c4a93 100644 --- a/environment-pipelines/buildspec-apply.yml +++ b/environment-pipelines/buildspec-apply.yml @@ -17,17 +17,18 @@ phases: - echo -e "\nWorking on environment ${ENVIRONMENT}" - cd "terraform/environments/${ENVIRONMENT}" - terraform apply plan.tfplan - - echo -e "\nGenerating manifests and deploying AWS Copilot environment resources" - - cd "${CODEBUILD_SRC_DIR}" post_build: commands: - | if [ "${CODEBUILD_BUILD_SUCCEEDING}" == "1" ] then - MESSAGE="Terraform apply phase complete" + MESSAGE="Terraform apply phase 
complete for the ${ENVIRONMENT} environment." + ADDITIONAL_OPTIONS="" else - MESSAGE="Terraform apply phase FAILED" + MESSAGE=":alert: Terraform apply phase FAILED for the ${ENVIRONMENT} environment." + ADDITIONAL_OPTIONS="--send-to-main-channel true" fi - - platform-helper notify add-comment "${SLACK_CHANNEL_ID}" "${SLACK_TOKEN}" "${SLACK_REF}" "${MESSAGE} for the ${ENVIRONMENT} environment." + - platform-helper notify add-comment "${SLACK_CHANNEL_ID}" "${SLACK_TOKEN}" "${SLACK_REF}" "${MESSAGE}" ${ADDITIONAL_OPTIONS} artifacts: - files: [] + files: + - "**/*" diff --git a/environment-pipelines/buildspec.yml b/environment-pipelines/buildspec-install-build-tools.yml similarity index 74% rename from environment-pipelines/buildspec.yml rename to environment-pipelines/buildspec-install-build-tools.yml index c19d33228..893b7f4fc 100644 --- a/environment-pipelines/buildspec.yml +++ b/environment-pipelines/buildspec-install-build-tools.yml @@ -28,11 +28,17 @@ phases: - export "PATH=$(pwd)/bin:$PATH" - export PYTHONPATH=$(pwd) - | - export SLACK_REF=$(platform-helper notify environment-progress "${SLACK_CHANNEL_ID}" "${SLACK_TOKEN}" \ - "*Deploying ${APPLICATION} environments*" \ - --build-arn "${CODEBUILD_BUILD_ARN}" \ - --repository "${REPOSITORY}" \ - --commit-sha "${CODEBUILD_RESOLVED_SOURCE_VERSION: -7}") + if [ "${SLACK_THREAD_ID}" == "NONE" ] + then + export SLACK_REF=$(platform-helper notify environment-progress "${SLACK_CHANNEL_ID}" "${SLACK_TOKEN}" \ + "*Pipeline ${PIPELINE_NAME}* is deploying ${APPLICATION} environments" \ + --build-arn "${CODEBUILD_BUILD_ARN}" \ + --repository "${REPOSITORY}" \ + --commit-sha "${CODEBUILD_RESOLVED_SOURCE_VERSION: -7}") + else + export SLACK_REF="${SLACK_THREAD_ID}" + fi + - echo "Build SLACK_REF is - ${SLACK_REF}" - cd bin - curl -s -qL -o terraform_install.zip https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip - unzip terraform_install.zip diff --git 
a/environment-pipelines/buildspec-plan.yml b/environment-pipelines/buildspec-plan.yml index 44bd280b7..f9152f331 100644 --- a/environment-pipelines/buildspec-plan.yml +++ b/environment-pipelines/buildspec-plan.yml @@ -15,11 +15,11 @@ phases: commands: - set -e - echo "Cancelling any pending approvals for ${APPLICATION}-${ENVIRONMENT}-environment-pipeline" - - PIPELINE_STATE=$(aws codepipeline get-pipeline-state --name "${APPLICATION}-${ENVIRONMENT}-environment-pipeline") + - PIPELINE_STATE=$(aws codepipeline get-pipeline-state --name "${APPLICATION}-${PIPELINE_NAME}-environment-pipeline") - PIPELINE_APPROVAL_EXECID=$(echo $PIPELINE_STATE | jq --arg stage "Approve-${ENVIRONMENT}" -r '.stageStates[] | select(.stageName == $stage and .latestExecution.status == "InProgress") | .latestExecution.pipelineExecutionId') - | if [ -n "${PIPELINE_APPROVAL_EXECID}" ]; then - aws codepipeline stop-pipeline-execution --pipeline-name "${APPLICATION}-${ENVIRONMENT}-environment-pipeline" --pipeline-execution-id $PIPELINE_APPROVAL_EXECID --abandon --reason "Abandoning previous pipeline execution pending approval to run terraform plan" + aws codepipeline stop-pipeline-execution --pipeline-name "${APPLICATION}-${PIPELINE_NAME}-environment-pipeline" --pipeline-execution-id $PIPELINE_APPROVAL_EXECID --abandon --reason "Abandoning previous pipeline execution pending approval to run terraform plan" fi - echo "Terraform Plan Phase" - platform-helper notify add-comment "${SLACK_CHANNEL_ID}" "${SLACK_TOKEN}" "${SLACK_REF}" "Starting terraform plan phase for the ${ENVIRONMENT} environment." @@ -37,16 +37,17 @@ phases: commands: - export BUILD_ID="$CODEBUILD_BUILD_ID" - | - if [ "${CODEBUILD_BUILD_SUCCEEDING}" == "1" ] - then - platform-helper notify add-comment "${SLACK_CHANNEL_ID}" "${SLACK_TOKEN}" "${SLACK_REF}" "Terraform plan phase complete for the ${ENVIRONMENT} environment." 
- if [ "${NEEDS_APPROVAL}" == "yes" ] - then - platform-helper notify add-comment "${SLACK_CHANNEL_ID}" "${SLACK_TOKEN}" "${SLACK_REF}" "Waiting for approval for the ${ENVIRONMENT} environment." + if [ "${CODEBUILD_BUILD_SUCCEEDING}" == "1" ]; then + MESSAGE="Terraform plan phase complete for the ${ENVIRONMENT} environment." + if [ "${NEEDS_APPROVAL}" == "yes" ]; then + MESSAGE="${MESSAGE} Waiting for approval for the ${ENVIRONMENT} environment." fi + ADDITIONAL_OPTIONS="" else - platform-helper notify add-comment "${SLACK_CHANNEL_ID}" "${SLACK_TOKEN}" "${SLACK_REF}" "Terraform plan phase FAILED for the ${ENVIRONMENT} environment." + MESSAGE=":alert: Terraform plan phase FAILED for the ${ENVIRONMENT} environment." + ADDITIONAL_OPTIONS="--send-to-main-channel true" fi + - platform-helper notify add-comment "${SLACK_CHANNEL_ID}" "${SLACK_TOKEN}" "${SLACK_REF}" "${MESSAGE}" ${ADDITIONAL_OPTIONS} artifacts: files: - "**/*" diff --git a/environment-pipelines/buildspec-trigger.yml b/environment-pipelines/buildspec-trigger.yml new file mode 100644 index 000000000..8f4475009 --- /dev/null +++ b/environment-pipelines/buildspec-trigger.yml @@ -0,0 +1,52 @@ +version: 0.2 + +env: + parameter-store: + SLACK_TOKEN: /codebuild/slack_oauth_token + +phases: + install: + commands: + - export PATH="${CODEBUILD_SRC_DIR}/build-tools/bin:$PATH" + - export PYTHONPATH="${CODEBUILD_SRC_DIR}/build-tools" + - echo -e "\nAssume triggered account role to trigger ${TRIGGERED_PIPELINE_NAME} pipeline" + + - assumed_role=$(aws sts assume-role --role-arn "${TRIGGERED_ACCOUNT_ROLE_ARN}" --role-session-name "trigger-prod-pipeline-$(date +%s)") + + - PROD_AWS_ACCESS_KEY_ID=$(echo $assumed_role | jq -r .Credentials.AccessKeyId) + - PROD_AWS_SECRET_ACCESS_KEY=$(echo $assumed_role | jq -r .Credentials.SecretAccessKey) + - PROD_AWS_SESSION_TOKEN=$(echo $assumed_role | jq -r .Credentials.SessionToken) + + - export PROFILE_NAME="${TRIGGERED_PIPELINE_AWS_PROFILE}" + # Populate the ~/.aws/credentials file.. 
+ - aws configure set aws_access_key_id "${PROD_AWS_ACCESS_KEY_ID}" --profile "${PROFILE_NAME}" + - aws configure set aws_secret_access_key "${PROD_AWS_SECRET_ACCESS_KEY}" --profile "${PROFILE_NAME}" + - aws configure set aws_session_token "${PROD_AWS_SESSION_TOKEN}" --profile "${PROFILE_NAME}" + # Populate the ~/.aws/config file.. + - aws configure set region "eu-west-2" --profile "${PROFILE_NAME}" + - aws configure set output "json" --profile "${PROFILE_NAME}" + + build: + commands: + - set -e + - MESSAGE="Triggering ${TRIGGERED_PIPELINE_NAME} pipeline" + - echo "${MESSAGE}" + - platform-helper notify add-comment "${SLACK_CHANNEL_ID}" "${SLACK_TOKEN}" "${SLACK_REF}" "${MESSAGE}" + + - aws codepipeline start-pipeline-execution --name "${TRIGGERED_PIPELINE_NAME}" --profile "${PROFILE_NAME}" --variables "name=SLACK_THREAD_ID,value=${SLACK_REF}" + + post_build: + commands: + - | + if [ "${CODEBUILD_BUILD_SUCCEEDING}" == "1" ] + then + MESSAGE="SUCCESSFULLY triggered ${TRIGGERED_PIPELINE_NAME}" + ADDITIONAL_OPTIONS="" + else + MESSAGE=":alert: @here FAILED to trigger ${TRIGGERED_PIPELINE_NAME}" + ADDITIONAL_OPTIONS="--send-to-main-channel true" + fi + - platform-helper notify add-comment "${SLACK_CHANNEL_ID}" "${SLACK_TOKEN}" "${SLACK_REF}" "${MESSAGE} in the ${ACCOUNT_NAME} account." 
${ADDITIONAL_OPTIONS} + +artifacts: + files: [] diff --git a/environment-pipelines/codebuild.tf b/environment-pipelines/codebuild.tf index 7aeed2ae2..421426019 100644 --- a/environment-pipelines/codebuild.tf +++ b/environment-pipelines/codebuild.tf @@ -30,7 +30,7 @@ resource "aws_codebuild_project" "environment_pipeline_build" { source { type = "CODEPIPELINE" - buildspec = file("${path.module}/buildspec.yml") + buildspec = file("${path.module}/buildspec-install-build-tools.yml") } tags = local.tags @@ -124,3 +124,42 @@ resource "aws_codebuild_project" "environment_pipeline_apply" { tags = local.tags } + +resource "aws_codebuild_project" "trigger_other_environment_pipeline" { + for_each = toset(local.triggers_another_pipeline ? [""] : []) + name = "${var.application}-${var.pipeline_name}-environment-pipeline-trigger" + description = "Triggers a target pipeline" + build_timeout = 5 + service_role = aws_iam_role.environment_pipeline_codebuild.arn + encryption_key = module.artifact_store.kms_key_arn + + artifacts { + type = "CODEPIPELINE" + } + + cache { + type = "S3" + location = module.artifact_store.bucket_name + } + + environment { + compute_type = "BUILD_GENERAL1_SMALL" + image = "aws/codebuild/amazonlinux2-x86_64-standard:5.0" + type = "LINUX_CONTAINER" + image_pull_credentials_type = "CODEBUILD" + } + + logs_config { + cloudwatch_logs { + group_name = aws_cloudwatch_log_group.environment_pipeline_codebuild.name + stream_name = aws_cloudwatch_log_stream.environment_pipeline_codebuild.name + } + } + + source { + type = "CODEPIPELINE" + buildspec = file("${path.module}/buildspec-trigger.yml") + } + + tags = local.tags +} diff --git a/environment-pipelines/codepipeline.tf b/environment-pipelines/codepipeline.tf index 451f718aa..f37c56525 100644 --- a/environment-pipelines/codepipeline.tf +++ b/environment-pipelines/codepipeline.tf @@ -8,6 +8,12 @@ resource "aws_codepipeline" "environment_pipeline" { depends_on = 
[aws_iam_role_policy.artifact_store_access_for_environment_codebuild] pipeline_type = "V2" + variable { + name = "SLACK_THREAD_ID" + default_value = "NONE" + description = "This can be set by a triggering pipeline to continue an existing message thread" + } + artifact_store { location = module.artifact_store.bucket_name type = "S3" @@ -39,7 +45,7 @@ resource "aws_codepipeline" "environment_pipeline" { } stage { - name = "Build" + name = "Install-Build-Tools" action { name = "InstallTools" @@ -56,8 +62,10 @@ resource "aws_codepipeline" "environment_pipeline" { PrimarySource = "project_deployment_source" EnvironmentVariables : jsonencode([ { name : "APPLICATION", value : var.application }, + { name : "PIPELINE_NAME", value : var.pipeline_name }, { name : "REPOSITORY", value : var.repository }, { name : "SLACK_CHANNEL_ID", value : var.slack_channel, type : "PARAMETER_STORE" }, + { name : "SLACK_THREAD_ID", value : "#{variables.SLACK_THREAD_ID}" }, ]) } } diff --git a/environment-pipelines/iam.tf b/environment-pipelines/iam.tf index a89fc738f..ecbccf0fc 100644 --- a/environment-pipelines/iam.tf +++ b/environment-pipelines/iam.tf @@ -907,3 +907,63 @@ resource "aws_iam_role_policy" "copilot_assume_role_for_environment_codebuild" { role = aws_iam_role.environment_pipeline_codebuild.name policy = data.aws_iam_policy_document.copilot_assume_role.json } + +########### TRIGGERED PIPELINE RESOURCES ########## + +#------PROD-TARGET-ACCOUNT------ +resource "aws_iam_role" "trigger_pipeline" { + for_each = local.set_of_triggering_pipeline_names + name = "${var.application}-${var.pipeline_name}-trigger-pipeline-from-${each.value}" + assume_role_policy = data.aws_iam_policy_document.assume_trigger_pipeline.json + tags = local.tags +} + +data "aws_iam_policy_document" "assume_trigger_pipeline" { + statement { + effect = "Allow" + principals { + type = "AWS" + identifiers = local.triggering_pipeline_role_arns + } + actions = ["sts:AssumeRole"] + } +} + +resource "aws_iam_role_policy" 
"trigger_pipeline" { + for_each = local.set_of_triggering_pipeline_names + name = "${var.application}-${var.pipeline_name}-trigger-pipeline-from-${each.value}" + role = aws_iam_role.trigger_pipeline[each.value].name + policy = data.aws_iam_policy_document.trigger_pipeline[each.value].json +} + +data "aws_iam_policy_document" "trigger_pipeline" { + for_each = local.set_of_triggering_pipeline_names + statement { + actions = [ + "codepipeline:StartPipelineExecution", + ] + resources = [ + aws_codepipeline.environment_pipeline.arn + ] + } +} + + +#------NON-PROD-SOURCE-ACCOUNT------ + +resource "aws_iam_role_policy" "assume_role_to_trigger_pipeline_policy" { + for_each = toset(local.triggers_another_pipeline ? [""] : []) + name = "${var.application}-${var.pipeline_name}-assume-role-to-trigger-codepipeline-policy" + role = aws_iam_role.environment_pipeline_codebuild.name + policy = data.aws_iam_policy_document.assume_role_to_trigger_codepipeline_policy_document[""].json +} + +data "aws_iam_policy_document" "assume_role_to_trigger_codepipeline_policy_document" { + for_each = toset(local.triggers_another_pipeline ? 
[""] : []) + statement { + actions = [ + "sts:AssumeRole" + ] + resources = [local.triggered_pipeline_account_role] + } +} diff --git a/environment-pipelines/locals.tf b/environment-pipelines/locals.tf index 645a3c938..f7df1e451 100644 --- a/environment-pipelines/locals.tf +++ b/environment-pipelines/locals.tf @@ -7,76 +7,128 @@ locals { stage_config = yamldecode(file("${path.module}/stage_config.yml")) - base_env_config = { for name, config in var.environment_config : name => merge(lookup(var.environment_config, "*", {}), config) } + base_env_config = { for name, config in var.environment_config : name => merge(lookup(var.environment_config, "*", {}), config) if name != "*" } + + extracted_account_names_and_ids = toset(flatten([ + for env, env_config in local.base_env_config : [ + for account_type, account_details in env_config.accounts : { + "name" = account_details.name, + "id" = account_details.id + } + ] + ])) + + account_map = { for account in local.extracted_account_names_and_ids : account["name"] => account["id"] } + # Convert the env config into a list and add env name and vpc / requires_approval from the environments config. - environment_config = [for name, env in var.environments : merge(lookup(local.base_env_config, name, {}), env, { "name" = name })] + environment_config = [for name, env in var.environments : merge(lookup(local.base_env_config, name, {}), env, { "name" = name })] + triggers_another_pipeline = var.pipeline_to_trigger != null - # We flatten a list of lists for each env: - initial_stages = flatten( - [for env in local.environment_config : [ - # The first element of the inner list for an env is the Plan stage. 
- { - type : "plan", - stage_name : "Plan-${env.name}", - env : env.name, - accounts : env.accounts, - input_artifacts : ["build_output"], - output_artifacts : ["${env.name}_terraform_plan"], - configuration : { - ProjectName : "${var.application}-${var.pipeline_name}-environment-pipeline-plan" - PrimarySource : "build_output" - EnvironmentVariables : jsonencode([ - { name : "APPLICATION", value : var.application }, - { name : "ENVIRONMENT", value : env.name }, - { name : "COPILOT_PROFILE", value : env.accounts.deploy.name }, - { name : "SLACK_CHANNEL_ID", value : var.slack_channel, type : "PARAMETER_STORE" }, - { name : "SLACK_REF", value : "#{slack.SLACK_REF}" }, - { name : "NEEDS_APPROVAL", value : lookup(env, "requires_approval", false) ? "yes" : "no" } - ]) - } - namespace : "${env.name}-plan" + triggered_pipeline_account_name = local.triggers_another_pipeline ? var.all_pipelines[var.pipeline_to_trigger].account : null + triggered_account_id = local.triggers_another_pipeline ? local.account_map[local.triggered_pipeline_account_name] : null + + list_of_triggering_pipelines = [for pipeline, config in var.all_pipelines : merge(config, { name = pipeline }) if lookup(config, "pipeline_to_trigger", null) == var.pipeline_name] + set_of_triggering_pipeline_names = toset([for pipeline in local.list_of_triggering_pipelines : pipeline.name]) + + triggering_pipeline_role_arns = [for name in local.set_of_triggering_pipeline_names : "arn:aws:iam::${local.account_map[var.all_pipelines[name].account]}:role/demodjango-${name}-environment-pipeline-codebuild"] + + + initial_stages = [for env in local.environment_config : [ + # The first element of the inner list for an env is the Plan stage. 
+ { + type : "plan", + stage_name : "Plan-${env.name}", + env : env.name, + accounts : env.accounts, + input_artifacts : ["build_output"], + output_artifacts : ["${env.name}_terraform_plan"], + configuration : { + ProjectName : "${var.application}-${var.pipeline_name}-environment-pipeline-plan" + PrimarySource : "build_output" + EnvironmentVariables : jsonencode([ + { name : "APPLICATION", value : var.application }, + { name : "ENVIRONMENT", value : env.name }, + { name : "PIPELINE_NAME", value : var.pipeline_name }, + { name : "COPILOT_PROFILE", value : env.accounts.deploy.name }, + { name : "SLACK_CHANNEL_ID", value : var.slack_channel, type : "PARAMETER_STORE" }, + { name : "SLACK_REF", value : "#{slack.SLACK_REF}" }, + { name : "NEEDS_APPROVAL", value : lookup(env, "requires_approval", false) ? "yes" : "no" }, + { name : "SLACK_THREAD_ID", value : "#{variables.SLACK_THREAD_ID}" }, + ]) + } + namespace : "${env.name}-plan" + }, + # The second element of the inner list for an env is the Approval stage if required, or the empty list otherwise. + lookup(env, "requires_approval", false) ? [{ + type : "approve", + stage_name : "Approve-${env.name}", + env : "", + input_artifacts : [], + output_artifacts : [], + configuration : { + CustomData : "Review Terraform Plan" + ExternalEntityLink : "https://${data.aws_region.current.name}.console.aws.amazon.com/codesuite/codebuild/${data.aws_caller_identity.current.account_id}/projects/${var.application}-${var.pipeline_name}-environment-pipeline-plan/build/#{${env.name}-plan.BUILD_ID}" }, - # The second element of the inner list for an env is the Approval stage if required, or the empty list otherwise. - lookup(env, "requires_approval", false) ? 
[{ - type : "approve", - stage_name : "Approve-${env.name}", - env : "", - input_artifacts : [], - output_artifacts : [], - configuration : { - CustomData : "Review Terraform Plan" - ExternalEntityLink : "https://${data.aws_region.current.name}.console.aws.amazon.com/codesuite/codebuild/${data.aws_caller_identity.current.account_id}/projects/${var.application}-${var.pipeline_name}-environment-pipeline-plan/build/#{${env.name}-plan.BUILD_ID}" - }, - namespace : null - }] : [], - # The third element of the inner list for an env is the Apply stage. + namespace : null + }] : [], + # The third element of the inner list for an env is the Apply stage. + { + type : "apply", + env : env.name, + stage_name : "Apply-${env.name}", + accounts : env.accounts, + input_artifacts : ["${env.name}_terraform_plan"], + output_artifacts : [], + configuration : { + ProjectName : "${var.application}-${var.pipeline_name}-environment-pipeline-apply" + PrimarySource : "${env.name}_terraform_plan" + EnvironmentVariables : jsonencode([ + { name : "ENVIRONMENT", value : env.name }, + { name : "SLACK_CHANNEL_ID", value : var.slack_channel, type : "PARAMETER_STORE" }, + { name : "SLACK_REF", value : "#{slack.SLACK_REF}" }, + { name : "VPC", value : local.base_env_config[env.name].vpc }, + { name : "SLACK_THREAD_ID", value : "#{variables.SLACK_THREAD_ID}" }, + ]) + }, + namespace : null + } + ] + ] + + triggered_pipeline_account_role = local.triggers_another_pipeline ? "arn:aws:iam::${local.triggered_account_id}:role/${var.application}-${var.pipeline_to_trigger}-trigger-pipeline-from-${var.pipeline_name}" : null + target_pipeline = local.triggers_another_pipeline ? "${var.application}-${var.pipeline_to_trigger}-environment-pipeline" : null + + + all_stages = flatten( + concat(local.initial_stages, local.triggers_another_pipeline ? 
[ { - type : "apply", - env : env.name, - stage_name : "Apply-${env.name}", - accounts : env.accounts, - input_artifacts : ["${env.name}_terraform_plan"], + type : "trigger", + stage_name : "Trigger-Pipeline", + input_artifacts : ["build_output"], output_artifacts : [], configuration : { - ProjectName : "${var.application}-${var.pipeline_name}-environment-pipeline-apply" - PrimarySource : "${env.name}_terraform_plan" + ProjectName : "${var.application}-${var.pipeline_name}-environment-pipeline-trigger[\"\"]" + PrimarySource : "build_output" EnvironmentVariables : jsonencode([ - { name : "ENVIRONMENT", value : env.name }, + { name : "TRIGGERED_ACCOUNT_ROLE_ARN", value : local.triggered_pipeline_account_role }, + { name : "TRIGGERED_PIPELINE_NAME", value : local.target_pipeline }, + { name : "TRIGGERED_PIPELINE_AWS_PROFILE", value : local.triggered_pipeline_account_name }, + { name : "SLACK_THREAD_ID", value : "#{variables.SLACK_THREAD_ID}" }, { name : "SLACK_CHANNEL_ID", value : var.slack_channel, type : "PARAMETER_STORE" }, { name : "SLACK_REF", value : "#{slack.SLACK_REF}" }, - { name : "VPC", value : local.base_env_config[env.name].vpc } + { name : "ACCOUNT_NAME", value : local.triggered_pipeline_account_name }, ]) }, namespace : null - } - ] - ]) + }] : []) + ) - dns_ids = tolist(toset(flatten([for stage in local.initial_stages : lookup(stage, "accounts", null) != null ? [stage.accounts.dns.id] : []]))) + dns_ids = tolist(toset(flatten([for stage in local.all_stages : lookup(stage, "accounts", null) != null ? 
[stage.accounts.dns.id] : []]))) dns_account_assumed_roles = [for id in local.dns_ids : "arn:aws:iam::${id}:role/environment-pipeline-assumed-role"] + # Merge in the stage specific config from the stage_config.yml file: - stages = [for stage in local.initial_stages : merge(stage, local.stage_config[stage["type"]])] + stages = [for stage in local.all_stages : merge(stage, local.stage_config[stage["type"]])] central_log_destination_arn = "arn:aws:logs:eu-west-2:812359060647:destination:cwl_log_destination" } diff --git a/environment-pipelines/stage_config.yml b/environment-pipelines/stage_config.yml index 97ff5a548..e53340dac 100644 --- a/environment-pipelines/stage_config.yml +++ b/environment-pipelines/stage_config.yml @@ -19,3 +19,10 @@ apply: owner: "AWS" provider: "CodeBuild" version: "1" + +trigger: + name: "Trigger" + category: "Build" + owner: "AWS" + provider: "CodeBuild" + version: "1" diff --git a/environment-pipelines/tests/unit.tftest.hcl b/environment-pipelines/tests/unit.tftest.hcl index 1c94045c7..6aabc9a5c 100644 --- a/environment-pipelines/tests/unit.tftest.hcl +++ b/environment-pipelines/tests/unit.tftest.hcl @@ -154,6 +154,27 @@ override_data { } } +override_data { + target = data.aws_iam_policy_document.trigger_pipeline + values = { + json = "{\"Sid\": \"TriggerCodePipeline\"}" + } +} + +override_data { + target = data.aws_iam_policy_document.assume_role_to_trigger_codepipeline_policy_document + values = { + json = "{\"Sid\": \"AssumeRoleToTriggerCodePipeline\"}" + } +} + +override_data { + target = data.aws_iam_policy_document.access_artifact_store + values = { + json = "{\"Sid\": \"AccessArtifactStore\"}" + } +} + override_data { target = data.aws_iam_policy_document.codepipeline values = { @@ -161,6 +182,13 @@ override_data { } } +override_data { + target = data.aws_iam_policy_document.assume_trigger_pipeline + values = { + json = "{\"Sid\": \"AssumeTriggerPolicy\"}" + } +} + variables { application = "my-app" repository = "my-repository" @@ 
-171,6 +199,29 @@ variables { managed-by = "DBT Platform - Terraform" } + all_pipelines = { + my-pipeline = { + account = "sandbox" + branch = "" + slack_channel = "" + trigger_on_push = true + pipeline_to_trigger = "triggered-pipeline" + environments = { + environment1 = "" + } + } + + triggered-pipeline = { + account = "prod" + branch = "" + slack_channel = "" + trigger_on_push = false + environments = { + environment2 = "" + } + } + } + environment_config = { "*" = { accounts = { @@ -217,6 +268,21 @@ run "test_code_pipeline" { error_message = "Should be: my-app-my-pipeline-environment-pipeline" } # aws_codepipeline.environment_pipeline.role_arn cannot be tested on a plan + assert { + condition = aws_codepipeline.environment_pipeline.variable[0].name == "SLACK_THREAD_ID" + error_message = "Should be: 'SLACK_THREAD_ID'" + } + + assert { + condition = aws_codepipeline.environment_pipeline.variable[0].default_value == "NONE" + error_message = "Should be: 'NONE'" + } + + assert { + condition = aws_codepipeline.environment_pipeline.variable[0].description == "This can be set by a triggering pipeline to continue an existing message thread" + error_message = "Should be: 'This can be set by a triggering pipeline to continue an existing message thread'" + } + assert { condition = tolist(aws_codepipeline.environment_pipeline.artifact_store)[0].location == "my-app-my-pipeline-environment-pipeline-artifact-store" error_message = "Should be: my-app-my-pipeline-environment-pipeline-artifact-store" @@ -272,8 +338,8 @@ run "test_code_pipeline" { # Build stage assert { - condition = aws_codepipeline.environment_pipeline.stage[1].name == "Build" - error_message = "Should be: Build" + condition = aws_codepipeline.environment_pipeline.stage[1].name == "Install-Build-Tools" + error_message = "Should be: Install-Build-Tools" } assert { condition = aws_codepipeline.environment_pipeline.stage[1].action[0].name == "InstallTools" @@ -695,6 +761,218 @@ run "test_iam" { } } +run 
"test_triggering_pipelines" { + command = plan + + variables { + pipeline_to_trigger = "triggered-pipeline" + } + + assert { + condition = aws_codebuild_project.trigger_other_environment_pipeline[""].name == "my-app-my-pipeline-environment-pipeline-trigger" + error_message = "Should be: 'my-app-my-pipeline-environment-pipeline-trigger" + } + + assert { + condition = aws_codebuild_project.trigger_other_environment_pipeline[""].description == "Triggers a target pipeline" + error_message = "Should be: 'Triggers a target pipeline'" + } + + assert { + condition = aws_codebuild_project.trigger_other_environment_pipeline[""].build_timeout == 5 + error_message = "Should be: 5" + } + + assert { + condition = one(aws_codebuild_project.trigger_other_environment_pipeline[""].artifacts).type == "CODEPIPELINE" + error_message = "Should be: 'CODEPIPELINE'" + } + + assert { + condition = one(aws_codebuild_project.trigger_other_environment_pipeline[""].cache).type == "S3" + error_message = "Should be: 'S3'" + } + + assert { + condition = one(aws_codebuild_project.trigger_other_environment_pipeline[""].cache).location == "my-app-my-pipeline-environment-pipeline-artifact-store" + error_message = "Should be: 'my-app-my-pipeline-environment-pipeline-artifact-store'" + } + + assert { + condition = one(aws_codebuild_project.trigger_other_environment_pipeline[""].environment).compute_type == "BUILD_GENERAL1_SMALL" + error_message = "Should be: 'BUILD_GENERAL1_SMALL'" + } + + assert { + condition = one(aws_codebuild_project.trigger_other_environment_pipeline[""].environment).image == "aws/codebuild/amazonlinux2-x86_64-standard:5.0" + error_message = "Should be: 'aws/codebuild/amazonlinux2-x86_64-standard:5.0'" + } + + assert { + condition = one(aws_codebuild_project.trigger_other_environment_pipeline[""].environment).type == "LINUX_CONTAINER" + error_message = "Should be: 'LINUX_CONTAINER'" + } + + assert { + condition = 
one(aws_codebuild_project.trigger_other_environment_pipeline[""].environment).image_pull_credentials_type == "CODEBUILD" + error_message = "Should be: 'CODEBUILD'" + } + + assert { + condition = aws_codebuild_project.trigger_other_environment_pipeline[""].logs_config[0].cloudwatch_logs[0].group_name == "codebuild/my-app-my-pipeline-environment-terraform/log-group" + error_message = "Should be: 'codebuild/my-app-my-pipeline-environment-terraform/log-group'" + } + assert { + condition = aws_codebuild_project.trigger_other_environment_pipeline[""].logs_config[0].cloudwatch_logs[0].stream_name == "codebuild/my-app-my-pipeline-environment-terraform/log-stream" + error_message = "Should be: 'codebuild/my-app-my-pipeline-environment-terraform/log-group'" + } + + assert { + condition = aws_iam_role_policy.assume_role_to_trigger_pipeline_policy[""].name == "my-app-my-pipeline-assume-role-to-trigger-codepipeline-policy" + error_message = "Should be: 'my-app-my-pipeline-assume-role-to-trigger-codepipeline-policy" + } + + assert { + condition = aws_iam_role_policy.assume_role_to_trigger_pipeline_policy[""].role == "my-app-my-pipeline-environment-pipeline-codebuild" + error_message = "Should be: 'my-app-my-pipeline-environment-pipeline-codebuild" + } + + assert { + condition = aws_iam_role_policy.assume_role_to_trigger_pipeline_policy[""].policy == "{\"Sid\": \"AssumeRoleToTriggerCodePipeline\"}" + error_message = "Should be: 'AssumeRoleToTriggerCodePipeline'" + } + + assert { + condition = jsonencode(aws_codebuild_project.trigger_other_environment_pipeline[""].tags) == jsonencode(var.expected_tags) + error_message = "Should be: ${jsonencode(var.expected_tags)}" + } + + assert { + condition = local.triggers_another_pipeline + error_message = "" + } + + assert { + condition = aws_codepipeline.environment_pipeline.stage[7].name == "Trigger-Pipeline" + error_message = "Should be: Trigger-Pipeline" + } + + assert { + condition = 
aws_codepipeline.environment_pipeline.stage[7].action[0].name == "Trigger" + error_message = "Action name incorrect" + } + + assert { + condition = aws_codepipeline.environment_pipeline.stage[7].action[0].category == "Build" + error_message = "Action category incorrect" + } + + assert { + condition = aws_codepipeline.environment_pipeline.stage[7].action[0].owner == "AWS" + error_message = "Action owner incorrect" + } + + assert { + condition = aws_codepipeline.environment_pipeline.stage[7].action[0].provider == "CodeBuild" + error_message = "Action provider incorrect" + } + + assert { + condition = aws_codepipeline.environment_pipeline.stage[7].action[0].version == "1" + error_message = "Action version incorrect" + } + + assert { + condition = length(aws_codepipeline.environment_pipeline.stage[7].action[0].input_artifacts) == 1 + error_message = "Input artifacts incorrect" + } + + assert { + condition = aws_codepipeline.environment_pipeline.stage[7].action[0].input_artifacts[0] == "build_output" + error_message = "Input artifacts incorrect" + } + + assert { + condition = aws_codepipeline.environment_pipeline.stage[7].action[0].configuration.EnvironmentVariables == "[{\"name\":\"TRIGGERED_ACCOUNT_ROLE_ARN\",\"value\":\"arn:aws:iam::000123456789:role/my-app-triggered-pipeline-trigger-pipeline-from-my-pipeline\"},{\"name\":\"TRIGGERED_PIPELINE_NAME\",\"value\":\"my-app-triggered-pipeline-environment-pipeline\"},{\"name\":\"TRIGGERED_PIPELINE_AWS_PROFILE\",\"value\":\"prod\"},{\"name\":\"SLACK_THREAD_ID\",\"value\":\"#{variables.SLACK_THREAD_ID}\"},{\"name\":\"SLACK_CHANNEL_ID\",\"type\":\"PARAMETER_STORE\",\"value\":\"/codebuild/slack_pipeline_notifications_channel\"},{\"name\":\"SLACK_REF\",\"value\":\"#{slack.SLACK_REF}\"},{\"name\":\"ACCOUNT_NAME\",\"value\":\"prod\"}]" + error_message = "Configuration Env Vars incorrect" + } + + assert { + condition = aws_codepipeline.environment_pipeline.stage[7].action[0].configuration.ProjectName == 
"my-app-my-pipeline-environment-pipeline-trigger[\"\"]" + error_message = "Configuration ProjectName incorrect" + } + + assert { + condition = aws_codepipeline.environment_pipeline.stage[7].action[0].configuration.PrimarySource == "build_output" + error_message = "Configuration PrimarySource incorrect" + } + + assert { + condition = aws_codepipeline.environment_pipeline.stage[7].action[0].namespace == null + error_message = "Namespace incorrect" + } + + assert { + condition = local.triggered_pipeline_account_role == "arn:aws:iam::000123456789:role/my-app-triggered-pipeline-trigger-pipeline-from-my-pipeline" + error_message = "Triggered pipeline account role is incorrect" + } +} + +run "test_triggered_pipelines" { + command = plan + + variables { + pipeline_name = "triggered-pipeline" + } + + assert { + condition = local.triggers_another_pipeline == false + error_message = "" + } + + assert { + condition = aws_iam_role.trigger_pipeline["my-pipeline"].name == "my-app-triggered-pipeline-trigger-pipeline-from-my-pipeline" + error_message = "" + } + + assert { + condition = aws_iam_role.trigger_pipeline["my-pipeline"].assume_role_policy == "{\"Sid\": \"AssumeTriggerPolicy\"}" + error_message = "" + } + + assert { + condition = jsonencode(aws_iam_role.trigger_pipeline["my-pipeline"].tags) == jsonencode(var.expected_tags) + error_message = "" + } + + assert { + condition = aws_iam_role_policy.trigger_pipeline["my-pipeline"].name == "my-app-triggered-pipeline-trigger-pipeline-from-my-pipeline" + error_message = "" + } + + assert { + condition = aws_iam_role_policy.trigger_pipeline["my-pipeline"].role == "my-app-triggered-pipeline-trigger-pipeline-from-my-pipeline" + error_message = "" + } + + # aws_iam_role_policy.trigger_pipeline["my-pipeline"].policy cannot be tested on a plan + + assert { + condition = local.list_of_triggering_pipelines[0].name == "my-pipeline" + error_message = "List of triggering pipelines should include my-pipeline" + } + + assert { + condition = 
contains(local.set_of_triggering_pipeline_names, "my-pipeline") + error_message = "The set of triggering pipeline names should contain my-pipeline" + } + + assert { + condition = local.triggering_pipeline_role_arns == ["arn:aws:iam::000123456789:role/demodjango-my-pipeline-environment-pipeline-codebuild"] + error_message = "ARN for triggering role is incorrect" + } +} + run "test_artifact_store" { command = plan @@ -717,8 +995,8 @@ run "test_stages" { error_message = "Should be: Source" } assert { - condition = aws_codepipeline.environment_pipeline.stage[1].name == "Build" - error_message = "Should be: Build" + condition = aws_codepipeline.environment_pipeline.stage[1].name == "Install-Build-Tools" + error_message = "Should be: Install-Build-Tools" } # Stage: dev plan @@ -771,7 +1049,7 @@ run "test_stages" { error_message = "Configuration PrimarySource incorrect" } assert { - condition = aws_codepipeline.environment_pipeline.stage[2].action[0].configuration.EnvironmentVariables == "[{\"name\":\"APPLICATION\",\"value\":\"my-app\"},{\"name\":\"ENVIRONMENT\",\"value\":\"dev\"},{\"name\":\"COPILOT_PROFILE\",\"value\":\"sandbox\"},{\"name\":\"SLACK_CHANNEL_ID\",\"type\":\"PARAMETER_STORE\",\"value\":\"/codebuild/slack_pipeline_notifications_channel\"},{\"name\":\"SLACK_REF\",\"value\":\"#{slack.SLACK_REF}\"},{\"name\":\"NEEDS_APPROVAL\",\"value\":\"no\"}]" + condition = aws_codepipeline.environment_pipeline.stage[2].action[0].configuration.EnvironmentVariables == "[{\"name\":\"APPLICATION\",\"value\":\"my-app\"},{\"name\":\"ENVIRONMENT\",\"value\":\"dev\"},{\"name\":\"PIPELINE_NAME\",\"value\":\"my-pipeline\"},{\"name\":\"COPILOT_PROFILE\",\"value\":\"sandbox\"},{\"name\":\"SLACK_CHANNEL_ID\",\"type\":\"PARAMETER_STORE\",\"value\":\"/codebuild/slack_pipeline_notifications_channel\"},{\"name\":\"SLACK_REF\",\"value\":\"#{slack.SLACK_REF}\"},{\"name\":\"NEEDS_APPROVAL\",\"value\":\"no\"},{\"name\":\"SLACK_THREAD_ID\",\"value\":\"#{variables.SLACK_THREAD_ID}\"}]" 
error_message = "Configuration Env Vars incorrect" } assert { @@ -825,7 +1103,7 @@ run "test_stages" { error_message = "Configuration PrimarySource incorrect" } assert { - condition = aws_codepipeline.environment_pipeline.stage[3].action[0].configuration.EnvironmentVariables == "[{\"name\":\"ENVIRONMENT\",\"value\":\"dev\"},{\"name\":\"SLACK_CHANNEL_ID\",\"type\":\"PARAMETER_STORE\",\"value\":\"/codebuild/slack_pipeline_notifications_channel\"},{\"name\":\"SLACK_REF\",\"value\":\"#{slack.SLACK_REF}\"},{\"name\":\"VPC\",\"value\":\"platform-sandbox-dev\"}]" + condition = aws_codepipeline.environment_pipeline.stage[3].action[0].configuration.EnvironmentVariables == "[{\"name\":\"ENVIRONMENT\",\"value\":\"dev\"},{\"name\":\"SLACK_CHANNEL_ID\",\"type\":\"PARAMETER_STORE\",\"value\":\"/codebuild/slack_pipeline_notifications_channel\"},{\"name\":\"SLACK_REF\",\"value\":\"#{slack.SLACK_REF}\"},{\"name\":\"VPC\",\"value\":\"platform-sandbox-dev\"},{\"name\":\"SLACK_THREAD_ID\",\"value\":\"#{variables.SLACK_THREAD_ID}\"}]" error_message = "Configuration Env Vars incorrect" } @@ -879,7 +1157,7 @@ run "test_stages" { error_message = "Configuration PrimarySource incorrect" } assert { - condition = aws_codepipeline.environment_pipeline.stage[4].action[0].configuration.EnvironmentVariables == "[{\"name\":\"APPLICATION\",\"value\":\"my-app\"},{\"name\":\"ENVIRONMENT\",\"value\":\"prod\"},{\"name\":\"COPILOT_PROFILE\",\"value\":\"prod\"},{\"name\":\"SLACK_CHANNEL_ID\",\"type\":\"PARAMETER_STORE\",\"value\":\"/codebuild/slack_pipeline_notifications_channel\"},{\"name\":\"SLACK_REF\",\"value\":\"#{slack.SLACK_REF}\"},{\"name\":\"NEEDS_APPROVAL\",\"value\":\"yes\"}]" + condition = aws_codepipeline.environment_pipeline.stage[4].action[0].configuration.EnvironmentVariables == 
"[{\"name\":\"APPLICATION\",\"value\":\"my-app\"},{\"name\":\"ENVIRONMENT\",\"value\":\"prod\"},{\"name\":\"PIPELINE_NAME\",\"value\":\"my-pipeline\"},{\"name\":\"COPILOT_PROFILE\",\"value\":\"prod\"},{\"name\":\"SLACK_CHANNEL_ID\",\"type\":\"PARAMETER_STORE\",\"value\":\"/codebuild/slack_pipeline_notifications_channel\"},{\"name\":\"SLACK_REF\",\"value\":\"#{slack.SLACK_REF}\"},{\"name\":\"NEEDS_APPROVAL\",\"value\":\"yes\"},{\"name\":\"SLACK_THREAD_ID\",\"value\":\"#{variables.SLACK_THREAD_ID}\"}]" error_message = "Configuration Env Vars incorrect" } assert { @@ -975,7 +1253,7 @@ run "test_stages" { error_message = "Configuration PrimarySource incorrect" } assert { - condition = aws_codepipeline.environment_pipeline.stage[6].action[0].configuration.EnvironmentVariables == "[{\"name\":\"ENVIRONMENT\",\"value\":\"prod\"},{\"name\":\"SLACK_CHANNEL_ID\",\"type\":\"PARAMETER_STORE\",\"value\":\"/codebuild/slack_pipeline_notifications_channel\"},{\"name\":\"SLACK_REF\",\"value\":\"#{slack.SLACK_REF}\"},{\"name\":\"VPC\",\"value\":\"platform-sandbox-prod\"}]" + condition = aws_codepipeline.environment_pipeline.stage[6].action[0].configuration.EnvironmentVariables == "[{\"name\":\"ENVIRONMENT\",\"value\":\"prod\"},{\"name\":\"SLACK_CHANNEL_ID\",\"type\":\"PARAMETER_STORE\",\"value\":\"/codebuild/slack_pipeline_notifications_channel\"},{\"name\":\"SLACK_REF\",\"value\":\"#{slack.SLACK_REF}\"},{\"name\":\"VPC\",\"value\":\"platform-sandbox-prod\"},{\"name\":\"SLACK_THREAD_ID\",\"value\":\"#{variables.SLACK_THREAD_ID}\"}]" error_message = "Configuration Env Vars incorrect" } } diff --git a/environment-pipelines/variables.tf b/environment-pipelines/variables.tf index cc151751f..386a27921 100644 --- a/environment-pipelines/variables.tf +++ b/environment-pipelines/variables.tf @@ -2,12 +2,14 @@ variable "application" { type = string } -variable "repository" { - type = string +variable "all_pipelines" { + type = any + default = {} } -variable "pipeline_name" { - type = string 
+variable "branch" { + type = string + default = "main" } variable "environments" { @@ -25,9 +27,19 @@ variable "environment_config" { type = any } -variable "branch" { + +variable "pipeline_name" { + type = string +} + +variable "pipeline_to_trigger" { type = string - default = "main" + default = null +} + + +variable "repository" { + type = string } variable "slack_channel" { @@ -39,3 +51,4 @@ variable "trigger_on_push" { type = bool default = true } + diff --git a/s3/main.tf b/s3/main.tf index a91060557..7b878f79e 100644 --- a/s3/main.tf +++ b/s3/main.tf @@ -1,5 +1,3 @@ -data "aws_caller_identity" "current" {} - resource "aws_s3_bucket" "this" { bucket = var.config.bucket_name