From f7de0454974a29d50973fe8d45a5af23f6c0cf36 Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Fri, 8 Oct 2021 14:25:27 +0100 Subject: [PATCH 01/17] Feature 372: Pipeline Triggers --- .../cdk/cdk_constructs/adf_codepipeline.py | 618 +++++++++++------- .../cdk/cdk_stacks/adf_default_pipeline.py | 4 + .../tests/test_pipeline_creation.py | 70 ++ .../adf-build/shared/schema_validation.py | 10 +- 4 files changed, 477 insertions(+), 225 deletions(-) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py index cf1eb739a..fcf8c0a06 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py @@ -6,11 +6,14 @@ import os import json +from typing import Tuple import boto3 from aws_cdk import ( aws_codepipeline as _codepipeline, - core + aws_events as _eventbridge, + aws_events_targets as _eventbridge_targets, + core, ) from cdk_constructs import adf_events @@ -24,242 +27,333 @@ LOGGER = configure_logger(__name__) + class Action: _version = "1" def __init__(self, **kwargs): - self.name = kwargs.get('name') - self.target = kwargs.get('target', {}) - self.provider = kwargs.get('provider') - self.category = kwargs.get('category') - self.map_params = kwargs.get('map_params') - self.project_name = kwargs.get('project_name') - self.owner = kwargs.get('owner') or 'AWS' - self.run_order = kwargs.get('run_order') - self.index = kwargs.get('index') - self.action_name = kwargs.get('action_name') - self.action_mode = kwargs.get('action_mode', '').upper() - self.region = kwargs.get('region') or ADF_DEPLOYMENT_REGION - self.account_id = self.map_params["default_providers"]["source"].get('properties', {}).get("account_id") + self.name = kwargs.get("name") + self.target = kwargs.get("target", {}) + self.provider = kwargs.get("provider") + self.category = kwargs.get("category") + self.map_params = kwargs.get("map_params") + self.project_name = kwargs.get("project_name") + self.owner = kwargs.get("owner") or "AWS" + self.run_order = kwargs.get("run_order") + self.index = kwargs.get("index") + self.action_name = kwargs.get("action_name") + self.action_mode = kwargs.get("action_mode", "").upper() + self.region = kwargs.get("region") or ADF_DEPLOYMENT_REGION + self.account_id = ( + self.map_params["default_providers"]["source"] + .get("properties", {}) + .get("account_id") + ) self.role_arn = self._generate_role_arn() self.notification_endpoint = self.map_params.get("topic_arn") self.configuration = self._generate_configuration() self.config = self.generate() def _generate_role_arn(self): - if self.category not in ['Build', 'Deploy']: + if self.category not in ["Build", "Deploy"]: return None - default_provider = self.map_params['default_providers'][self.category.lower()] - specific_role = self.target.get('properties', {}).get('role') or default_provider.get('properties', {}).get('role') + default_provider = self.map_params["default_providers"][self.category.lower()] + specific_role = self.target.get("properties", {}).get( + "role" + ) or default_provider.get("properties", {}).get("role") if specific_role: - account_id = self.account_id if self.provider == 'CodeBuild' else self.target['id'] - return 'arn:aws:iam::{0}:role/{1}'.format(account_id, 
specific_role) + account_id = ( + self.account_id if self.provider == "CodeBuild" else self.target["id"] + ) + return "arn:aws:iam::{0}:role/{1}".format(account_id, specific_role) return None - def _generate_configuration(self): #pylint: disable=R0912, R0911, R0915 + def _generate_configuration(self): # pylint: disable=R0912, R0911, R0915 if self.provider == "Manual" and self.category == "Approval": _props = { - "CustomData": self.target.get('properties', {}).get('message') or "Approval stage for {0}".format(self.map_params['name']) + "CustomData": self.target.get("properties", {}).get("message") + or "Approval stage for {0}".format(self.map_params["name"]) } if self.notification_endpoint: _props["NotificationArn"] = self.notification_endpoint - if self.target.get('properties', {}).get('sns_topic_arn'): - _props["NotificationArn"] = self.target.get('properties', {}).get('sns_topic_arn') + if self.target.get("properties", {}).get("sns_topic_arn"): + _props["NotificationArn"] = self.target.get("properties", {}).get( + "sns_topic_arn" + ) return _props if self.provider == "S3" and self.category == "Source": return { - "S3Bucket": self.map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('bucket_name'), - "S3ObjectKey": self.map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('object_key'), - "PollForSourceChanges": self.map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('trigger_on_changes', True), + "S3Bucket": self.map_params.get("default_providers", {}) + .get("source", {}) + .get("properties", {}) + .get("bucket_name"), + "S3ObjectKey": self.map_params.get("default_providers", {}) + .get("source", {}) + .get("properties", {}) + .get("object_key"), + "PollForSourceChanges": self.map_params.get("default_providers", {}) + .get("source", {}) + .get("properties", {}) + .get("trigger_on_changes", True), } if self.provider == "S3" and self.category == "Deploy": return { - "BucketName": self.map_params.get( - 'default_providers', {}).get( - 'deploy', {}).get( - 'properties', {}).get( - 'bucket_name') or self.target.get( - 'properties', {}).get( - 'bucket_name'), - "Extract": self.map_params.get( - 'default_providers', {}).get( - 'deploy', {}).get( - 'properties', {}).get( - 'extract') or self.target.get( - 'properties', {}).get( - 'extract', False), - "ObjectKey": self.map_params.get( - 'default_providers', {}).get( - 'deploy', {}).get( - 'properties', {}).get( - 'object_key') or self.target.get( - 'properties', {}).get( - 'object_key') + "BucketName": self.map_params.get("default_providers", {}) + .get("deploy", {}) + .get("properties", {}) + .get("bucket_name") + or self.target.get("properties", {}).get("bucket_name"), + "Extract": self.map_params.get("default_providers", {}) + .get("deploy", {}) + .get("properties", {}) + .get("extract") + or self.target.get("properties", {}).get("extract", False), + "ObjectKey": self.map_params.get("default_providers", {}) + .get("deploy", {}) + .get("properties", {}) + .get("object_key") + or self.target.get("properties", {}).get("object_key"), } if self.provider == "CodeStarSourceConnection": - owner = self.map_params.get('default_providers', {}).get('source').get('properties', {}).get('owner', {}) - repo = self.map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('repository', {}) or self.map_params['name'] - codestar_connection_path = self.map_params.get('default_providers', {}).get('source').get('properties', 
{}).get('codestar_connection_path', {}) - ssm_client = boto3.client('ssm') + owner = ( + self.map_params.get("default_providers", {}) + .get("source") + .get("properties", {}) + .get("owner", {}) + ) + repo = ( + self.map_params.get("default_providers", {}) + .get("source", {}) + .get("properties", {}) + .get("repository", {}) + or self.map_params["name"] + ) + codestar_connection_path = ( + self.map_params.get("default_providers", {}) + .get("source") + .get("properties", {}) + .get("codestar_connection_path", {}) + ) + ssm_client = boto3.client("ssm") try: response = ssm_client.get_parameter(Name=codestar_connection_path) except Exception as e: - LOGGER.error(f"No parameter found at {codestar_connection_path}. Check the path/value.") + LOGGER.error( + f"No parameter found at {codestar_connection_path}. Check the path/value." + ) raise e - connection_arn = response['Parameter']['Value'] + connection_arn = response["Parameter"]["Value"] return { "ConnectionArn": connection_arn, "FullRepositoryId": f"{owner}/{repo}", - "BranchName": self.map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('branch', {}) or 'master' + "BranchName": self.map_params.get("default_providers", {}) + .get("source", {}) + .get("properties", {}) + .get("branch", {}) + or "master", } if self.provider == "GitHub": return { - "Owner": self.map_params.get('default_providers', {}).get('source').get('properties', {}).get('owner', {}), - "Repo": self.map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('repository', {}) or self.map_params['name'], - "Branch": self.map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('branch', {}) or 'master', + "Owner": self.map_params.get("default_providers", {}) + .get("source") + .get("properties", {}) + .get("owner", {}), + "Repo": self.map_params.get("default_providers", {}) + .get("source", {}) + .get("properties", {}) + .get("repository", {}) + or self.map_params["name"], + "Branch": self.map_params.get("default_providers", {}) + .get("source", {}) + .get("properties", {}) + .get("branch", {}) + or "master", # pylint: disable=no-value-for-parameter "OAuthToken": core.SecretValue.secrets_manager( - self.map_params['default_providers']['source'].get('properties', {}).get('oauth_token_path'), - json_field=self.map_params['default_providers']['source'].get('properties', {}).get('json_field') + self.map_params["default_providers"]["source"] + .get("properties", {}) + .get("oauth_token_path"), + json_field=self.map_params["default_providers"]["source"] + .get("properties", {}) + .get("json_field"), ), - "PollForSourceChanges": False + "PollForSourceChanges": False, } if self.provider == "Lambda": return { - "FunctionName": self.map_params.get( - 'default_providers', {}).get( - 'deploy', {}).get( - 'properties', {}).get( - 'function_name', '') or self.target.get( - 'properties', {}).get( - 'function_name', ''), - "UserParameters": str(self.map_params.get( - 'default_providers', {}).get( - 'deploy', {}).get( - 'properties', {}).get('input', '') or self.target.get( - 'properties', {}).get( - 'input', '')) + "FunctionName": self.map_params.get("default_providers", {}) + .get("deploy", {}) + .get("properties", {}) + .get("function_name", "") + or self.target.get("properties", {}).get("function_name", ""), + "UserParameters": str( + self.map_params.get("default_providers", {}) + .get("deploy", {}) + .get("properties", {}) + .get("input", "") + or self.target.get("properties", {}).get("input", "") + ), } 
if self.provider == "CloudFormation": - _path_prefix = self.target.get( - 'properties', {}).get( - 'root_dir') or self.map_params.get( - 'default_providers', {}).get( - 'deploy', {}).get( - 'properties', {}).get( - 'root_dir') or "" - if _path_prefix and not _path_prefix.endswith('/'): + _path_prefix = ( + self.target.get("properties", {}).get("root_dir") + or self.map_params.get("default_providers", {}) + .get("deploy", {}) + .get("properties", {}) + .get("root_dir") + or "" + ) + if _path_prefix and not _path_prefix.endswith("/"): _path_prefix = "{}/".format(_path_prefix) _input_artifact = "{map_name}-build".format( - map_name=self.map_params['name'], + map_name=self.map_params["name"], ) _props = { "ActionMode": self.action_mode, - "StackName": self.target.get( - 'properties', {}).get('stack_name') or self.map_params.get( - 'default_providers', {}).get( - 'deploy', {}).get( - 'properties', {}).get( - 'stack_name') or "{0}{1}".format( - ADF_STACK_PREFIX, self.map_params['name']), - "ChangeSetName": "{0}{1}".format(ADF_STACK_PREFIX, self.map_params['name']), + "StackName": self.target.get("properties", {}).get("stack_name") + or self.map_params.get("default_providers", {}) + .get("deploy", {}) + .get("properties", {}) + .get("stack_name") + or "{0}{1}".format(ADF_STACK_PREFIX, self.map_params["name"]), + "ChangeSetName": "{0}{1}".format( + ADF_STACK_PREFIX, self.map_params["name"] + ), "TemplateConfiguration": "{input_artifact}::{path_prefix}params/{target_name}_{region}.json".format( input_artifact=_input_artifact, path_prefix=_path_prefix, - target_name=self.target['name'], + target_name=self.target["name"], region=self.region, ), "Capabilities": "CAPABILITY_NAMED_IAM,CAPABILITY_AUTO_EXPAND", - "RoleArn": "arn:aws:iam::{0}:role/adf-cloudformation-deployment-role".format(self.target['id']) if not self.role_arn else self.role_arn + "RoleArn": "arn:aws:iam::{0}:role/adf-cloudformation-deployment-role".format( + self.target["id"] + ) + if not self.role_arn + else self.role_arn, } - if self.map_params.get('default_providers', {}).get('build', {}).get('properties', {}).get('environment_variables', {}).get('CONTAINS_TRANSFORM'): - _props["TemplatePath"] = "{input_artifact}::{path_prefix}template_{region}.yml".format( + if ( + self.map_params.get("default_providers", {}) + .get("build", {}) + .get("properties", {}) + .get("environment_variables", {}) + .get("CONTAINS_TRANSFORM") + ): + _props[ + "TemplatePath" + ] = "{input_artifact}::{path_prefix}template_{region}.yml".format( input_artifact=_input_artifact, path_prefix=_path_prefix, region=self.region, ) else: - _template_filename = self.target.get( - 'properties', {}).get( - 'template_filename') or self.map_params.get( - 'default_providers', {}).get( - 'deploy', {}).get( - 'properties', {}).get( - 'template_filename') or "template.yml" - _props["TemplatePath"] = "{input_artifact}::{path_prefix}{filename}".format( + _template_filename = ( + self.target.get("properties", {}).get("template_filename") + or self.map_params.get("default_providers", {}) + .get("deploy", {}) + .get("properties", {}) + .get("template_filename") + or "template.yml" + ) + _props[ + "TemplatePath" + ] = "{input_artifact}::{path_prefix}{filename}".format( input_artifact=_input_artifact, path_prefix=_path_prefix, filename=_template_filename, ) - if self.target.get('properties', {}).get('outputs'): - _props['OutputFileName'] = '{path_prefix}{filename}.json'.format( + if self.target.get("properties", {}).get("outputs"): + _props["OutputFileName"] = 
"{path_prefix}{filename}.json".format( path_prefix=_path_prefix, - filename=self.target['properties']['outputs'], + filename=self.target["properties"]["outputs"], ) - if self.target.get('properties', {}).get('param_overrides'): + if self.target.get("properties", {}).get("param_overrides"): _overrides = {} - for override in self.target.get('properties', {}).get('param_overrides', []): - _overrides["{0}".format( - override['param'])] = {"Fn::GetParam": ["{0}".format( - override['inputs']), "{0}.json".format( - override['inputs']), "{0}".format( - override['key_name'])]} - _props['ParameterOverrides'] = json.dumps(_overrides) + for override in self.target.get("properties", {}).get( + "param_overrides", [] + ): + _overrides["{0}".format(override["param"])] = { + "Fn::GetParam": [ + "{0}".format(override["inputs"]), + "{0}.json".format(override["inputs"]), + "{0}".format(override["key_name"]), + ] + } + _props["ParameterOverrides"] = json.dumps(_overrides) return _props if self.provider == "Jenkins": return { - "ProjectName": self.map_params['default_providers']['build'].get( - 'properties', {}).get( - 'project_name', self.map_params['name']), # Enter the name of the project you created in the Jenkins plugin - "ServerURL": self.map_params['default_providers']['build'].get('properties', {}).get('server_url'), # Server URL - "ProviderName": self.map_params['default_providers']['build'].get('properties', {}).get('provider_name') # Enter the provider name you configured in the Jenkins plugin + "ProjectName": self.map_params["default_providers"]["build"] + .get("properties", {}) + .get( + "project_name", self.map_params["name"] + ), # Enter the name of the project you created in the Jenkins plugin + "ServerURL": self.map_params["default_providers"]["build"] + .get("properties", {}) + .get("server_url"), # Server URL + "ProviderName": self.map_params["default_providers"]["build"] + .get("properties", {}) + .get( + "provider_name" + ), # Enter the provider name you configured in the Jenkins plugin } if self.provider == "CodeBuild": if self.project_name is None: - self.project_name = "adf-build-{0}".format(self.map_params['name']) - return { - "ProjectName": self.project_name - } + self.project_name = "adf-build-{0}".format(self.map_params["name"]) + return {"ProjectName": self.project_name} if self.provider == "ServiceCatalog": return { - "ConfigurationFilePath": self.target.get('properties', {}).get('configuration_file_path') or "params/{0}_{1}.json".format(self.target['name'], self.region), - "ProductId": self.target.get( - 'properties', {}).get( - 'product_id') or self.map_params['default_providers']['deploy'].get( - 'properties', {}).get( - 'product_id') # product_id is required for Service Catalog, meaning the product must already exist. + "ConfigurationFilePath": self.target.get("properties", {}).get( + "configuration_file_path" + ) + or "params/{0}_{1}.json".format(self.target["name"], self.region), + "ProductId": self.target.get("properties", {}).get("product_id") + or self.map_params["default_providers"]["deploy"] + .get("properties", {}) + .get( + "product_id" + ), # product_id is required for Service Catalog, meaning the product must already exist. 
} if self.provider == "CodeDeploy": return { - "ApplicationName": self.map_params.get( - 'default_providers', {}).get( - 'deploy', {}).get( - 'properties', {}).get( - 'application_name', {}) or self.target.get( - 'properties', {}).get( - 'application_name'), - "DeploymentGroupName": self.map_params.get( - 'default_providers', {}).get( - 'deploy', {}).get( - 'properties', {}).get( - 'deployment_group_name', {}) or self.target.get( - 'properties', {}).get( - 'deployment_group_name') + "ApplicationName": self.map_params.get("default_providers", {}) + .get("deploy", {}) + .get("properties", {}) + .get("application_name", {}) + or self.target.get("properties", {}).get("application_name"), + "DeploymentGroupName": self.map_params.get("default_providers", {}) + .get("deploy", {}) + .get("properties", {}) + .get("deployment_group_name", {}) + or self.target.get("properties", {}).get("deployment_group_name"), } if self.provider == "CodeCommit": return { - "BranchName": self.map_params['default_providers']['source'].get('properties', {}).get('branch', 'master'), - "RepositoryName": self.map_params['default_providers']['source'].get('properties', {}).get('repository', {}) or self.map_params['name'], + "BranchName": self.map_params["default_providers"]["source"] + .get("properties", {}) + .get("branch", "master"), + "RepositoryName": self.map_params["default_providers"]["source"] + .get("properties", {}) + .get("repository", {}) + or self.map_params["name"], "PollForSourceChanges": ( - self.map_params['default_providers']['source'].get('properties', {}).get('trigger_on_changes', True) - and self.map_params['default_providers']['source'].get('properties', {}).get('poll_for_changes', False) - ) + self.map_params["default_providers"]["source"] + .get("properties", {}) + .get("trigger_on_changes", True) + and self.map_params["default_providers"]["source"] + .get("properties", {}) + .get("poll_for_changes", False) + ), } raise Exception("{0} is not a valid provider".format(self.provider)) - def _generate_codepipeline_access_role(self): #pylint: disable=R0911 + def _generate_codepipeline_access_role(self): # pylint: disable=R0911 if self.provider == "CodeCommit": - return "arn:aws:iam::{0}:role/adf-codecommit-role".format(self.map_params['default_providers']['source']['properties']['account_id']) + return "arn:aws:iam::{0}:role/adf-codecommit-role".format( + self.map_params["default_providers"]["source"]["properties"][ + "account_id" + ] + ) if self.provider == "GitHub": return None if self.provider == "CodeStarSourceConnection": @@ -268,38 +362,50 @@ def _generate_codepipeline_access_role(self): #pylint: disable=R0911 return None if self.provider == "S3" and self.category == "Source": # This could be changed to use a new role that is bootstrapped, ideally we rename adf-cloudformation-role to a generic deployment role name - return "arn:aws:iam::{0}:role/adf-codecommit-role".format(self.map_params['default_providers']['source']['properties']['account_id']) + return "arn:aws:iam::{0}:role/adf-codecommit-role".format( + self.map_params["default_providers"]["source"]["properties"][ + "account_id" + ] + ) if self.provider == "S3" and self.category == "Deploy": # This could be changed to use a new role that is bootstrapped, ideally we rename adf-cloudformation-role to a generic deployment role name - return "arn:aws:iam::{0}:role/adf-cloudformation-role".format(self.target['id']) + return "arn:aws:iam::{0}:role/adf-cloudformation-role".format( + self.target["id"] + ) if self.provider == "ServiceCatalog": # This 
could be changed to use a new role that is bootstrapped, ideally we rename adf-cloudformation-role to a generic deployment role name - return "arn:aws:iam::{0}:role/adf-cloudformation-role".format(self.target['id']) + return "arn:aws:iam::{0}:role/adf-cloudformation-role".format( + self.target["id"] + ) if self.provider == "CodeDeploy": # This could be changed to use a new role that is bootstrapped, ideally we rename adf-cloudformation-role to a generic deployment role name - return "arn:aws:iam::{0}:role/adf-cloudformation-role".format(self.target['id']) + return "arn:aws:iam::{0}:role/adf-cloudformation-role".format( + self.target["id"] + ) if self.provider == "Lambda": # This could be changed to use a new role that is bootstrapped, ideally we rename adf-cloudformation-role to a generic deployment role name return None if self.provider == "CloudFormation": - return "arn:aws:iam::{0}:role/adf-cloudformation-role".format(self.target['id']) + return "arn:aws:iam::{0}:role/adf-cloudformation-role".format( + self.target["id"] + ) if self.provider == "Manual": return None - raise Exception('Invalid Provider {0}'.format(self.provider)) + raise Exception("Invalid Provider {0}".format(self.provider)) def generate(self): _role = self._generate_codepipeline_access_role() action_props = { - "action_type_id":_codepipeline.CfnPipeline.ActionTypeIdProperty( + "action_type_id": _codepipeline.CfnPipeline.ActionTypeIdProperty( version=Action._version, owner=self.owner, provider=self.provider, - category=self.category + category=self.category, ), "configuration": self.configuration, "name": self.action_name, "region": self.region or ADF_DEPLOYMENT_REGION, - "run_order": self.run_order + "run_order": self.run_order, } input_artifacts = self._get_input_artifacts() if input_artifacts: @@ -309,12 +415,10 @@ def generate(self): action_props["output_artifacts"] = output_artifacts if _role: action_props["role_arn"] = _role - if self.category == 'Manual': - del action_props['region'] + if self.category == "Manual": + del action_props["region"] - return _codepipeline.CfnPipeline.ActionDeclarationProperty( - **action_props - ) + return _codepipeline.CfnPipeline.ActionDeclarationProperty(**action_props) def _get_base_input_artifact_name(self): """ @@ -323,13 +427,12 @@ def _get_base_input_artifact_name(self): Returns: str: The output artifact name as a string """ - use_output_source = ( - not self.target or - not self.map_params.get('default_providers', {}).get('build', {}).get('enabled', True) - ) + use_output_source = not self.target or not self.map_params.get( + "default_providers", {} + ).get("build", {}).get("enabled", True) if use_output_source: return "output-source" - return "{0}-build".format(self.map_params['name']) + return "{0}-build".format(self.map_params["name"]) def _get_input_artifacts(self): """ @@ -338,19 +441,25 @@ def _get_input_artifacts(self): Returns: list: The Input Artifacts """ - if not self.category in ['Build', 'Deploy']: + if not self.category in ["Build", "Deploy"]: return [] input_artifacts = [ _codepipeline.CfnPipeline.InputArtifactProperty( name=self._get_base_input_artifact_name(), ), ] - if self.category == 'Deploy': - for override in self.target.get('properties', {}).get('param_overrides', []): - if self.provider == "CloudFormation" and override.get('inputs') and self.action_mode != "CHANGE_SET_EXECUTE": + if self.category == "Deploy": + for override in self.target.get("properties", {}).get( + "param_overrides", [] + ): + if ( + self.provider == "CloudFormation" + and 
override.get("inputs") + and self.action_mode != "CHANGE_SET_EXECUTE" + ): input_artifacts.append( _codepipeline.CfnPipeline.InputArtifactProperty( - name=override.get('inputs') + name=override.get("inputs") ) ) return input_artifacts @@ -362,15 +471,15 @@ def _get_base_output_artifact_name(self): Returns: str: The output artifact name as a string """ - if self.category == 'Source': + if self.category == "Source": return "output-source" - if self.category == 'Build' and not self.target: - return "{0}-build".format(self.map_params['name']) - if self.category == 'Deploy' and self.provider == "CloudFormation": - outputs_name = self.target.get('properties', {}).get('outputs', '') - if outputs_name and self.action_mode != 'CHANGE_SET_REPLACE': + if self.category == "Build" and not self.target: + return "{0}-build".format(self.map_params["name"]) + if self.category == "Deploy" and self.provider == "CloudFormation": + outputs_name = self.target.get("properties", {}).get("outputs", "") + if outputs_name and self.action_mode != "CHANGE_SET_REPLACE": return outputs_name - return '' + return "" def _get_output_artifacts(self): """ @@ -391,48 +500,86 @@ def _get_output_artifacts(self): class Pipeline(core.Construct): _import_arns = [ - 'CodePipelineRoleArn', - 'CodeBuildRoleArn', - 'SendSlackNotificationLambdaArn' + "CodePipelineRoleArn", + "CodeBuildRoleArn", + "SendSlackNotificationLambdaArn", ] - def __init__(self, scope: core.Construct, id: str, map_params: dict, ssm_params: dict, stages, **kwargs): #pylint: disable=W0622 + _accepted_triggers = {"code_artifact": "CODEARTIFACT"} + + def __init__( + self, + scope: core.Construct, + id: str, + map_params: dict, + ssm_params: dict, + stages, + **kwargs, + ): # pylint: disable=W0622 super().__init__(scope, id, **kwargs) - [_codepipeline_role_arn, _code_build_role_arn, _send_slack_notification_lambda_arn] = Pipeline.import_required_arns() #pylint: disable=W0632 + # pylint: disable=W0632 + [ + _codepipeline_role_arn, + _code_build_role_arn, + _send_slack_notification_lambda_arn, + ] = Pipeline.import_required_arns() + # pylint: enable=W0632 _pipeline_args = { "role_arn": _codepipeline_role_arn, - "restart_execution_on_update": map_params.get('params', {}).get('restart_execution_on_update', False), - "name": "{0}{1}".format(ADF_PIPELINE_PREFIX, map_params['name']), + "restart_execution_on_update": map_params.get("params", {}).get( + "restart_execution_on_update", False + ), + "name": "{0}{1}".format(ADF_PIPELINE_PREFIX, map_params["name"]), "stages": stages, - "artifact_stores": Pipeline.generate_artifact_stores(map_params, ssm_params), - "tags": Pipeline.restructure_tags(map_params.get('tags', {})) + "artifact_stores": Pipeline.generate_artifact_stores( + map_params, ssm_params + ), + "tags": Pipeline.restructure_tags(map_params.get("tags", {})), } - self.cfn = _codepipeline.CfnPipeline( + self.cfn = _codepipeline.CfnPipeline(self, "pipeline", **_pipeline_args) + adf_events.Events( self, - 'pipeline', - **_pipeline_args + "events", + { + "pipeline": "arn:aws:codepipeline:{0}:{1}:{2}".format( + ADF_DEPLOYMENT_REGION, + ADF_DEPLOYMENT_ACCOUNT_ID, + "{0}{1}".format( + os.environ.get("ADF_PIPELINE_PREFIX"), map_params["name"] + ), + ), + "topic_arn": map_params.get("topic_arn"), + "name": map_params["name"], + "completion_trigger": map_params.get("completion_trigger"), + "schedule": map_params.get("schedule"), + "source": { + "provider": map_params.get("default_providers", {}) + .get("source", {}) + .get("provider"), + "account_id": 
map_params.get("default_providers", {}) + .get("source", {}) + .get("properties", {}) + .get("account_id"), + "repo_name": map_params.get("default_providers", {}) + .get("source", {}) + .get("properties", {}) + .get("repository") + or map_params["name"], + "branch": map_params.get("default_providers", {}) + .get("source", {}) + .get("properties", {}) + .get("branch", "master"), + "poll_for_changes": map_params.get("default_providers", {}) + .get("source", {}) + .get("properties", {}) + .get("poll_for_changes", False), + "trigger_on_changes": map_params.get("default_providers", {}) + .get("source", {}) + .get("properties", {}) + .get("trigger_on_changes", True), + }, + }, ) - adf_events.Events(self, 'events', { - "pipeline": 'arn:aws:codepipeline:{0}:{1}:{2}'.format( - ADF_DEPLOYMENT_REGION, - ADF_DEPLOYMENT_ACCOUNT_ID, - "{0}{1}".format( - os.environ.get( - "ADF_PIPELINE_PREFIX"), - map_params['name'])), - "topic_arn": map_params.get('topic_arn'), - "name": map_params['name'], - "completion_trigger": map_params.get('completion_trigger'), - "schedule": map_params.get('schedule'), - "source": { - "provider": map_params.get('default_providers', {}).get('source', {}).get('provider'), - "account_id": map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('account_id'), - "repo_name": map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('repository') or map_params['name'], - "branch": map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('branch', 'master'), - "poll_for_changes": map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('poll_for_changes', False), - "trigger_on_changes": map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('trigger_on_changes', True), - } - }) @staticmethod def restructure_tags(current_tags): @@ -445,17 +592,18 @@ def restructure_tags(current_tags): def generate_artifact_stores(map_params, ssm_params): output = [] for region in map_params["regions"]: - output.append(_codepipeline.CfnPipeline.ArtifactStoreMapProperty( - artifact_store=_codepipeline.CfnPipeline.ArtifactStoreProperty( - location=ssm_params[region]["s3"], - type="S3", - encryption_key=_codepipeline.CfnPipeline.EncryptionKeyProperty( - id=ssm_params[region]["kms"], - type="KMS" - ) - ), - region=region - )) + output.append( + _codepipeline.CfnPipeline.ArtifactStoreMapProperty( + artifact_store=_codepipeline.CfnPipeline.ArtifactStoreProperty( + location=ssm_params[region]["s3"], + type="S3", + encryption_key=_codepipeline.CfnPipeline.EncryptionKeyProperty( + id=ssm_params[region]["kms"], type="KMS" + ), + ), + region=region, + ) + ) return output @staticmethod @@ -465,3 +613,25 @@ def import_required_arns(): # pylint: disable=no-value-for-parameter _output.append(core.Fn.import_value(arn)) return _output + + def add_pipeline_trigger(self, trigger: Tuple): + (trigger_type, trigger_config) = trigger + if trigger_type in self._accepted_triggers.keys(): + trigger_type = self._accepted_triggers[trigger_type] + else: + raise Exception(trigger) + + if trigger_type == "CODEARTIFACT": + details = {"repositoryName": trigger_config["repository"]} + if trigger_config.get("package"): + details["packageName"] = trigger_config["package"] + trigger = _eventbridge.Rule( + self, + f"codeartifact-pipeline-trigger-{trigger_config['repository']}-{trigger_config['package'] if trigger_config.get('package') else 'all'}", + event_pattern=_eventbridge.EventPattern( + 
source=["aws.codeartifact"], + detail_type=["CodeArtifact Package Version State Change"], + detail=details, + ), + targets=[_eventbridge_targets.CodePipeline(pipeline=_codepipeline.Pipeline.from_pipeline_arn(self, "imported", pipeline_arn=self.cfn.ref))], + ) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py index 6ca6889c3..bbc3589fe 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py @@ -44,6 +44,10 @@ def generate_adf_default_pipeline(scope: core.Stack, stack_input): if "github" in _source_name: adf_github.GitHub.create_webhook_when_required(scope, _pipeline.cfn, stack_input["input"]) + pipeline_triggers = stack_input["input"].get("triggers", {}).get("triggered_by", None) + if pipeline_triggers: + for k, v in pipeline_triggers.items(): + _pipeline.add_pipeline_trigger((k,v)) def generate_source_stage_for_pipeline(_stages, scope, stack_input): _source_name = stack_input["input"]["default_providers"]["source"][ diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py index fba72ecb6..df639370c 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py @@ -140,3 +140,73 @@ def test_pipeline_creation_outputs_as_expected_when_source_is_codecommit_and_bui assert build_stage_action['ActionTypeId']['Provider'] == "CodeBuild" assert len(build_stage['Actions']) == 1 + +def test_pipeline_creation_outputs_with_codeartifact_trigger(): + region_name = "eu-central-1" + acount_id = "123456789012" + + stack_input = { + "input": {"params": {}, "default_providers": {}, "regions": {}, "triggers": {"triggered_by": {"code_artifact": {"repository": "my_test_repo"} }}}, + "ssm_params": {"fake-region": {}}, + } + + stack_input["input"]["name"] = "test-stack" + + stack_input["input"]["default_providers"]["source"] = { + "provider": "codecommit", + "properties": {"account_id": "123456789012"}, + } + stack_input["input"]["default_providers"]["build"] = { + "provider": "codebuild", + "properties": {"account_id": "123456789012"}, + } + + stack_input["ssm_params"][region_name] = { + "modules": "fake-bucket-name", + "kms": f"arn:aws:kms:{region_name}:{acount_id}:key/my-unique-kms-key-id", + } + app = core.App() + PipelineStack(app, stack_input) + + cloud_assembly = app.synth() + resources = {k[0:-8]: v for k, v in cloud_assembly.stacks[0].template['Resources'].items()} + trigger = resources['codepipelinecodeartifactpipelinetriggermytestrepoall'] + assert trigger["Type"] == "AWS::Events::Rule" + assert trigger["Properties"]["EventPattern"]["detail-type"] == ["CodeArtifact Package Version State Change"] + assert trigger["Properties"]["EventPattern"]["source"] == ["aws.codeartifact"] + assert trigger["Properties"]["EventPattern"]["detail"] == {"repositoryName": "my_test_repo"} + +def 
+    region_name = "eu-central-1"
+    account_id = "123456789012"
+
+    stack_input = {
+        "input": {"params": {}, "default_providers": {}, "regions": {}, "triggers": {"triggered_by": {"code_artifact": {"repository": "my_test_repo", "package": "my_test_package"} }}},
+        "ssm_params": {"fake-region": {}},
+    }
+
+    stack_input["input"]["name"] = "test-stack"
+
+    stack_input["input"]["default_providers"]["source"] = {
+        "provider": "codecommit",
+        "properties": {"account_id": "123456789012"},
+    }
+    stack_input["input"]["default_providers"]["build"] = {
+        "provider": "codebuild",
+        "properties": {"account_id": "123456789012"},
+    }
+
+    stack_input["ssm_params"][region_name] = {
+        "modules": "fake-bucket-name",
+        "kms": f"arn:aws:kms:{region_name}:{account_id}:key/my-unique-kms-key-id",
+    }
+    app = core.App()
+    PipelineStack(app, stack_input)
+
+    cloud_assembly = app.synth()
+    resources = {k[0:-8]: v for k, v in cloud_assembly.stacks[0].template['Resources'].items()}
+    trigger = resources['codepipelinecodeartifactpipelinetriggermytestrepomytestpackage']
+    assert trigger["Type"] == "AWS::Events::Rule"
+    assert trigger["Properties"]["EventPattern"]["detail-type"] == ["CodeArtifact Package Version State Change"]
+    assert trigger["Properties"]["EventPattern"]["source"] == ["aws.codeartifact"]
+    assert trigger["Properties"]["EventPattern"]["detail"] == {"repositoryName": "my_test_repo", "packageName": "my_test_package"}
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py
index 90d22dad9..8716a6445 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py
@@ -310,6 +310,13 @@ COMPLETION_TRIGGERS_SCHEMA = {
     "pipelines": [str]
 }
 
+PIPELINE_TRIGGERS_SCHEMA = {
+    "code_artifact": {"repository": str, Optional("package"): str}
+}
+TRIGGERS_SCHEMA = {
+    Optional("on_complete"): COMPLETION_TRIGGERS_SCHEMA,
+    Optional("triggered_by"): PIPELINE_TRIGGERS_SCHEMA,
+}
 PIPELINE_SCHEMA = {
     "name": And(str, len),
     "default_providers": PROVIDER_SCHEMA,
@@ -317,7 +324,8 @@
     Optional("tags"): dict,
     Optional("targets"): [Or(str, int, TARGET_SCHEMA, TARGET_LIST_SCHEMA)],
     Optional("regions"): REGION_SCHEMA,
-    Optional("completion_trigger"): COMPLETION_TRIGGERS_SCHEMA
+    Optional("completion_trigger"): COMPLETION_TRIGGERS_SCHEMA,
+    Optional("triggers"): TRIGGERS_SCHEMA
 }
 TOP_LEVEL_SCHEMA = {
     "pipelines": [PIPELINE_SCHEMA],

From f89f695aac81671eab6d55c4c9e200460bed965f Mon Sep 17 00:00:00 2001
From: Stewart Wallace
Date: Fri, 8 Oct 2021 14:37:20 +0100
Subject: [PATCH 02/17] Undoing black linting

---
 .../cdk/cdk_constructs/adf_codepipeline.py    | 592 +++++++-----------
 1 file changed, 225 insertions(+), 367 deletions(-)

diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py
index fcf8c0a06..ada7c564b 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py
@@ -13,7 +13,7 @@
     aws_codepipeline as _codepipeline,
     aws_events as _eventbridge,
aws_events_targets as _eventbridge_targets, - core, + core ) from cdk_constructs import adf_events @@ -27,333 +27,242 @@ LOGGER = configure_logger(__name__) - class Action: _version = "1" def __init__(self, **kwargs): - self.name = kwargs.get("name") - self.target = kwargs.get("target", {}) - self.provider = kwargs.get("provider") - self.category = kwargs.get("category") - self.map_params = kwargs.get("map_params") - self.project_name = kwargs.get("project_name") - self.owner = kwargs.get("owner") or "AWS" - self.run_order = kwargs.get("run_order") - self.index = kwargs.get("index") - self.action_name = kwargs.get("action_name") - self.action_mode = kwargs.get("action_mode", "").upper() - self.region = kwargs.get("region") or ADF_DEPLOYMENT_REGION - self.account_id = ( - self.map_params["default_providers"]["source"] - .get("properties", {}) - .get("account_id") - ) + self.name = kwargs.get('name') + self.target = kwargs.get('target', {}) + self.provider = kwargs.get('provider') + self.category = kwargs.get('category') + self.map_params = kwargs.get('map_params') + self.project_name = kwargs.get('project_name') + self.owner = kwargs.get('owner') or 'AWS' + self.run_order = kwargs.get('run_order') + self.index = kwargs.get('index') + self.action_name = kwargs.get('action_name') + self.action_mode = kwargs.get('action_mode', '').upper() + self.region = kwargs.get('region') or ADF_DEPLOYMENT_REGION + self.account_id = self.map_params["default_providers"]["source"].get('properties', {}).get("account_id") self.role_arn = self._generate_role_arn() self.notification_endpoint = self.map_params.get("topic_arn") self.configuration = self._generate_configuration() self.config = self.generate() def _generate_role_arn(self): - if self.category not in ["Build", "Deploy"]: + if self.category not in ['Build', 'Deploy']: return None - default_provider = self.map_params["default_providers"][self.category.lower()] - specific_role = self.target.get("properties", {}).get( - "role" - ) or default_provider.get("properties", {}).get("role") + default_provider = self.map_params['default_providers'][self.category.lower()] + specific_role = self.target.get('properties', {}).get('role') or default_provider.get('properties', {}).get('role') if specific_role: - account_id = ( - self.account_id if self.provider == "CodeBuild" else self.target["id"] - ) - return "arn:aws:iam::{0}:role/{1}".format(account_id, specific_role) + account_id = self.account_id if self.provider == 'CodeBuild' else self.target['id'] + return 'arn:aws:iam::{0}:role/{1}'.format(account_id, specific_role) return None - def _generate_configuration(self): # pylint: disable=R0912, R0911, R0915 + def _generate_configuration(self): #pylint: disable=R0912, R0911, R0915 if self.provider == "Manual" and self.category == "Approval": _props = { - "CustomData": self.target.get("properties", {}).get("message") - or "Approval stage for {0}".format(self.map_params["name"]) + "CustomData": self.target.get('properties', {}).get('message') or "Approval stage for {0}".format(self.map_params['name']) } if self.notification_endpoint: _props["NotificationArn"] = self.notification_endpoint - if self.target.get("properties", {}).get("sns_topic_arn"): - _props["NotificationArn"] = self.target.get("properties", {}).get( - "sns_topic_arn" - ) + if self.target.get('properties', {}).get('sns_topic_arn'): + _props["NotificationArn"] = self.target.get('properties', {}).get('sns_topic_arn') return _props if self.provider == "S3" and self.category == "Source": return { - 
"S3Bucket": self.map_params.get("default_providers", {}) - .get("source", {}) - .get("properties", {}) - .get("bucket_name"), - "S3ObjectKey": self.map_params.get("default_providers", {}) - .get("source", {}) - .get("properties", {}) - .get("object_key"), - "PollForSourceChanges": self.map_params.get("default_providers", {}) - .get("source", {}) - .get("properties", {}) - .get("trigger_on_changes", True), + "S3Bucket": self.map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('bucket_name'), + "S3ObjectKey": self.map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('object_key'), + "PollForSourceChanges": self.map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('trigger_on_changes', True), } if self.provider == "S3" and self.category == "Deploy": return { - "BucketName": self.map_params.get("default_providers", {}) - .get("deploy", {}) - .get("properties", {}) - .get("bucket_name") - or self.target.get("properties", {}).get("bucket_name"), - "Extract": self.map_params.get("default_providers", {}) - .get("deploy", {}) - .get("properties", {}) - .get("extract") - or self.target.get("properties", {}).get("extract", False), - "ObjectKey": self.map_params.get("default_providers", {}) - .get("deploy", {}) - .get("properties", {}) - .get("object_key") - or self.target.get("properties", {}).get("object_key"), + "BucketName": self.map_params.get( + 'default_providers', {}).get( + 'deploy', {}).get( + 'properties', {}).get( + 'bucket_name') or self.target.get( + 'properties', {}).get( + 'bucket_name'), + "Extract": self.map_params.get( + 'default_providers', {}).get( + 'deploy', {}).get( + 'properties', {}).get( + 'extract') or self.target.get( + 'properties', {}).get( + 'extract', False), + "ObjectKey": self.map_params.get( + 'default_providers', {}).get( + 'deploy', {}).get( + 'properties', {}).get( + 'object_key') or self.target.get( + 'properties', {}).get( + 'object_key') } if self.provider == "CodeStarSourceConnection": - owner = ( - self.map_params.get("default_providers", {}) - .get("source") - .get("properties", {}) - .get("owner", {}) - ) - repo = ( - self.map_params.get("default_providers", {}) - .get("source", {}) - .get("properties", {}) - .get("repository", {}) - or self.map_params["name"] - ) - codestar_connection_path = ( - self.map_params.get("default_providers", {}) - .get("source") - .get("properties", {}) - .get("codestar_connection_path", {}) - ) - ssm_client = boto3.client("ssm") + owner = self.map_params.get('default_providers', {}).get('source').get('properties', {}).get('owner', {}) + repo = self.map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('repository', {}) or self.map_params['name'] + codestar_connection_path = self.map_params.get('default_providers', {}).get('source').get('properties', {}).get('codestar_connection_path', {}) + ssm_client = boto3.client('ssm') try: response = ssm_client.get_parameter(Name=codestar_connection_path) except Exception as e: - LOGGER.error( - f"No parameter found at {codestar_connection_path}. Check the path/value." - ) + LOGGER.error(f"No parameter found at {codestar_connection_path}. 
Check the path/value.") raise e - connection_arn = response["Parameter"]["Value"] + connection_arn = response['Parameter']['Value'] return { "ConnectionArn": connection_arn, "FullRepositoryId": f"{owner}/{repo}", - "BranchName": self.map_params.get("default_providers", {}) - .get("source", {}) - .get("properties", {}) - .get("branch", {}) - or "master", + "BranchName": self.map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('branch', {}) or 'master' } if self.provider == "GitHub": return { - "Owner": self.map_params.get("default_providers", {}) - .get("source") - .get("properties", {}) - .get("owner", {}), - "Repo": self.map_params.get("default_providers", {}) - .get("source", {}) - .get("properties", {}) - .get("repository", {}) - or self.map_params["name"], - "Branch": self.map_params.get("default_providers", {}) - .get("source", {}) - .get("properties", {}) - .get("branch", {}) - or "master", + "Owner": self.map_params.get('default_providers', {}).get('source').get('properties', {}).get('owner', {}), + "Repo": self.map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('repository', {}) or self.map_params['name'], + "Branch": self.map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('branch', {}) or 'master', # pylint: disable=no-value-for-parameter "OAuthToken": core.SecretValue.secrets_manager( - self.map_params["default_providers"]["source"] - .get("properties", {}) - .get("oauth_token_path"), - json_field=self.map_params["default_providers"]["source"] - .get("properties", {}) - .get("json_field"), + self.map_params['default_providers']['source'].get('properties', {}).get('oauth_token_path'), + json_field=self.map_params['default_providers']['source'].get('properties', {}).get('json_field') ), - "PollForSourceChanges": False, + "PollForSourceChanges": False } if self.provider == "Lambda": return { - "FunctionName": self.map_params.get("default_providers", {}) - .get("deploy", {}) - .get("properties", {}) - .get("function_name", "") - or self.target.get("properties", {}).get("function_name", ""), - "UserParameters": str( - self.map_params.get("default_providers", {}) - .get("deploy", {}) - .get("properties", {}) - .get("input", "") - or self.target.get("properties", {}).get("input", "") - ), + "FunctionName": self.map_params.get( + 'default_providers', {}).get( + 'deploy', {}).get( + 'properties', {}).get( + 'function_name', '') or self.target.get( + 'properties', {}).get( + 'function_name', ''), + "UserParameters": str(self.map_params.get( + 'default_providers', {}).get( + 'deploy', {}).get( + 'properties', {}).get('input', '') or self.target.get( + 'properties', {}).get( + 'input', '')) } if self.provider == "CloudFormation": - _path_prefix = ( - self.target.get("properties", {}).get("root_dir") - or self.map_params.get("default_providers", {}) - .get("deploy", {}) - .get("properties", {}) - .get("root_dir") - or "" - ) - if _path_prefix and not _path_prefix.endswith("/"): + _path_prefix = self.target.get( + 'properties', {}).get( + 'root_dir') or self.map_params.get( + 'default_providers', {}).get( + 'deploy', {}).get( + 'properties', {}).get( + 'root_dir') or "" + if _path_prefix and not _path_prefix.endswith('/'): _path_prefix = "{}/".format(_path_prefix) _input_artifact = "{map_name}-build".format( - map_name=self.map_params["name"], + map_name=self.map_params['name'], ) _props = { "ActionMode": self.action_mode, - "StackName": self.target.get("properties", {}).get("stack_name") - or 
self.map_params.get("default_providers", {}) - .get("deploy", {}) - .get("properties", {}) - .get("stack_name") - or "{0}{1}".format(ADF_STACK_PREFIX, self.map_params["name"]), - "ChangeSetName": "{0}{1}".format( - ADF_STACK_PREFIX, self.map_params["name"] - ), + "StackName": self.target.get( + 'properties', {}).get('stack_name') or self.map_params.get( + 'default_providers', {}).get( + 'deploy', {}).get( + 'properties', {}).get( + 'stack_name') or "{0}{1}".format( + ADF_STACK_PREFIX, self.map_params['name']), + "ChangeSetName": "{0}{1}".format(ADF_STACK_PREFIX, self.map_params['name']), "TemplateConfiguration": "{input_artifact}::{path_prefix}params/{target_name}_{region}.json".format( input_artifact=_input_artifact, path_prefix=_path_prefix, - target_name=self.target["name"], + target_name=self.target['name'], region=self.region, ), "Capabilities": "CAPABILITY_NAMED_IAM,CAPABILITY_AUTO_EXPAND", - "RoleArn": "arn:aws:iam::{0}:role/adf-cloudformation-deployment-role".format( - self.target["id"] - ) - if not self.role_arn - else self.role_arn, + "RoleArn": "arn:aws:iam::{0}:role/adf-cloudformation-deployment-role".format(self.target['id']) if not self.role_arn else self.role_arn } - if ( - self.map_params.get("default_providers", {}) - .get("build", {}) - .get("properties", {}) - .get("environment_variables", {}) - .get("CONTAINS_TRANSFORM") - ): - _props[ - "TemplatePath" - ] = "{input_artifact}::{path_prefix}template_{region}.yml".format( + if self.map_params.get('default_providers', {}).get('build', {}).get('properties', {}).get('environment_variables', {}).get('CONTAINS_TRANSFORM'): + _props["TemplatePath"] = "{input_artifact}::{path_prefix}template_{region}.yml".format( input_artifact=_input_artifact, path_prefix=_path_prefix, region=self.region, ) else: - _template_filename = ( - self.target.get("properties", {}).get("template_filename") - or self.map_params.get("default_providers", {}) - .get("deploy", {}) - .get("properties", {}) - .get("template_filename") - or "template.yml" - ) - _props[ - "TemplatePath" - ] = "{input_artifact}::{path_prefix}{filename}".format( + _template_filename = self.target.get( + 'properties', {}).get( + 'template_filename') or self.map_params.get( + 'default_providers', {}).get( + 'deploy', {}).get( + 'properties', {}).get( + 'template_filename') or "template.yml" + _props["TemplatePath"] = "{input_artifact}::{path_prefix}{filename}".format( input_artifact=_input_artifact, path_prefix=_path_prefix, filename=_template_filename, ) - if self.target.get("properties", {}).get("outputs"): - _props["OutputFileName"] = "{path_prefix}{filename}.json".format( + if self.target.get('properties', {}).get('outputs'): + _props['OutputFileName'] = '{path_prefix}{filename}.json'.format( path_prefix=_path_prefix, - filename=self.target["properties"]["outputs"], + filename=self.target['properties']['outputs'], ) - if self.target.get("properties", {}).get("param_overrides"): + if self.target.get('properties', {}).get('param_overrides'): _overrides = {} - for override in self.target.get("properties", {}).get( - "param_overrides", [] - ): - _overrides["{0}".format(override["param"])] = { - "Fn::GetParam": [ - "{0}".format(override["inputs"]), - "{0}.json".format(override["inputs"]), - "{0}".format(override["key_name"]), - ] - } - _props["ParameterOverrides"] = json.dumps(_overrides) + for override in self.target.get('properties', {}).get('param_overrides', []): + _overrides["{0}".format( + override['param'])] = {"Fn::GetParam": ["{0}".format( + override['inputs']), 
"{0}.json".format( + override['inputs']), "{0}".format( + override['key_name'])]} + _props['ParameterOverrides'] = json.dumps(_overrides) return _props if self.provider == "Jenkins": return { - "ProjectName": self.map_params["default_providers"]["build"] - .get("properties", {}) - .get( - "project_name", self.map_params["name"] - ), # Enter the name of the project you created in the Jenkins plugin - "ServerURL": self.map_params["default_providers"]["build"] - .get("properties", {}) - .get("server_url"), # Server URL - "ProviderName": self.map_params["default_providers"]["build"] - .get("properties", {}) - .get( - "provider_name" - ), # Enter the provider name you configured in the Jenkins plugin + "ProjectName": self.map_params['default_providers']['build'].get( + 'properties', {}).get( + 'project_name', self.map_params['name']), # Enter the name of the project you created in the Jenkins plugin + "ServerURL": self.map_params['default_providers']['build'].get('properties', {}).get('server_url'), # Server URL + "ProviderName": self.map_params['default_providers']['build'].get('properties', {}).get('provider_name') # Enter the provider name you configured in the Jenkins plugin } if self.provider == "CodeBuild": if self.project_name is None: - self.project_name = "adf-build-{0}".format(self.map_params["name"]) - return {"ProjectName": self.project_name} + self.project_name = "adf-build-{0}".format(self.map_params['name']) + return { + "ProjectName": self.project_name + } if self.provider == "ServiceCatalog": return { - "ConfigurationFilePath": self.target.get("properties", {}).get( - "configuration_file_path" - ) - or "params/{0}_{1}.json".format(self.target["name"], self.region), - "ProductId": self.target.get("properties", {}).get("product_id") - or self.map_params["default_providers"]["deploy"] - .get("properties", {}) - .get( - "product_id" - ), # product_id is required for Service Catalog, meaning the product must already exist. + "ConfigurationFilePath": self.target.get('properties', {}).get('configuration_file_path') or "params/{0}_{1}.json".format(self.target['name'], self.region), + "ProductId": self.target.get( + 'properties', {}).get( + 'product_id') or self.map_params['default_providers']['deploy'].get( + 'properties', {}).get( + 'product_id') # product_id is required for Service Catalog, meaning the product must already exist. 
} if self.provider == "CodeDeploy": return { - "ApplicationName": self.map_params.get("default_providers", {}) - .get("deploy", {}) - .get("properties", {}) - .get("application_name", {}) - or self.target.get("properties", {}).get("application_name"), - "DeploymentGroupName": self.map_params.get("default_providers", {}) - .get("deploy", {}) - .get("properties", {}) - .get("deployment_group_name", {}) - or self.target.get("properties", {}).get("deployment_group_name"), + "ApplicationName": self.map_params.get( + 'default_providers', {}).get( + 'deploy', {}).get( + 'properties', {}).get( + 'application_name', {}) or self.target.get( + 'properties', {}).get( + 'application_name'), + "DeploymentGroupName": self.map_params.get( + 'default_providers', {}).get( + 'deploy', {}).get( + 'properties', {}).get( + 'deployment_group_name', {}) or self.target.get( + 'properties', {}).get( + 'deployment_group_name') } if self.provider == "CodeCommit": return { - "BranchName": self.map_params["default_providers"]["source"] - .get("properties", {}) - .get("branch", "master"), - "RepositoryName": self.map_params["default_providers"]["source"] - .get("properties", {}) - .get("repository", {}) - or self.map_params["name"], + "BranchName": self.map_params['default_providers']['source'].get('properties', {}).get('branch', 'master'), + "RepositoryName": self.map_params['default_providers']['source'].get('properties', {}).get('repository', {}) or self.map_params['name'], "PollForSourceChanges": ( - self.map_params["default_providers"]["source"] - .get("properties", {}) - .get("trigger_on_changes", True) - and self.map_params["default_providers"]["source"] - .get("properties", {}) - .get("poll_for_changes", False) - ), + self.map_params['default_providers']['source'].get('properties', {}).get('trigger_on_changes', True) + and self.map_params['default_providers']['source'].get('properties', {}).get('poll_for_changes', False) + ) } raise Exception("{0} is not a valid provider".format(self.provider)) - def _generate_codepipeline_access_role(self): # pylint: disable=R0911 + def _generate_codepipeline_access_role(self): #pylint: disable=R0911 if self.provider == "CodeCommit": - return "arn:aws:iam::{0}:role/adf-codecommit-role".format( - self.map_params["default_providers"]["source"]["properties"][ - "account_id" - ] - ) + return "arn:aws:iam::{0}:role/adf-codecommit-role".format(self.map_params['default_providers']['source']['properties']['account_id']) if self.provider == "GitHub": return None if self.provider == "CodeStarSourceConnection": @@ -362,50 +271,38 @@ def _generate_codepipeline_access_role(self): # pylint: disable=R0911 return None if self.provider == "S3" and self.category == "Source": # This could be changed to use a new role that is bootstrapped, ideally we rename adf-cloudformation-role to a generic deployment role name - return "arn:aws:iam::{0}:role/adf-codecommit-role".format( - self.map_params["default_providers"]["source"]["properties"][ - "account_id" - ] - ) + return "arn:aws:iam::{0}:role/adf-codecommit-role".format(self.map_params['default_providers']['source']['properties']['account_id']) if self.provider == "S3" and self.category == "Deploy": # This could be changed to use a new role that is bootstrapped, ideally we rename adf-cloudformation-role to a generic deployment role name - return "arn:aws:iam::{0}:role/adf-cloudformation-role".format( - self.target["id"] - ) + return "arn:aws:iam::{0}:role/adf-cloudformation-role".format(self.target['id']) if self.provider == "ServiceCatalog": # This 
could be changed to use a new role that is bootstrapped, ideally we rename adf-cloudformation-role to a generic deployment role name - return "arn:aws:iam::{0}:role/adf-cloudformation-role".format( - self.target["id"] - ) + return "arn:aws:iam::{0}:role/adf-cloudformation-role".format(self.target['id']) if self.provider == "CodeDeploy": # This could be changed to use a new role that is bootstrapped, ideally we rename adf-cloudformation-role to a generic deployment role name - return "arn:aws:iam::{0}:role/adf-cloudformation-role".format( - self.target["id"] - ) + return "arn:aws:iam::{0}:role/adf-cloudformation-role".format(self.target['id']) if self.provider == "Lambda": # This could be changed to use a new role that is bootstrapped, ideally we rename adf-cloudformation-role to a generic deployment role name return None if self.provider == "CloudFormation": - return "arn:aws:iam::{0}:role/adf-cloudformation-role".format( - self.target["id"] - ) + return "arn:aws:iam::{0}:role/adf-cloudformation-role".format(self.target['id']) if self.provider == "Manual": return None - raise Exception("Invalid Provider {0}".format(self.provider)) + raise Exception('Invalid Provider {0}'.format(self.provider)) def generate(self): _role = self._generate_codepipeline_access_role() action_props = { - "action_type_id": _codepipeline.CfnPipeline.ActionTypeIdProperty( + "action_type_id":_codepipeline.CfnPipeline.ActionTypeIdProperty( version=Action._version, owner=self.owner, provider=self.provider, - category=self.category, + category=self.category ), "configuration": self.configuration, "name": self.action_name, "region": self.region or ADF_DEPLOYMENT_REGION, - "run_order": self.run_order, + "run_order": self.run_order } input_artifacts = self._get_input_artifacts() if input_artifacts: @@ -415,10 +312,12 @@ def generate(self): action_props["output_artifacts"] = output_artifacts if _role: action_props["role_arn"] = _role - if self.category == "Manual": - del action_props["region"] + if self.category == 'Manual': + del action_props['region'] - return _codepipeline.CfnPipeline.ActionDeclarationProperty(**action_props) + return _codepipeline.CfnPipeline.ActionDeclarationProperty( + **action_props + ) def _get_base_input_artifact_name(self): """ @@ -427,12 +326,13 @@ def _get_base_input_artifact_name(self): Returns: str: The output artifact name as a string """ - use_output_source = not self.target or not self.map_params.get( - "default_providers", {} - ).get("build", {}).get("enabled", True) + use_output_source = ( + not self.target or + not self.map_params.get('default_providers', {}).get('build', {}).get('enabled', True) + ) if use_output_source: return "output-source" - return "{0}-build".format(self.map_params["name"]) + return "{0}-build".format(self.map_params['name']) def _get_input_artifacts(self): """ @@ -441,25 +341,19 @@ def _get_input_artifacts(self): Returns: list: The Input Artifacts """ - if not self.category in ["Build", "Deploy"]: + if not self.category in ['Build', 'Deploy']: return [] input_artifacts = [ _codepipeline.CfnPipeline.InputArtifactProperty( name=self._get_base_input_artifact_name(), ), ] - if self.category == "Deploy": - for override in self.target.get("properties", {}).get( - "param_overrides", [] - ): - if ( - self.provider == "CloudFormation" - and override.get("inputs") - and self.action_mode != "CHANGE_SET_EXECUTE" - ): + if self.category == 'Deploy': + for override in self.target.get('properties', {}).get('param_overrides', []): + if self.provider == "CloudFormation" and 
override.get('inputs') and self.action_mode != "CHANGE_SET_EXECUTE": input_artifacts.append( _codepipeline.CfnPipeline.InputArtifactProperty( - name=override.get("inputs") + name=override.get('inputs') ) ) return input_artifacts @@ -471,15 +365,15 @@ def _get_base_output_artifact_name(self): Returns: str: The output artifact name as a string """ - if self.category == "Source": + if self.category == 'Source': return "output-source" - if self.category == "Build" and not self.target: - return "{0}-build".format(self.map_params["name"]) - if self.category == "Deploy" and self.provider == "CloudFormation": - outputs_name = self.target.get("properties", {}).get("outputs", "") - if outputs_name and self.action_mode != "CHANGE_SET_REPLACE": + if self.category == 'Build' and not self.target: + return "{0}-build".format(self.map_params['name']) + if self.category == 'Deploy' and self.provider == "CloudFormation": + outputs_name = self.target.get('properties', {}).get('outputs', '') + if outputs_name and self.action_mode != 'CHANGE_SET_REPLACE': return outputs_name - return "" + return '' def _get_output_artifacts(self): """ @@ -500,86 +394,50 @@ def _get_output_artifacts(self): class Pipeline(core.Construct): _import_arns = [ - "CodePipelineRoleArn", - "CodeBuildRoleArn", - "SendSlackNotificationLambdaArn", + 'CodePipelineRoleArn', + 'CodeBuildRoleArn', + 'SendSlackNotificationLambdaArn' ] _accepted_triggers = {"code_artifact": "CODEARTIFACT"} - def __init__( - self, - scope: core.Construct, - id: str, - map_params: dict, - ssm_params: dict, - stages, - **kwargs, - ): # pylint: disable=W0622 + def __init__(self, scope: core.Construct, id: str, map_params: dict, ssm_params: dict, stages, **kwargs): #pylint: disable=W0622 super().__init__(scope, id, **kwargs) - # pylint: disable=W0632 - [ - _codepipeline_role_arn, - _code_build_role_arn, - _send_slack_notification_lambda_arn, - ] = Pipeline.import_required_arns() - # pylint: enable=W0632 + [_codepipeline_role_arn, _code_build_role_arn, _send_slack_notification_lambda_arn] = Pipeline.import_required_arns() #pylint: disable=W0632 _pipeline_args = { "role_arn": _codepipeline_role_arn, - "restart_execution_on_update": map_params.get("params", {}).get( - "restart_execution_on_update", False - ), - "name": "{0}{1}".format(ADF_PIPELINE_PREFIX, map_params["name"]), + "restart_execution_on_update": map_params.get('params', {}).get('restart_execution_on_update', False), + "name": "{0}{1}".format(ADF_PIPELINE_PREFIX, map_params['name']), "stages": stages, - "artifact_stores": Pipeline.generate_artifact_stores( - map_params, ssm_params - ), - "tags": Pipeline.restructure_tags(map_params.get("tags", {})), + "artifact_stores": Pipeline.generate_artifact_stores(map_params, ssm_params), + "tags": Pipeline.restructure_tags(map_params.get('tags', {})) } - self.cfn = _codepipeline.CfnPipeline(self, "pipeline", **_pipeline_args) - adf_events.Events( + self.cfn = _codepipeline.CfnPipeline( self, - "events", - { - "pipeline": "arn:aws:codepipeline:{0}:{1}:{2}".format( - ADF_DEPLOYMENT_REGION, - ADF_DEPLOYMENT_ACCOUNT_ID, - "{0}{1}".format( - os.environ.get("ADF_PIPELINE_PREFIX"), map_params["name"] - ), - ), - "topic_arn": map_params.get("topic_arn"), - "name": map_params["name"], - "completion_trigger": map_params.get("completion_trigger"), - "schedule": map_params.get("schedule"), - "source": { - "provider": map_params.get("default_providers", {}) - .get("source", {}) - .get("provider"), - "account_id": map_params.get("default_providers", {}) - .get("source", {}) - 
.get("properties", {}) - .get("account_id"), - "repo_name": map_params.get("default_providers", {}) - .get("source", {}) - .get("properties", {}) - .get("repository") - or map_params["name"], - "branch": map_params.get("default_providers", {}) - .get("source", {}) - .get("properties", {}) - .get("branch", "master"), - "poll_for_changes": map_params.get("default_providers", {}) - .get("source", {}) - .get("properties", {}) - .get("poll_for_changes", False), - "trigger_on_changes": map_params.get("default_providers", {}) - .get("source", {}) - .get("properties", {}) - .get("trigger_on_changes", True), - }, - }, + 'pipeline', + **_pipeline_args ) + adf_events.Events(self, 'events', { + "pipeline": 'arn:aws:codepipeline:{0}:{1}:{2}'.format( + ADF_DEPLOYMENT_REGION, + ADF_DEPLOYMENT_ACCOUNT_ID, + "{0}{1}".format( + os.environ.get( + "ADF_PIPELINE_PREFIX"), + map_params['name'])), + "topic_arn": map_params.get('topic_arn'), + "name": map_params['name'], + "completion_trigger": map_params.get('completion_trigger'), + "schedule": map_params.get('schedule'), + "source": { + "provider": map_params.get('default_providers', {}).get('source', {}).get('provider'), + "account_id": map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('account_id'), + "repo_name": map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('repository') or map_params['name'], + "branch": map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('branch', 'master'), + "poll_for_changes": map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('poll_for_changes', False), + "trigger_on_changes": map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('trigger_on_changes', True), + } + }) @staticmethod def restructure_tags(current_tags): @@ -592,18 +450,17 @@ def restructure_tags(current_tags): def generate_artifact_stores(map_params, ssm_params): output = [] for region in map_params["regions"]: - output.append( - _codepipeline.CfnPipeline.ArtifactStoreMapProperty( - artifact_store=_codepipeline.CfnPipeline.ArtifactStoreProperty( - location=ssm_params[region]["s3"], - type="S3", - encryption_key=_codepipeline.CfnPipeline.EncryptionKeyProperty( - id=ssm_params[region]["kms"], type="KMS" - ), - ), - region=region, - ) - ) + output.append(_codepipeline.CfnPipeline.ArtifactStoreMapProperty( + artifact_store=_codepipeline.CfnPipeline.ArtifactStoreProperty( + location=ssm_params[region]["s3"], + type="S3", + encryption_key=_codepipeline.CfnPipeline.EncryptionKeyProperty( + id=ssm_params[region]["kms"], + type="KMS" + ) + ), + region=region + )) return output @staticmethod @@ -614,6 +471,7 @@ def import_required_arns(): _output.append(core.Fn.import_value(arn)) return _output + def add_pipeline_trigger(self, trigger: Tuple): (trigger_type, trigger_config) = trigger if trigger_type in self._accepted_triggers.keys(): From 062d52c34bdbc2f0f4ecd3f9c18fcdc6e730692e Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Fri, 22 Oct 2021 14:15:30 +0100 Subject: [PATCH 03/17] Update user-guide.md --- docs/user-guide.md | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/docs/user-guide.md b/docs/user-guide.md index 97337c452..c5d718773 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -239,7 +239,28 @@ Pipelines can also trigger other pipelines upon completion. 
To do this, use the name: web-app-testing ``` -In the above example, the *ami-builder* pipeline runs every 7 days based on its schedule. When it completes, it executes the *my-web-app-pipeline* pipeline as defined in its *completion_trigger* property. +### Additional Triggers + +Pipelines can also be trigger by other events For example, a new version of a package hosted on CodeArtifact: + +```yaml + - name: ami-builder + default_providers: + source: + provider: codecommit + properties: + account_id: 222222222222 + build: + provider: codebuild + role: packer + size: medium + triggers: # What should happen when this pipeline completes + triggered_by: + code_artifact: + repository: my_test_repository +``` + +In the above example, the *ami-builder* pipeline is triggered by everytime a new package version is published to the my_test_repository repo. ### Additional Deployment Maps From f4ac55cbb35f56f76d28c9077394b1d3fcd2dd1d Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Fri, 22 Oct 2021 16:47:47 +0100 Subject: [PATCH 04/17] Addressing PR feedback --- docs/user-guide.md | 5 +-- .../cdk/cdk_constructs/adf_codepipeline.py | 2 +- .../tests/test_pipeline_creation.py | 34 +++++++++++++++++++ 3 files changed, 38 insertions(+), 3 deletions(-) diff --git a/docs/user-guide.md b/docs/user-guide.md index c5d718773..df5715541 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -6,6 +6,7 @@ - [Params](#params) - [Repositories](#repositories) - [Completion Triggers](#completion-triggers) + - [Additional Triggers](#additional-triggers) - [Additional Deployment Maps](#additional-deployment-maps) - [Removing Pipelines](#removing-pipelines) - [Deploying via Pipelines](#deploying-via-pipelines) @@ -241,7 +242,7 @@ Pipelines can also trigger other pipelines upon completion. To do this, use the ### Additional Triggers -Pipelines can also be trigger by other events For example, a new version of a package hosted on CodeArtifact: +Pipelines can also be triggered by other events. For example, a new version of a package hosted on CodeArtifact being published: ```yaml - name: ami-builder @@ -260,7 +261,7 @@ Pipelines can also be trigger by other events For example, a new version of a pa repository: my_test_repository ``` -In the above example, the *ami-builder* pipeline is triggered by everytime a new package version is published to the my_test_repository repo. +In the above example, the *ami-builder* pipeline is triggered by everytime a new package version is published to the *my_test_repository* repo. 
### Additional Deployment Maps diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py index ada7c564b..7440daffe 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py @@ -477,7 +477,7 @@ def add_pipeline_trigger(self, trigger: Tuple): if trigger_type in self._accepted_triggers.keys(): trigger_type = self._accepted_triggers[trigger_type] else: - raise Exception(trigger) + raise Exception(f"{trigger_type} is not currently supported as a pipeline trigger") if trigger_type == "CODEARTIFACT": details = {"repositoryName": trigger_config["repository"]} diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py index df639370c..2fcee9da4 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py @@ -210,3 +210,37 @@ def test_pipeline_creation_outputs_with_codeartifact_trigger_with_package_name() assert trigger["Properties"]["EventPattern"]["detail-type"] == ["CodeArtifact Package Version State Change"] assert trigger["Properties"]["EventPattern"]["source"] == ["aws.codeartifact"] assert trigger["Properties"]["EventPattern"]["detail"] == {"repositoryName": "my_test_repo", "packageName": "my_test_package"} + +def test_pipeline_creation_outputs_with_invalid_trigger_type(): + region_name = "eu-central-1" + account_id = "123456789012" + + stack_input = { + "input": {"params": {}, "default_providers": {}, "regions": {}, "triggers": {"triggered_by": {"infinidash": {"arn": "arn:aws:11111111:us-east-1:infinidash/dash:blahblahblah"} }}}, + "ssm_params": {"fake-region": {}}, + } + + stack_input["input"]["name"] = "test-stack" + + stack_input["input"]["default_providers"]["source"] = { + "provider": "codecommit", + "properties": {"account_id": "123456789012"}, + } + stack_input["input"]["default_providers"]["build"] = { + "provider": "codebuild", + "properties": {"account_id": "123456789012"}, + } + + stack_input["ssm_params"][region_name] = { + "modules": "fake-bucket-name", + "kms": f"arn:aws:kms:{region_name}:{account_id}:key/my-unique-kms-key-id", + } + app = core.App() + + + with pytest.raises(Exception) as e_info: + PipelineStack(app, stack_input) + cloud_assembly = app.synth() + + + From 232cd8f7ec390ce2f484023968835c22b2fd5c44 Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Wed, 10 Nov 2021 09:26:48 +0000 Subject: [PATCH 05/17] Adding in documentation and code for completion triggers --- docs/user-guide.md | 36 ++++++++++++++++++- .../cdk/cdk_constructs/adf_codepipeline.py | 2 +- 2 files changed, 36 insertions(+), 2 deletions(-) diff --git a/docs/user-guide.md b/docs/user-guide.md index df5715541..36a2547d4 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -240,6 +240,40 @@ Pipelines can also trigger other pipelines upon completion.
To do this, use the name: web-app-testing ``` +Completion triggers can also be defined as part of the triggers expanded configuration. Take the above example for the ami-builder pipeline. +```yaml + - name: ami-builder + default_providers: + source: + provider: codecommit + properties: + account_id: 222222222222 + build: + provider: codebuild + role: packer + size: medium + params: + schedule: rate(7 days) + triggers: # What should trigger this pipeline, and what should be triggered when it completes + on_complete: + pipelines: + - my-web-app-pipeline # Start this pipeline + + - name: my-web-app-pipeline + default_providers: + source: + provider: github + properties: + repository: my-web-app + owner: cool_coder + oauth_token_path: /adf/github_token + json_field: token + targets: + - path: /banking/testing + name: web-app-testing +``` + + ### Additional Triggers Pipelines can also be triggered by other events. For example, a new version of a package hosted on CodeArtifact being published: @@ -255,7 +289,7 @@ Pipelines can also be triggered by other events. For example, a new version of a provider: codebuild role: packer size: medium - triggers: # What should happen when this pipeline completes + triggers: # What should trigger this pipeline, and what should be triggered when it completes triggered_by: code_artifact: repository: my_test_repository diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py index 8510d3e0a..a5df41d9c 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py @@ -444,7 +444,7 @@ def __init__(self, scope: core.Construct, id: str, map_params: dict, ssm_params: ), "topic_arn": map_params.get('topic_arn'), "name": map_params['name'], - "completion_trigger": map_params.get('completion_trigger'), + "completion_trigger": map_params.get('completion_trigger', None) or map_params.get("triggers", {}).get("on_complete", None), "schedule": map_params.get('schedule'), "source": { "provider": map_params.get('default_providers', {}).get('source', {}).get('provider'), From a0085ac1ba391f990f8b876f895722fbdb08eacf Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Wed, 10 Nov 2021 15:24:00 +0000 Subject: [PATCH 06/17] Update docs/user-guide.md Co-authored-by: Simon --- docs/user-guide.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/user-guide.md b/docs/user-guide.md index 36a2547d4..75e58bd32 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -295,7 +295,7 @@ Pipelines can also be triggered by other events. For example, a new version of a repository: my_test_repository ``` -In the above example, the *ami-builder* pipeline is triggered by everytime a new package version is published to the *my_test_repository* repo. +In the above example, the *ami-builder* pipeline is triggered when a new package version is published to the *my_test_repository* repository in CodeArtifact. 
### Additional Deployment Maps From b67e79e0bd08966f1b721c29420fc31fee2237d0 Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Wed, 10 Nov 2021 15:24:10 +0000 Subject: [PATCH 07/17] Update docs/user-guide.md Co-authored-by: Simon --- docs/user-guide.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/user-guide.md b/docs/user-guide.md index 75e58bd32..86813f86e 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -256,8 +256,8 @@ Completion triggers can also be defined as part of the triggers expanded configu schedule: rate(7 days) triggers: # What should trigger this pipeline, and what should be triggered when it completes on_complete: - pipelines: - - my-web-app-pipeline # Start this pipeline + pipelines: + - my-web-app-pipeline # Start this pipeline - name: my-web-app-pipeline default_providers: From ace8aac1de2bffb85fbbe2975b30d80bbd0ac1e2 Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Wed, 10 Nov 2021 15:24:17 +0000 Subject: [PATCH 08/17] Update docs/user-guide.md Co-authored-by: Simon --- docs/user-guide.md | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/docs/user-guide.md b/docs/user-guide.md index 86813f86e..a15d8e185 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -260,17 +260,7 @@ Completion triggers can also be defined as part of the triggers expanded configu - my-web-app-pipeline # Start this pipeline - name: my-web-app-pipeline - default_providers: - source: - provider: github - properties: - repository: my-web-app - owner: cool_coder - oauth_token_path: /adf/github_token - json_field: token - targets: - - path: /banking/testing - name: web-app-testing + # Same configuration as defined above. ``` From 35c4b02079330e4365169290cb5e5554cd315a62 Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Wed, 10 Nov 2021 15:24:38 +0000 Subject: [PATCH 09/17] Update src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py Co-authored-by: Simon --- .../shared/cdk/cdk_stacks/tests/test_pipeline_creation.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py index 386428700..57bd08e16 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py @@ -241,6 +241,9 @@ def test_pipeline_creation_outputs_with_invalid_trigger_type(): with pytest.raises(Exception) as e_info: PipelineStack(app, stack_input) cloud_assembly = app.synth() + + error_message = str(e_info.value) + assert error_message.find("is not currently supported as a pipeline trigger") >= 0 From 0b365c5d89e1ec07cb0c7917d19db5b17a41a3fb Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Wed, 10 Nov 2021 15:24:56 +0000 Subject: [PATCH 10/17] Update src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py Co-authored-by: Simon --- .../adf-build/shared/schema_validation.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py 
b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py index 8716a6445..facc16a7f 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py @@ -311,7 +311,10 @@ "pipelines": [str] } PIPELINE_TRIGGERS_SCHEMA = { - "codeartifact": str + Optional("code_artifact"): { + "repository": str, + Optional("package"): str, + } } TRIGGERS_SCHEMA = { Optional("on_complete"): COMPLETION_TRIGGERS_SCHEMA, From 89a400689a4914ad7104515c628db2e38b8c934a Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Wed, 10 Nov 2021 15:25:04 +0000 Subject: [PATCH 11/17] Update src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py Co-authored-by: Simon --- .../shared/cdk/cdk_stacks/tests/test_pipeline_creation.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py index 57bd08e16..84ca6f797 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py @@ -244,6 +244,3 @@ def test_pipeline_creation_outputs_with_invalid_trigger_type(): error_message = str(e_info.value) assert error_message.find("is not currently supported as a pipeline trigger") >= 0 - - - From 4b5eb953c677f5b02addd9ce08d4ba3af71d26d7 Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Wed, 10 Nov 2021 15:35:01 +0000 Subject: [PATCH 12/17] Code Review Comments --- docs/user-guide.md | 29 +++++++------------ .../cdk/cdk_constructs/adf_codepipeline.py | 6 ++-- .../cdk/cdk_stacks/adf_default_pipeline.py | 4 +-- .../tests/test_pipeline_creation.py | 3 ++ 4 files changed, 18 insertions(+), 24 deletions(-) diff --git a/docs/user-guide.md b/docs/user-guide.md index 55fdb4642..d73956708 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -207,7 +207,7 @@ The following are the available pipeline parameters: ### Completion Triggers -Pipelines can also trigger other pipelines upon completion. To do this, use the *completion_trigger* key on the pipeline definition. For example: +Pipelines can also trigger other pipelines upon completion. To do this, use the *on_complete* key on the triggers definition. For example: ```yaml - name: ami-builder @@ -222,9 +222,10 @@ Pipelines can also trigger other pipelines upon completion. To do this, use the size: medium params: schedule: rate(7 days) - completion_trigger: # What should happen when this pipeline completes - pipelines: - - my-web-app-pipeline # Start this pipeline + triggers: # What should trigger this pipeline, and what should be triggered when it completes + on_complete: + pipelines: + - my-web-app-pipeline # Start this pipeline - name: my-web-app-pipeline default_providers: @@ -240,24 +241,16 @@ Pipelines can also trigger other pipelines upon completion. To do this, use the name: web-app-testing ``` -Completion triggers can also be defined as part of the triggers expanded configuration. Take the above example for the ami-builder pipeline. +Completion triggers can also be defined in a shorthand fashion.
Take the above example for the ami-builder pipeline. ```yaml - name: ami-builder - default_providers: - source: - provider: codecommit - properties: - account_id: 222222222222 - build: - provider: codebuild - role: packer - size: medium + # Default providers and parameters are the same as defined above. + # Only difference: instead of using `triggers`, it uses the `completion_triggers` key params: schedule: rate(7 days) - triggers: # What should trigger this pipeline, and what should be triggered when it completes - on_complete: + completion_triggers: # What should trigger this pipeline, and what should be triggered when it completes pipelines: - my-web-app-pipeline # Start this pipeline - name: my-web-app-pipeline # Same configuration as defined above. @@ -266,7 +259,7 @@ Completion triggers can also be defined as part of the triggers expanded configu ### Additional Triggers -Pipelines can also be triggered by other events. For example, a new version of a package hosted on CodeArtifact being published: +Pipelines can also be triggered by other events using the *triggered_by* key on the triggers definition. For example, a new version of a package hosted on CodeArtifact being published: diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py index a27ac4831..d4594a7c1 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py @@ -6,7 +6,6 @@ import os import json -from typing import Tuple import boto3 from aws_cdk import ( @@ -489,8 +488,7 @@ def import_required_arns(): return _output - def add_pipeline_trigger(self, trigger: Tuple): - (trigger_type, trigger_config) = trigger + def add_pipeline_trigger(self, trigger_type, trigger_config): if trigger_type in self._accepted_triggers.keys(): trigger_type = self._accepted_triggers[trigger_type] else: @@ -500,7 +498,7 @@ def add_pipeline_trigger(self, trigger: Tuple): details = {"repositoryName": trigger_config["repository"]} if trigger_config.get("package"): details["packageName"] = trigger_config["package"] - trigger = _eventbridge.Rule( + _eventbridge.Rule( self, f"codeartifact-pipeline-trigger-{trigger_config['repository']}-{trigger_config['package'] if trigger_config.get('package') else 'all'}", event_pattern=_eventbridge.EventPattern( diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py index bbc3589fe..65eca5d3a 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py @@ -46,8 +46,8 @@ def generate_adf_default_pipeline(scope: core.Stack, stack_input): pipeline_triggers = stack_input["input"].get("triggers", {}).get("triggered_by", None) if pipeline_triggers: - for k, v in pipeline_triggers.items(): - _pipeline.add_pipeline_trigger((k,v)) + for trigger_type, trigger_config in pipeline_triggers.items(): +
_pipeline.add_pipeline_trigger(trigger_type=trigger_type, trigger_config=trigger_config) def generate_source_stage_for_pipeline(_stages, scope, stack_input): _source_name = stack_input["input"]["default_providers"]["source"][ "provider" diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py index 84ca6f797..3abbfc3e3 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py @@ -141,6 +141,7 @@ def test_pipeline_creation_outputs_as_expected_when_source_is_codecommit_and_bui assert len(build_stage['Actions']) == 1 + def test_pipeline_creation_outputs_with_codeartifact_trigger(): region_name = "eu-central-1" acount_id = "123456789012" @@ -176,6 +177,7 @@ def test_pipeline_creation_outputs_with_codeartifact_trigger(): assert trigger["Properties"]["EventPattern"]["source"] == ["aws.codeartifact"] assert trigger["Properties"]["EventPattern"]["detail"] == {"repositoryName": "my_test_repo"} + def test_pipeline_creation_outputs_with_codeartifact_trigger_with_package_name(): region_name = "eu-central-1" acount_id = "123456789012" @@ -211,6 +213,7 @@ def test_pipeline_creation_outputs_with_codeartifact_trigger_with_package_name() assert trigger["Properties"]["EventPattern"]["source"] == ["aws.codeartifact"] assert trigger["Properties"]["EventPattern"]["detail"] == {"repositoryName": "my_test_repo", "packageName": "my_test_package"} + def test_pipeline_creation_outputs_with_invalid_trigger_type(): region_name = "eu-central-1" account_id = "123456789012" From 1232e012a5fd3cc1fa1ca2b519e371673b88bbaa Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Wed, 17 Nov 2021 09:21:44 +0000 Subject: [PATCH 13/17] Apply suggestions from code review Co-authored-by: Simon Kok --- docs/user-guide.md | 10 +++++----- .../shared/cdk/cdk_constructs/adf_codepipeline.py | 4 ++-- .../shared/cdk/cdk_stacks/adf_default_pipeline.py | 2 +- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/user-guide.md b/docs/user-guide.md index d73956708..c092b1929 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -223,9 +223,9 @@ Pipelines can also trigger other pipelines upon completion. To do this, use the params: schedule: rate(7 days) triggers: # What should trigger this pipeline, and what should be triggered when it completes - on_complete: - pipelines: - - my-web-app-pipeline # Start this pipeline + on_complete: + pipelines: + - my-web-app-pipeline # Start this pipeline - name: my-web-app-pipeline default_providers: @@ -249,8 +249,8 @@ Completion triggers can also be defined in a shorthand fashion. Take the abov params: schedule: rate(7 days) completion_triggers: # What should trigger this pipeline, and what should be triggered when it completes - pipelines: - - my-web-app-pipeline # Start this pipeline + pipelines: + - my-web-app-pipeline # Start this pipeline - name: my-web-app-pipeline # Same configuration as defined above.
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py index a27ac4831..d4594a7c1 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py @@ -443,7 +443,7 @@ def __init__(self, scope: core.Construct, id: str, map_params: dict, ssm_params: ), "topic_arn": map_params.get('topic_arn'), "name": map_params['name'], - "completion_trigger": map_params.get('completion_trigger', None) or map_params.get("triggers", {}).get("on_complete", None), + "completion_trigger": map_params.get('triggers', {}).get('on_complete', map_params.get('completion_trigger')), "schedule": map_params.get('schedule'), "source": { "provider": map_params.get('default_providers', {}).get('source', {}).get('provider'), @@ -489,7 +489,7 @@ def import_required_arns(): def add_pipeline_trigger(self, trigger_type, trigger_config): - if trigger_type in self._accepted_triggers.keys(): + if trigger_type in self._accepted_triggers: trigger_type = self._accepted_triggers[trigger_type] else: raise Exception(f"{trigger_type} is not currently supported as a pipeline trigger") diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py index 65eca5d3a..5aaa71a67 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py @@ -44,7 +44,7 @@ def generate_adf_default_pipeline(scope: core.Stack, stack_input): if "github" in _source_name: adf_github.GitHub.create_webhook_when_required(scope, _pipeline.cfn, stack_input["input"]) - pipeline_triggers = stack_input["input"].get("triggers", {}).get("triggered_by", None) + pipeline_triggers = stack_input["input"].get("triggers", {}).get("triggered_by") if pipeline_triggers: for trigger_type, trigger_config in pipeline_triggers.items(): _pipeline.add_pipeline_trigger(trigger_type=trigger_type, trigger_config=trigger_config) From 50306d60a187340e8748d7619e045f88940d3c72 Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Wed, 17 Nov 2021 09:38:45 +0000 Subject: [PATCH 14/17] Code Review changes --- .../shared/cdk/cdk_constructs/adf_codepipeline.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py index d4594a7c1..4371a0bf2 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py @@ -24,6 +24,7 @@ ADF_PIPELINE_PREFIX = os.environ.get("ADF_PIPELINE_PREFIX", "") ADF_DEFAULT_BUILD_TIMEOUT = 20 + LOGGER = configure_logger(__name__) @@ -417,7 +418,9 @@ class Pipeline(core.Construct): 'SendSlackNotificationLambdaArn' ] - _accepted_triggers = {"code_artifact": 
"CODEARTIFACT"} + CODEARTIFACT_TRIGGER = "CODEARTIFACT" + + _accepted_triggers = {"code_artifact": CODEARTIFACT_TRIGGER} def __init__(self, scope: core.Construct, id: str, map_params: dict, ssm_params: dict, stages, **kwargs): #pylint: disable=W0622 super().__init__(scope, id, **kwargs) @@ -492,9 +495,10 @@ def add_pipeline_trigger(self, trigger_type, trigger_config): if trigger_type in self._accepted_triggers: trigger_type = self._accepted_triggers[trigger_type] else: + LOGGER.error(f"{trigger_type} is not currently supported. Supported values are: {self._accepted_triggers.keys()}") raise Exception(f"{trigger_type} is not currently supported as a pipeline trigger") - if trigger_type == "CODEARTIFACT": + if trigger_type == self.CODEARTIFACT_TRIGGER: details = {"repositoryName": trigger_config["repository"]} if trigger_config.get("package"): details["packageName"] = trigger_config["package"] From 73f8cc7e264720aaa84ddf674ce88b5ab66c36be Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Wed, 17 Nov 2021 10:53:17 +0000 Subject: [PATCH 15/17] Update src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py Co-authored-by: Simon Kok --- .../adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py index 4371a0bf2..c9b50e6ec 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py @@ -492,11 +492,10 @@ def import_required_arns(): def add_pipeline_trigger(self, trigger_type, trigger_config): - if trigger_type in self._accepted_triggers: - trigger_type = self._accepted_triggers[trigger_type] - else: + if trigger_type not in self._accepted_triggers: LOGGER.error(f"{trigger_type} is not currently supported. 
Supported values are: {self._accepted_triggers.keys()}") raise Exception(f"{trigger_type} is not currently supported as a pipeline trigger") + trigger_type = self._accepted_triggers[trigger_type] if trigger_type == self.CODEARTIFACT_TRIGGER: details = {"repositoryName": trigger_config["repository"]} From a716228049bcf0eaeda78b1748222cc43cdd7f2c Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Wed, 17 Nov 2021 10:53:50 +0000 Subject: [PATCH 16/17] Update src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py Co-authored-by: Simon Kok --- .../adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py index c9b50e6ec..e61708430 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py @@ -503,7 +503,7 @@ def add_pipeline_trigger(self, trigger_type, trigger_config): details["packageName"] = trigger_config["package"] _eventbridge.Rule( self, - f"codeartifact-pipeline-trigger-{trigger_config['repository']}-{trigger_config['package'] if trigger_config.get('package') else 'all'}", + f"codeartifact-pipeline-trigger-{trigger_config['repository']}-{trigger_config.get('package', 'all')}", event_pattern=_eventbridge.EventPattern( source=["aws.codeartifact"], detail_type=["CodeArtifact Package Version State Change"], From 13bd91da8f0d0ebcc5e549ed89b944467485f94e Mon Sep 17 00:00:00 2001 From: Simon Kok Date: Wed, 17 Nov 2021 15:45:24 +0100 Subject: [PATCH 17/17] Added new line on adf_default_pipeline --- .../adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py index a2553ff37..5bb5e7efd 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py @@ -54,6 +54,7 @@ def generate_adf_default_pipeline(scope: core.Stack, stack_input): if isinstance(notification_config, dict) and notification_config.get('type', '') == 'chat_bot': adf_chatbot.PipelineNotifications(scope, "adf_chatbot_notifications", _pipeline.cfn, notification_config) + def generate_source_stage_for_pipeline(_stages, scope, stack_input): _source_name = stack_input["input"]["default_providers"]["source"][ "provider"
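
Taken together, the series leaves CodeArtifact-based pipeline triggers validated, wired, and tested. Read outside the diffs, the schema introduced in PATCH 10 is easier to follow. Below is a standalone sketch, not part of the series itself: it inlines the COMPLETION_TRIGGERS_SCHEMA visible in the hunk context, relies on the `schema` package that schema_validation.py already builds on, and assumes the `Optional("triggered_by")` key name, which the truncated hunk does not show.

```python
# Standalone sketch of the trigger schema after PATCH 10 (assumptions noted).
from schema import Schema, Optional

COMPLETION_TRIGGERS_SCHEMA = {
    "pipelines": [str],
}
PIPELINE_TRIGGERS_SCHEMA = {
    Optional("code_artifact"): {
        "repository": str,
        Optional("package"): str,
    },
}
TRIGGERS_SCHEMA = {
    Optional("on_complete"): COMPLETION_TRIGGERS_SCHEMA,
    Optional("triggered_by"): PIPELINE_TRIGGERS_SCHEMA,  # key name assumed from context
}

# A triggers block like the user-guide examples validates cleanly:
Schema(TRIGGERS_SCHEMA).validate({
    "triggered_by": {"code_artifact": {"repository": "my_test_repository"}},
    "on_complete": {"pipelines": ["my-web-app-pipeline"]},
})
```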
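PATCH 13 also settles the precedence between the legacy *completion_trigger* key and the expanded *triggers.on_complete* form. A minimal sketch of that one-line fallback, using a hypothetical helper name:

```python
# Sketch of the fallback from PATCH 13: triggers.on_complete wins; the legacy
# completion_trigger key is only consulted when on_complete is absent.
def resolve_completion_trigger(map_params: dict):
    return map_params.get("triggers", {}).get(
        "on_complete", map_params.get("completion_trigger")
    )

assert resolve_completion_trigger({
    "triggers": {"on_complete": {"pipelines": ["a"]}},
    "completion_trigger": {"pipelines": ["b"]},
}) == {"pipelines": ["a"]}

assert resolve_completion_trigger({
    "completion_trigger": {"pipelines": ["b"]},
}) == {"pipelines": ["b"]}
```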
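Patches 14 through 16 reshape add_pipeline_trigger into a guard clause plus a constant-driven dispatch. The sketch below condenses that control flow without CDK; resolve_trigger and rule_name are hypothetical helpers for illustration, not functions from the patches:

```python
# Condensed sketch of the dispatch logic after patches 14-16.
CODEARTIFACT_TRIGGER = "CODEARTIFACT"
ACCEPTED_TRIGGERS = {"code_artifact": CODEARTIFACT_TRIGGER}

def resolve_trigger(trigger_type: str) -> str:
    # Guard clause adopted in PATCH 15: reject unknown types, then map.
    if trigger_type not in ACCEPTED_TRIGGERS:
        raise Exception(
            f"{trigger_type} is not currently supported as a pipeline trigger"
        )
    return ACCEPTED_TRIGGERS[trigger_type]

def rule_name(trigger_config: dict) -> str:
    # PATCH 16 swaps the inline ternary for dict.get(key, default).
    package = trigger_config.get("package", "all")
    return f"codeartifact-pipeline-trigger-{trigger_config['repository']}-{package}"

assert resolve_trigger("code_artifact") == CODEARTIFACT_TRIGGER
assert rule_name({"repository": "my_test_repository"}).endswith("-all")
```

The dict.get form reads better than the original conditional expression and behaves identically for the values exercised here; it only differs from the ternary when a package key is present but falsy (for example, an empty string).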
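Finally, the tests pin down the EventBridge event pattern the CodeArtifact trigger must emit. The helper below is hypothetical, but its output mirrors the assertions in test_pipeline_creation.py:

```python
# Sketch of the event pattern asserted by the CodeArtifact trigger tests.
def build_event_pattern(trigger_config: dict) -> dict:
    detail = {"repositoryName": trigger_config["repository"]}
    if trigger_config.get("package"):
        detail["packageName"] = trigger_config["package"]
    return {
        "source": ["aws.codeartifact"],
        "detail-type": ["CodeArtifact Package Version State Change"],
        "detail": detail,
    }

assert build_event_pattern({"repository": "my_test_repo"})["detail"] == {
    "repositoryName": "my_test_repo",
}
assert build_event_pattern(
    {"repository": "my_test_repo", "package": "my_test_package"}
)["detail"] == {
    "repositoryName": "my_test_repo",
    "packageName": "my_test_package",
}
```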