diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_or_update_rule.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_or_update_rule.py
index 9248eeae0..6ab2055b4 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_or_update_rule.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_or_update_rule.py
@@ -14,45 +14,70 @@
 LOGGER = configure_logger(__name__)
-DEPLOYMENT_ACCOUNT_REGION = os.environ["AWS_REGION"]
 DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"]
-PIPELINE_MANAGEMENT_STATEMACHINE = os.getenv("PIPELINE_MANAGEMENT_STATEMACHINE_ARN")
 CLOUDWATCH = boto3.client("cloudwatch")
 METRICS = ADFMetrics(CLOUDWATCH, "PIPELINE_MANAGEMENT/RULE")

-_cache = None
+_CACHE = None


-def lambda_handler(pipeline, _):
-    """Main Lambda Entry point"""
+def lambda_handler(event, _):
+    """
+    Main Lambda Entry point, creating the cross-account EventBridge rule
+    if the source account of the CodeCommit repository is in another account
+    than the deployment account.
+
+    Such that a change in the source repository will trigger the pipeline.
+
+    Args:
+        event (dict): The ADF Pipeline Management State Machine execution
+            input object.
+    """
     # pylint: disable=W0603
     # Global variable here to cache across lambda execution runtimes.
-    global _cache
-    if not _cache:
-        _cache = Cache()
+    global _CACHE
+    if not _CACHE:
+        _CACHE = Cache()
         METRICS.put_metric_data(
             {"MetricName": "CacheInitialized", "Value": 1, "Unit": "Count"}
         )

-    LOGGER.info(pipeline)
+    LOGGER.info(event)
+    pipeline = event['pipeline_definition']
+
+    source_provider = (
+        pipeline.get("default_providers", {})
+        .get("source", {})
+        .get("provider", "codecommit")
+    )
     source_account_id = (
         pipeline.get("default_providers", {})
         .get("source", {})
         .get("properties", {})
-        .get("account_id", {})
+        .get("account_id")
     )

     if (
-        source_account_id
+        source_provider == "codecommit"
+        and source_account_id
         and int(source_account_id) != int(DEPLOYMENT_ACCOUNT_ID)
-        and not _cache.exists(source_account_id)
+        and not _CACHE.exists(source_account_id)
     ):
+        LOGGER.info(
+            "Source is CodeCommit and the repository is hosted in the %s "
+            "account instead of the deployment account (%s). Creating or "
+            "updating EventBridge forward rule to forward change events "
+            "from the source account to the deployment account in "
+            "EventBridge.",
+            source_account_id,
+            DEPLOYMENT_ACCOUNT_ID,
+        )
         rule = Rule(source_account_id)
         rule.create_update()
-        _cache.add(source_account_id, True)
+        _CACHE.add(source_account_id, True)
         METRICS.put_metric_data(
             {"MetricName": "CreateOrUpdate", "Value": 1, "Unit": "Count"}
         )

-    return pipeline
+    return event
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_repository.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_repository.py
index a8015d656..69ecce656 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_repository.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_repository.py
@@ -16,47 +16,74 @@
 METRICS = ADFMetrics(CLOUDWATCH, "PIPELINE_MANAGEMENT/REPO")
 LOGGER = configure_logger(__name__)
 DEPLOYMENT_ACCOUNT_REGION = os.environ["AWS_REGION"]
-DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"]


-def lambda_handler(pipeline, _):
-    """Main Lambda Entry point"""
+def lambda_handler(event, _):
+    """
+    Main Lambda Entry point, responsible for creating the CodeCommit
+    repository if required.
+
+    Args:
+        event (dict): The ADF Pipeline Management Input event, holding the
+            pipeline definition and event source details.
+
+    Returns:
+        dict: The input event.
+    """
+    pipeline = event.get('pipeline_definition')
+    source_provider = (
+        pipeline.get("default_providers", {})
+        .get("source", {})
+        .get("provider", "codecommit")
+    )
+    if source_provider != "codecommit":
+        LOGGER.debug(
+            "This pipeline is not a CodeCommit source provider. "
+            "No actions required."
+        )
+        return event
+
     parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
     auto_create_repositories = parameter_store.fetch_parameter(
         "auto_create_repositories"
     )
-    LOGGER.info(auto_create_repositories)
-    if auto_create_repositories == "enabled":
-        code_account_id = (
-            pipeline.get("default_providers", {})
-            .get("source", {})
-            .get("properties", {})
-            .get("account_id", {})
+    LOGGER.debug("Auto create repositories is: %s", auto_create_repositories)
+    if auto_create_repositories != "enabled":
+        LOGGER.debug(
+            "ADF is not configured to automatically create CodeCommit "
+            "repositories if they don't exist yet."
         )
-        has_custom_repo = (
-            pipeline.get("default_providers", {})
-            .get("source", {})
-            .get("properties", {})
-            .get("repository", {})
+        return event
+
+    code_account_id = (
+        pipeline.get("default_providers", {})
+        .get("source", {})
+        .get("properties", {})
+        .get("account_id")
+    )
+    has_custom_repo = (
+        pipeline.get("default_providers", {})
+        .get("source", {})
+        .get("properties", {})
+        .get("repository", {})
+    )
+    if (
+        code_account_id
+        and str(code_account_id).isdigit()
+        and not has_custom_repo
+    ):
+        repo = Repo(
+            code_account_id,
+            pipeline.get("name"),
+            pipeline.get("description"),
         )
-        if (
-            auto_create_repositories
-            and code_account_id
-            and str(code_account_id).isdigit()
-            and not has_custom_repo
-        ):
-            repo = Repo(
-                code_account_id,
-                pipeline.get("name"),
-                pipeline.get("description"),
-            )
-            repo.create_update()
-            METRICS.put_metric_data(
-                {
-                    "MetricName": "CreateOrUpdate",
-                    "Value": 1,
-                    "Unit": "Count",
-                }
-            )
-
-    return pipeline
+        repo.create_update()
+        METRICS.put_metric_data(
+            {
+                "MetricName": "CreateOrUpdate",
+                "Value": 1,
+                "Unit": "Count",
+            }
+        )
+
+    return event
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/generate_pipeline_inputs.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/generate_pipeline_inputs.py
index ad5c080c1..b7e9cdf02 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/generate_pipeline_inputs.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/generate_pipeline_inputs.py
@@ -32,20 +32,28 @@ def store_regional_parameter_config(
     either as top level regions for a pipeline or stage specific regions
     These are only used to track pipelines.
     """
-    if pipeline.top_level_regions:
-        parameter_store.put_parameter(
-            f"/deployment/{deployment_map_source}/{pipeline.name}/regions",
-            str(list(set(pipeline.top_level_regions))),
-        )
-        return
-
     parameter_store.put_parameter(
         f"/deployment/{deployment_map_source}/{pipeline.name}/regions",
-        str(list(set(Pipeline.flatten_list(pipeline.stage_regions)))),
+        str(pipeline.get_all_regions()),
     )


 def fetch_required_ssm_params(pipeline_input, regions):
+    """
+    Fetch the required SSM parameters for the regions of this pipeline.
+
+    Args:
+        pipeline_input (dict): The pipeline input dictionary.
+
+        regions ([str]): The regions of the pipeline.
+
+    Returns:
+        dict[str, dict[str,str] | str]:
+            The SSM parameters in a dictionary. Where the key is the region or
+            a generic SSM parameter key for this pipeline. The value is either
+            a dictionary holding the key/value pairs of SSM parameters, or the
+            value of the generic SSM parameter.
+    """
     output = {}
     for region in regions:
         parameter_store = ParameterStore(region, boto3)
@@ -61,43 +69,62 @@ def fetch_required_ssm_params(pipeline_input, regions):
             output[region]["modules"] = parameter_store.fetch_parameter(
                 "deployment_account_bucket"
             )
-    output['default_scm_branch'] = parameter_store.fetch_parameter(
-        'default_scm_branch',
+    output["default_scm_branch"] = parameter_store.fetch_parameter(
+        "default_scm_branch",
     )

     codestar_connection_path = (
         pipeline_input
-        .get('default_providers', {})
-        .get('source')
-        .get('properties', {})
-        .get('codestar_connection_path', {})
+        .get("default_providers", {})
+        .get("source")
+        .get("properties", {})
+        .get("codestar_connection_path")
     )
     if codestar_connection_path:
-        output['codestar_connection_arn'] = (
+        output["codestar_connection_arn"] = (
             parameter_store.fetch_parameter(codestar_connection_path)
         )
     return output


-def generate_pipeline_inputs(pipeline, organizations, parameter_store):
+def generate_pipeline_inputs(
+    pipeline,
+    deployment_map_source,
+    organizations,
+    parameter_store,
+):
+    """
+    Generate the pipeline inputs for the given pipeline definition.
+
+    Args:
+        pipeline (dict): The pipeline definition, as specified in the
+            deployment map that defines this pipeline.
+
+        deployment_map_source (str): The deployment map source (i.e. "S3").
+
+        organizations (Organizations): The Organizations class instance.
+
+        parameter_store (ParameterStore): The Parameter Store class instance.
+    """
     data = {}
     pipeline_object = Pipeline(pipeline)
     regions = []
     for target in pipeline.get("targets", []):
         target_structure = TargetStructure(target)
-        for step in target_structure.target:
-            regions = step.get(
-                "regions", pipeline.get("regions", DEPLOYMENT_ACCOUNT_REGION)
-            )
+        for raw_step in target_structure.target:
+            step = pipeline_object.merge_in_deploy_defaults(raw_step)
             paths_tags = []
             for path in step.get("path", []):
                 paths_tags.append(path)
             if step.get("tags") is not None:
                 paths_tags.append(step.get("tags", {}))
             for path_or_tag in paths_tags:
-                pipeline_object.stage_regions.append(regions)
+                pipeline_object.stage_regions.append(step.get("regions"))
                 pipeline_target = Target(
-                    path_or_tag, target_structure, organizations, step, regions
+                    path_or_tag,
+                    target_structure,
+                    organizations,
+                    step,
                 )
                 pipeline_target.fetch_accounts_for_target()
                 # Targets should be a list of lists.
@@ -116,29 +143,39 @@ def generate_pipeline_inputs(pipeline, organizations, parameter_store): if DEPLOYMENT_ACCOUNT_REGION not in regions: pipeline_object.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION) - pipeline_object.generate_input() + data["pipeline_input"] = pipeline_object.generate_input() data["ssm_params"] = fetch_required_ssm_params( - pipeline_object.input, - pipeline_object.input["regions"] or [DEPLOYMENT_ACCOUNT_REGION] + data["pipeline_input"], + data["pipeline_input"]["regions"], ) - data["input"] = pipeline_object.input - if 'codestar_connection_arn' in data["ssm_params"]: - data['input']['default_providers']['source']['properties'][ - 'codestar_connection_arn' - ] = data["ssm_params"]['codestar_connection_arn'] - data['input']['default_scm_branch'] = data["ssm_params"].get( - 'default_scm_branch', + if "codestar_connection_arn" in data["ssm_params"]: + data["pipeline_input"]["default_providers"]["source"]["properties"][ + "codestar_connection_arn" + ] = data["ssm_params"]["codestar_connection_arn"] + data["pipeline_input"]["default_scm_branch"] = data["ssm_params"].get( + "default_scm_branch", ) store_regional_parameter_config( pipeline_object, parameter_store, - pipeline.get("deployment_map_source"), + deployment_map_source, ) return data -def lambda_handler(pipeline, _): - """Main Lambda Entry point""" +def lambda_handler(event, _): + """ + Main Lambda Entry point, responsible to generate the pipeline input + data based on the pipeline definition. + + Args: + event (dict): The ADF Pipeline Management State Machine input object, + holding the pipeline definition. + + Returns: + dict: The input event enriched with the pipeline inputs and ssm + parameter values retrieved. + """ parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3) sts = STS() cross_account_role_name = parameter_store.fetch_parameter( @@ -146,13 +183,21 @@ def lambda_handler(pipeline, _): ) role = sts.assume_cross_account_role( ( - f'arn:{get_partition(DEPLOYMENT_ACCOUNT_REGION)}:iam::' - f'{ROOT_ACCOUNT_ID}:role/{cross_account_role_name}-readonly' + f"arn:{get_partition(DEPLOYMENT_ACCOUNT_REGION)}:iam::" + f"{ROOT_ACCOUNT_ID}:role/{cross_account_role_name}-readonly" ), "pipeline", ) organizations = Organizations(role) - output = generate_pipeline_inputs(pipeline, organizations, parameter_store) + pipeline_input_data = generate_pipeline_inputs( + event.get("pipeline_definition"), + event.get("deployment_map_source"), + organizations, + parameter_store, + ) - return output + return { + **event, + **pipeline_input_data, + } diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/identify_out_of_date_pipelines.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/identify_out_of_date_pipelines.py index 66a800b67..e83a95817 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/identify_out_of_date_pipelines.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/identify_out_of_date_pipelines.py @@ -19,36 +19,80 @@ LOGGER = configure_logger(__name__) S3_BUCKET_NAME = os.environ["S3_BUCKET_NAME"] -DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] ADF_PIPELINE_PREFIX = os.environ["ADF_PIPELINE_PREFIX"] DEPLOYMENT_ACCOUNT_REGION = os.environ["AWS_REGION"] DEPLOYMENT_PREFIX = "/deployment/" S3_BACKED_DEPLOYMENT_PREFIX = 
f"{DEPLOYMENT_PREFIX}S3/" -def download_deployment_maps(resource, prefix, local): - paginator = resource.meta.client.get_paginator("list_objects") +def download_deployment_maps(s3_resource, prefix, local): + """ + Download the deployment maps using the S3 resource. + It will only iterate over the deployment maps that match the specified + prefix and will store them in the local directory as requested. + + If any CommonPrefixes are found (like folders in a file system), then + it will call itself recursively. + + Args: + s3_resource (boto3.resource.S3): The S3 resource to use. + + prefix (str): The prefix that the objects should match. + + local (str): The local directory to store the files. + """ + paginator = s3_resource.meta.client.get_paginator("list_objects") for result in paginator.paginate( Bucket=S3_BUCKET_NAME, Delimiter="/", Prefix=prefix ): - LOGGER.debug("Downloaded deployment map: %s", result) + LOGGER.debug("Found the following deployment map: %s", result) for subdir in result.get("CommonPrefixes", []): - download_deployment_maps(resource, subdir.get("Prefix"), local) + # Recursive call: + download_deployment_maps( + s3_resource, + subdir.get("Prefix"), + local, + ) + for file in result.get("Contents", []): LOGGER.debug("File content in deployment map: %s", file) dest_path_name = os.path.join(local, file.get("Key")) if not os.path.exists(os.path.dirname(dest_path_name)): os.makedirs(os.path.dirname(dest_path_name)) - resource.meta.client.download_file( - S3_BUCKET_NAME, file.get("Key"), dest_path_name + s3_resource.meta.client.download_file( + S3_BUCKET_NAME, + file.get("Key"), + dest_path_name, ) def get_current_pipelines(parameter_store): - return parameter_store.fetch_parameters_by_path(S3_BACKED_DEPLOYMENT_PREFIX) + """ + Get pipelines that are defined currently. + + Args: + parameter_store (ParameterStore): The ParameterStore class instance + to use. + + Returns: + [str]: The parameter keys of the pipelines defined, these could include + stale pipelines. + """ + return parameter_store.fetch_parameters_by_path( + S3_BACKED_DEPLOYMENT_PREFIX, + ) def identify_out_of_date_pipelines(pipeline_names, current_pipelines): + """ + Identify which pipelines are out of date. + + Args: + pipeline_names (set[str]): The pipeline names that should remain. + + current_pipelines (set[str]): The pipelines defined at the moment, + including the pipeline names that could be stale. + """ return [ { "full_pipeline_name": f"{ADF_PIPELINE_PREFIX}{name}", @@ -59,11 +103,26 @@ def identify_out_of_date_pipelines(pipeline_names, current_pipelines): def lambda_handler(event, _): - output = event.copy() - s3 = boto3.resource("s3") + """ + Lambda handler, processing the pipelines that are defined and matching + those against the parameters of the pipelines that were created before. + + The pipelines that have parameters but are no longer defined are stale + and should be deleted. + + Args: + event (dict): The input event from the ADF Pipeline Management state + machine. + + Returns: + dict: The pipelines to be deleted, the traceroot, and hash of the + pipeline to be deleted dict. 
+ """ + LOGGER.debug("Received: %s", event) + s3_resource = boto3.resource("s3") deployment_map = None with tempfile.TemporaryDirectory() as tmp_dir_path: - download_deployment_maps(s3, "", tmp_dir_path) + download_deployment_maps(s3_resource, "", tmp_dir_path) deployment_map = DeploymentMap( None, None, @@ -81,7 +140,8 @@ def lambda_handler(event, _): p.get("name") for p in deployment_map.map_contents["pipelines"] } out_of_date_pipelines = identify_out_of_date_pipelines( - pipeline_names, current_pipelines + pipeline_names, + current_pipelines, ) output = {} diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/process_deployment_map.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/process_deployment_map.py index cbd17fad8..9fcb432e4 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/process_deployment_map.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/process_deployment_map.py @@ -19,8 +19,6 @@ LOGGER = configure_logger(__name__) -DEPLOYMENT_ACCOUNT_REGION = os.environ["AWS_REGION"] -DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] PIPELINE_MANAGEMENT_STATEMACHINE = os.getenv( "PIPELINE_MANAGEMENT_STATE_MACHINE", ) @@ -28,9 +26,6 @@ ADF_VERSION_METADATA_KEY = "adf_version" -_cache = None - - class DeploymentMapFileData(TypedDict): """ Class used to return the deployment map file data and its related @@ -136,10 +131,33 @@ def get_file_from_s3( def start_executions( sfn_client, + deployment_map_name: str, deployment_map, codepipeline_execution_id: str, request_id: str, ): + """ + Kick-off the ADF Pipeline Management State Machine. + Where each pipeline will be created/updated in its own Step Function + invocation/execution. + + Args: + sfn_client (boto3.client.StepFunction): + The Step Function boto3 client. + + deployment_map_name (str): + The name of the deployment map file that is the initiating source + of this create/update pipelines process. + + deployment_map (dict): + The deployment map definition of the pipeline. + + codepipeline_execution_id (str): + The CodePipeline execution id. + + request_id (str): + The Lambda function request id. + """ if not codepipeline_execution_id: codepipeline_execution_id = "no-codepipeline-exec-id-found" short_request_id = request_id[-12:] @@ -151,7 +169,6 @@ def start_executions( ) for pipeline in deployment_map.get("pipelines"): LOGGER.debug("Payload: %s", pipeline) - pipeline = {**pipeline, "deployment_map_source": "S3"} full_pipeline_name = pipeline.get('name', 'no-pipeline-name') # AWS Step Functions supports max 80 characters. # Since the run_id equals 49 characters plus the dash, we have 30 @@ -162,12 +179,27 @@ def start_executions( sfn_client.start_execution( stateMachineArn=PIPELINE_MANAGEMENT_STATEMACHINE, name=sfn_execution_name, - input=json.dumps(pipeline), + input=json.dumps({ + 'deployment_map_source': 'S3', + 'deployment_map_name': deployment_map_name, + 'pipeline_definition': pipeline, + }), ) def lambda_handler(event, context): - """Main Lambda Entry point""" + """ + Main Lambda Entry point, responsible for iterating over the deployment + map holding one or more pipelines and initiating the ADF Pipeline + Management State Machine to create/update the pipelines defined inside. 
+ + Args: + event (dict): The S3 input event that invoked this Lambda Function. + context (Context): The Lambda context. + + Returns: + dict: The input event is returned. + """ output = event.copy() s3_resource = boto3.resource("s3") sfn_client = boto3.client("stepfunctions") @@ -179,6 +211,7 @@ def lambda_handler(event, context): ) start_executions( sfn_client, + s3_details.get("object_key"), deployment_map["content"], codepipeline_execution_id=deployment_map.get("execution_id"), request_id=context.aws_request_id, diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/store_pipeline_definition.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/store_pipeline_definition.py index bc4c5c347..232007868 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/store_pipeline_definition.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/store_pipeline_definition.py @@ -12,21 +12,44 @@ LOGGER = configure_logger(__name__) -DEPLOYMENT_ACCOUNT_REGION = os.environ["AWS_REGION"] -DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] S3_BUCKET_NAME = os.environ["S3_BUCKET_NAME"] -def upload_event_to_s3(s3, definition): - pipeline_name = definition.get("input", {}).get("name") - s3_object = s3.Object(S3_BUCKET_NAME, f"pipelines/{pipeline_name}/definition.json") +def upload_event_to_s3(s3_resource, definition): + """ + Upload the event received to the Pipeline Definition Bucket. + + Args: + s3_resource (boto3.S3.resource): The S3 resource. + definition (any): The pipeline definition, input and other data + related to the pipeline to store. + + Returns: + str: The location where the definition is stored in the S3 bucket. + """ + pipeline_name = definition.get("pipeline_input", {}).get("name") + s3_object = s3_resource.Object( + S3_BUCKET_NAME, + f"pipelines/{pipeline_name}/definition.json", + ) s3_object.put(Body=json.dumps(definition).encode("UTF-8")) return f"{S3_BUCKET_NAME}/pipelines/{pipeline_name}/" def lambda_handler(event, _): + """ + Writes the pipeline definition to S3. + + Args: + event (dict): The input event, that is also returned as the output. + + Returns: + dict: The input event + definition_location. 
+ """ output = event.copy() - s3 = boto3.resource("s3") - location = upload_event_to_s3(s3, event) + s3_resource = boto3.resource("s3") + + location = upload_event_to_s3(s3_resource, event) + output["definition_location"] = location return output diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_cloudformation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_cloudformation.py index 63aa5ade1..869bfe77b 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_cloudformation.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_cloudformation.py @@ -16,16 +16,17 @@ class CloudFormation(core.Construct): - def __init__(self, scope: core.Construct, id: str, **kwargs): #pylint: disable=W0622, W0235 + # pylint: disable=W0622, W0235 + def __init__(self, scope: core.Construct, id: str, **kwargs): super().__init__(scope, id, **kwargs) @staticmethod def generate_actions(targets, region, map_params, target_approval_mode): - _actions = [] + actions = [] if not isinstance(targets, list): targets = [targets] for target in targets: - _actions.append( + actions.append( adf_codepipeline.Action( name=f"{target['name']}-{region}-create", provider="CloudFormation", @@ -39,7 +40,7 @@ def generate_actions(targets, region, map_params, target_approval_mode): ).config, ) if target_approval_mode: - _actions.append( + actions.append( adf_codepipeline.Action( name=f"{target['name']}-{region}", provider="Manual", @@ -51,7 +52,7 @@ def generate_actions(targets, region, map_params, target_approval_mode): action_name=f"{target['name']}-{region}", ).config ) - _actions.append( + actions.append( adf_codepipeline.Action( name=f"{target['name']}-{region}-execute", provider="CloudFormation", @@ -64,4 +65,4 @@ def generate_actions(targets, region, map_params, target_approval_mode): action_name=f"{target['name']}-{region}-execute", ).config ) - return _actions + return actions diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild.py index 0ac3c8106..ae26f6a14 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codebuild.py @@ -36,6 +36,8 @@ def __init__( id: str, shared_modules_bucket: str, deployment_region_kms: str, + deployment_map_source: str, + deployment_map_name: str, map_params: dict, target, **kwargs, @@ -87,6 +89,8 @@ def __init__( environment_variables=CodeBuild.generate_build_env_variables( _codebuild, shared_modules_bucket, + deployment_map_source, + deployment_map_name, map_params, target, ), @@ -176,6 +180,8 @@ def __init__( environment_variables=CodeBuild.generate_build_env_variables( _codebuild, shared_modules_bucket, + deployment_map_source, + deployment_map_name, map_params, ), privileged=( @@ -382,12 +388,16 @@ def determine_build_image(codebuild_id, scope, target, map_params): def generate_build_env_variables( codebuild, shared_modules_bucket, + deployment_map_source, + deployment_map_name, map_params, target=None, ): build_env_vars = { "PYTHONPATH": "./adf-build/python", "ADF_PROJECT_NAME": map_params['name'], + "ADF_DEPLOYMENT_MAP_SOURCE": 
deployment_map_source, + "ADF_DEPLOYMENT_MAP_NAME": deployment_map_name, "S3_BUCKET_NAME": shared_modules_bucket, "ACCOUNT_ID": core.Aws.ACCOUNT_ID, **( diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py index 2a3fa9210..ac129f52c 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py @@ -166,7 +166,7 @@ def _generate_configuration(self): .get('default_providers', {}) .get('deploy', {}) .get('properties', {}) - .get('extract') + .get('extract', False) )) ), "ObjectKey": ( diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py index 97b3a6388..be85d4f99 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py @@ -1,7 +1,7 @@ # Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: MIT-0 -"""This is the functionality for generating a default adf pipeline. +"""This is the functionality for generating a default ADF pipeline. """ import os @@ -28,10 +28,12 @@ def generate_adf_default_pipeline(scope: core.Stack, stack_input): - _stages = [] + stages = [] notification_config = ( - stack_input["input"].get("params", {}).get("notification_endpoint", {}) + stack_input["pipeline_input"] + .get("params", {}) + .get("notification_endpoint", {}) ) needs_topic_arn = ( @@ -39,42 +41,48 @@ def generate_adf_default_pipeline(scope: core.Stack, stack_input): or notification_config.get('type', '') == "lambda" ) if needs_topic_arn: - stack_input["input"]["topic_arn"] = ( + stack_input["pipeline_input"]["topic_arn"] = ( adf_notifications.Notifications( scope, "adf_notifications", - stack_input["input"], + stack_input["pipeline_input"], ).topic_arn ) - _source_name = generate_source_stage_for_pipeline( - _stages, - scope, - stack_input, + source_stage = _generate_source_stage_for_pipeline(scope, stack_input) + if source_stage is not None: + stages.append(source_stage) + + build_stage = _generate_build_stage_for_pipeline(scope, stack_input) + if build_stage is not None: + stages.append(build_stage) + + stages.extend( + _generate_stages_with_targets_for_pipeline(scope, stack_input) ) - generate_build_stage_for_pipeline(_stages, scope, stack_input) - generate_targets_for_pipeline(_stages, scope, stack_input) - _pipeline = adf_codepipeline.Pipeline( + pipeline = adf_codepipeline.Pipeline( scope, "code_pipeline", - stack_input["input"], + stack_input["pipeline_input"], stack_input["ssm_params"], - _stages, + stages, ) - if "github" in _source_name: + if "github" in _get_source_name(stack_input): adf_github.GitHub.create_webhook_when_required( scope, - _pipeline.cfn, - stack_input["input"], + pipeline.cfn, + stack_input["pipeline_input"], ) pipeline_triggers = ( - stack_input["input"].get("triggers", {}).get("triggered_by") + stack_input["pipeline_input"] + .get("triggers", {}) + .get("triggered_by") ) if pipeline_triggers: for trigger_type, 
trigger_config in pipeline_triggers.items(): - _pipeline.add_pipeline_trigger( + pipeline.add_pipeline_trigger( trigger_type=trigger_type, trigger_config=trigger_config, ) @@ -87,94 +95,100 @@ def generate_adf_default_pipeline(scope: core.Stack, stack_input): adf_chatbot.PipelineNotifications( scope, "adf_chatbot_notifications", - _pipeline.cfn, + pipeline.cfn, notification_config, ) -def generate_source_stage_for_pipeline(_stages, scope, stack_input): - _source_name = ( - stack_input["input"]["default_providers"]["source"]["provider"].lower() +def _get_source_name(stack_input): + return ( + stack_input["pipeline_input"]["default_providers"] + .get("source", {}) + .get("provider", "codecommit") + .lower() ) - if "codecommit" in _source_name: - _stages.append( - adf_codecommit.CodeCommit( - scope, - "source", - stack_input["input"], - ).source, - ) - elif "codestar" in _source_name: - _stages.append( - adf_codestar.CodeStar( - scope, - "source", - stack_input['input'], - ).source, - ) - elif "github" in _source_name: - _stages.append( - adf_github.GitHub( - scope, - "source", - stack_input["input"], - ).source, - ) - elif "s3" in _source_name: - _stages.append( - adf_s3.S3( - scope, - "source", - stack_input["input"], - ).source, - ) - return _source_name -def generate_build_stage_for_pipeline(_stages, scope, stack_input): - build_name = ( - stack_input["input"]["default_providers"]["build"].get( - "provider", - "", - ).lower() - ) +def _generate_source_stage_for_pipeline(scope, stack_input): + source_name = _get_source_name(stack_input) + if "codecommit" in source_name: + return adf_codecommit.CodeCommit( + scope, + "source", + stack_input["pipeline_input"], + ).source + if "codestar" in source_name: + return adf_codestar.CodeStar( + scope, + "source", + stack_input['pipeline_input'], + ).source + if "github" in source_name: + return adf_github.GitHub( + scope, + "source", + stack_input["pipeline_input"], + ).source + if "s3" in source_name: + return adf_s3.S3( + scope, + "source", + stack_input["pipeline_input"], + ).source + return None + + +def _generate_build_stage_for_pipeline(scope, stack_input): build_enabled = ( - stack_input["input"]["default_providers"]["build"].get("enabled", True) + stack_input["pipeline_input"]["default_providers"] + .get("build", {}) + .get("enabled", True) + ) + if build_enabled is not True: + return None + + build_name = ( + stack_input["pipeline_input"]["default_providers"] + .get("build", {}) + .get("provider", "") + .lower() ) - if "codebuild" in build_name and build_enabled: - _stages.append( - adf_codebuild.CodeBuild( - scope, - "build", - stack_input["ssm_params"][ADF_DEPLOYMENT_REGION]["modules"], - stack_input["ssm_params"][ADF_DEPLOYMENT_REGION]["kms"], - stack_input["input"], - {}, # Empty target since this is a build only stage - ).build - ) - elif "jenkins" in build_name: - _stages.append( - adf_jenkins.Jenkins( - scope, - "build", - stack_input["input"], - ).build - ) + if "codebuild" in build_name: + return adf_codebuild.CodeBuild( + scope, + "build", + stack_input["ssm_params"][ADF_DEPLOYMENT_REGION]["modules"], + stack_input["ssm_params"][ADF_DEPLOYMENT_REGION]["kms"], + stack_input["deployment_map_source"], + stack_input["deployment_map_name"], + stack_input["pipeline_input"], + {}, # Empty target since this is a build only stage + ).build + if "jenkins" in build_name: + return adf_jenkins.Jenkins( + scope, + "build", + stack_input["pipeline_input"], + ).build + return None -def generate_targets_for_pipeline(_stages, scope, stack_input): + 
+def _generate_stages_with_targets_for_pipeline(scope, stack_input): + stages = [] for index, targets in enumerate( - stack_input["input"].get("environments", {}).get("targets", []) + stack_input["pipeline_input"] + .get("environments", {}) + .get("targets", []) ): top_level_deployment_type = ( - stack_input["input"] + stack_input["pipeline_input"] .get("default_providers", {}) .get("deploy", {}) - .get("provider", "") - or "cloudformation" + .get("provider", "cloudformation") ) top_level_action = ( - stack_input["input"] + stack_input["pipeline_input"] .get("default_providers", {}) .get("deploy", {}) .get("properties", {}) @@ -182,182 +196,181 @@ def generate_targets_for_pipeline(_stages, scope, stack_input): ) for wave_index, wave in enumerate(targets): - _actions = [] - _is_approval = ( + actions = [] + is_approval = ( wave[0].get("name", "").startswith("approval") or wave[0].get("provider", "") == "approval" ) - _action_type_name = "approval" if _is_approval else "deployment" - _stage_name = ( + action_type_name = "approval" if is_approval else "deployment" + stage_name = ( # 0th Index since step names are for entire stages not # per target. f"{wave[0].get('step_name')}-{wave_index}" if wave[0].get("step_name") - else f"{_action_type_name}-stage-{index + 1}-wave-{wave_index}" + else f"{action_type_name}-stage-{index + 1}-wave-{wave_index}" ) for target in wave: target_stage_override = ( - target.get("provider") - or top_level_deployment_type + target.get("provider", top_level_deployment_type) ) is_approval = ( target.get("name") == "approval" or target.get("provider", "") == "approval" ) if is_approval: - _actions.extend( - [ - adf_codepipeline.Action( - name=f"wave-{wave_index}-{target.get('name')}", - provider="Manual", - category="Approval", - target=target, - run_order=1, - map_params=stack_input["input"], - action_name=f"{target.get('name')}", - ).config - ] - ) + actions.extend([ + adf_codepipeline.Action( + name=f"wave-{wave_index}-{target.get('name')}", + provider="Manual", + category="Approval", + target=target, + run_order=1, + map_params=stack_input["pipeline_input"], + action_name=f"{target.get('name')}", + ).config + ]) continue if "codebuild" in target_stage_override: - _actions.extend( - [ - adf_codebuild.CodeBuild( - scope, - # Use the name of the pipeline for CodeBuild - # instead of the target name as it will always - # operate from the deployment account. - ( - f"{stack_input['input']['name']}-target-" - f"{index + 1}-wave-{wave_index}" - ), - stack_input["ssm_params"][ADF_DEPLOYMENT_REGION]["modules"], - stack_input["ssm_params"][ADF_DEPLOYMENT_REGION]["kms"], - stack_input["input"], - target, - ).deploy - ] + deploy_params = ( + stack_input["ssm_params"][ADF_DEPLOYMENT_REGION] ) + actions.extend([ + adf_codebuild.CodeBuild( + scope, + # Use the name of the pipeline for CodeBuild + # instead of the target name as it will always + # operate from the deployment account. 
+ ( + f"{stack_input['pipeline_input']['name']}-" + f"target-{index + 1}-wave-{wave_index}" + ), + deploy_params["modules"], + deploy_params["kms"], + stack_input["deployment_map_source"], + stack_input["deployment_map_name"], + stack_input["pipeline_input"], + target, + ).deploy + ]) continue regions = target.get("regions", []) - generate_deployment_action_per_region( - _actions, - regions, - stack_input, - target, - target_stage_override, - top_level_action, + actions.extend( + # Returns a list of actions: + _generate_deployment_action_per_region( + regions, + stack_input, + target, + target_stage_override, + top_level_action, + ) ) - _stages.append( + stages.append( _codepipeline.CfnPipeline.StageDeclarationProperty( - name=_stage_name, - actions=_actions, + name=stage_name, + actions=actions, ) ) + return stages -def generate_deployment_action_per_region( - _actions, +def _generate_deployment_action_per_region( regions, stack_input, target, target_stage_override, top_level_action ): + actions = [] for region in regions: if "cloudformation" in target_stage_override: target_approval_mode = target.get("properties", {}).get( "change_set_approval", False ) - _target_action_mode = target.get("properties", {}).get("action") - action_mode = _target_action_mode or top_level_action + target_action_mode = target.get("properties", {}).get("action") + action_mode = target_action_mode or top_level_action if action_mode: - _actions.extend( - [ - adf_codepipeline.Action( - name=f"{target['name']}-{region}", - provider="CloudFormation", - category="Deploy", - region=region, - target=target, - action_mode=action_mode, - run_order=1, - map_params=stack_input["input"], - action_name=f"{target['name']}-{region}", - ).config - ] - ) - continue - _actions.extend( - adf_cloudformation.CloudFormation.generate_actions( - target, region, stack_input["input"], target_approval_mode - ) - ) - elif "codedeploy" in target_stage_override: - _actions.extend( - [ + actions.extend([ adf_codepipeline.Action( name=f"{target['name']}-{region}", - provider="CodeDeploy", + provider="CloudFormation", category="Deploy", region=region, target=target, - action_mode=top_level_action, + action_mode=action_mode, run_order=1, - map_params=stack_input["input"], + map_params=stack_input["pipeline_input"], action_name=f"{target['name']}-{region}", ).config - ] + ]) + continue + actions.extend( + # ^^ Using extend without list, + # as this generates multiple actions in a list + adf_cloudformation.CloudFormation.generate_actions( + target, + region, + stack_input["pipeline_input"], + target_approval_mode, + ) ) + elif "codedeploy" in target_stage_override: + actions.extend([ + adf_codepipeline.Action( + name=f"{target['name']}-{region}", + provider="CodeDeploy", + category="Deploy", + region=region, + target=target, + action_mode=top_level_action, + run_order=1, + map_params=stack_input["pipeline_input"], + action_name=f"{target['name']}-{region}", + ).config + ]) elif "s3" in target_stage_override: - _actions.extend( - [ - adf_codepipeline.Action( - name=f"{target['name']}-{region}", - provider="S3", - category="Deploy", - region=region, - target=target, - action_mode=top_level_action, - run_order=1, - map_params=stack_input["input"], - action_name=f"{target['name']}-{region}", - ).config - ] - ) + actions.extend([ + adf_codepipeline.Action( + name=f"{target['name']}-{region}", + provider="S3", + category="Deploy", + region=region, + target=target, + action_mode=top_level_action, + run_order=1, + map_params=stack_input["pipeline_input"], 
+ action_name=f"{target['name']}-{region}", + ).config + ]) elif "lambda" in target_stage_override: - _actions.extend( - [ - adf_codepipeline.Action( - name=f"{target['name']}-{region}", - provider="Lambda", - category="Invoke", - region=region, - target=target, - action_mode=top_level_action, - run_order=1, - map_params=stack_input["input"], - action_name=f"{target['name']}-{region}", - ).config - ] - ) + actions.extend([ + adf_codepipeline.Action( + name=f"{target['name']}-{region}", + provider="Lambda", + category="Invoke", + region=region, + target=target, + action_mode=top_level_action, + run_order=1, + map_params=stack_input["pipeline_input"], + action_name=f"{target['name']}-{region}", + ).config + ]) elif "service_catalog" in target_stage_override: - _actions.extend( - [ - adf_codepipeline.Action( - name=f"{target['name']}-{region}", - provider="ServiceCatalog", - category="Deploy", - region=region, - target=target, - action_mode=top_level_action, - run_order=1, - map_params=stack_input["input"], - action_name=f"{target['name']}-{region}", - ).config - ] - ) + actions.extend([ + adf_codepipeline.Action( + name=f"{target['name']}-{region}", + provider="ServiceCatalog", + category="Deploy", + region=region, + target=target, + action_mode=top_level_action, + run_order=1, + map_params=stack_input["pipeline_input"], + action_name=f"{target['name']}-{region}", + ).config + ]) + return actions diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/main.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/main.py index 160939dc9..be2867054 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/main.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/main.py @@ -24,26 +24,30 @@ def __init__( scope: core.Construct, stack_input: dict, **kwargs - ) -> None: #pylint: disable=R0912, R0915 + ) -> None: # pylint: disable=R0912, R0915 """ Initialize the pipeline stack """ - super().__init__(scope, stack_input['input']['name'], **kwargs) + super().__init__( + scope, + stack_input["pipeline_input"]['name'], + **kwargs, + ) LOGGER.info( 'Pipeline creation/update of %s commenced', - stack_input['input']['name'], + stack_input['pipeline_input']['name'], ) - _pipeline_type = ( - stack_input['input'].get('params', {}).get( - 'type', - DEFAULT_PIPELINE - ).lower() + pipeline_type = ( + stack_input['pipeline_input'] + .get('params', {}) + .get('type', DEFAULT_PIPELINE) + .lower() ) - self.generate_pipeline(_pipeline_type, stack_input) + self.generate_pipeline(pipeline_type, stack_input) - def generate_pipeline(self, _pipeline_type, stack_input): - if _pipeline_type == DEFAULT_PIPELINE: + def generate_pipeline(self, pipeline_type, stack_input): + if pipeline_type == DEFAULT_PIPELINE: generate_default_pipeline(self, stack_input) else: - raise ValueError(f'{_pipeline_type} is not defined in main.py') + raise ValueError(f'{pipeline_type} is not defined in main.py') diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py index 112df7239..ae814661e 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py +++ 
b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py @@ -12,16 +12,18 @@ def test_pipeline_creation_outputs_as_expected_when_input_has_1_target_with_2_wa account_id = "123456789012" stack_input = { - "input": { + "pipeline_input": { "params": {}, "default_providers": {"deploy": {"provider": "codedeploy"}}, "regions": {}, }, "ssm_params": {"fake-region": {}}, + "deployment_map_source": "S3", + "deployment_map_name": "deployment_map.yml", } - stack_input["input"]["name"] = "test-stack" - stack_input["input"]["environments"] = { + stack_input["pipeline_input"]["name"] = "test-stack" + stack_input["pipeline_input"]["environments"] = { "targets": [ [ [ @@ -38,11 +40,11 @@ def test_pipeline_creation_outputs_as_expected_when_input_has_1_target_with_2_wa ] } - stack_input["input"]["default_providers"]["source"] = { + stack_input["pipeline_input"]["default_providers"]["source"] = { "provider": "codecommit", "properties": {"account_id": "123456789012"}, } - stack_input["input"]["default_providers"]["build"] = { + stack_input["pipeline_input"]["default_providers"]["build"] = { "provider": "codebuild", "properties": {"account_id": "123456789012"}, } @@ -76,16 +78,18 @@ def test_pipeline_creation_outputs_as_expected_when_input_has_2_targets_with_2_w account_id = "123456789012" stack_input = { - "input": { + "pipeline_input": { "params": {}, "default_providers": {"deploy": {"provider": "codedeploy"}}, "regions": {}, }, "ssm_params": {"fake-region": {}}, + "deployment_map_source": "S3", + "deployment_map_name": "deployment_map.yml", } - stack_input["input"]["name"] = "test-stack" - stack_input["input"]["environments"] = { + stack_input["pipeline_input"]["name"] = "test-stack" + stack_input["pipeline_input"]["environments"] = { "targets": [ [ [ @@ -103,11 +107,11 @@ def test_pipeline_creation_outputs_as_expected_when_input_has_2_targets_with_2_w ] } - stack_input["input"]["default_providers"]["source"] = { + stack_input["pipeline_input"]["default_providers"]["source"] = { "provider": "codecommit", "properties": {"account_id": "123456789012"}, } - stack_input["input"]["default_providers"]["build"] = { + stack_input["pipeline_input"]["default_providers"]["build"] = { "provider": "codebuild", "properties": {"account_id": "123456789012"}, } diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py index a1f239404..bd99b3af4 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_pipeline_creation.py @@ -13,9 +13,9 @@ @patch("cdk_stacks.main.generate_default_pipeline") def test_pipeline_generation_fails_if_pipeline_type_is_not_specified(mock): - stack_input = {"input": {"params": {}}} - stack_input["input"]["name"] = "test-stack" - stack_input["input"]["params"]["type"] = "fail" + stack_input = {"pipeline_input": {"params": {}}} + stack_input["pipeline_input"]["name"] = "test-stack" + stack_input["pipeline_input"]["params"]["type"] = "fail" app = core.App() with pytest.raises(ValueError): pipeline_stack = PipelineStack(app, stack_input) @@ -24,8 +24,8 @@ def test_pipeline_generation_fails_if_pipeline_type_is_not_specified(mock): 
@patch("cdk_stacks.main.generate_default_pipeline") def test_pipeline_generation_works_when_no_type_specified(mock): - stack_input = {"input": {"params": {}}} - stack_input["input"]["name"] = "test-stack" + stack_input = {"pipeline_input": {"params": {}}} + stack_input["pipeline_input"]["name"] = "test-stack" app = core.App() PipelineStack(app, stack_input) mock.assert_called() @@ -33,9 +33,9 @@ def test_pipeline_generation_works_when_no_type_specified(mock): @patch("cdk_stacks.main.generate_default_pipeline") def test_pipeline_generation_works_when_no_type_specified(mock): - stack_input = {"input": {"params": {}}} - stack_input["input"]["name"] = "test-stack" - stack_input["input"]["params"]["type"] = "Default" + stack_input = {"pipeline_input": {"params": {}}} + stack_input["pipeline_input"]["name"] = "test-stack" + stack_input["pipeline_input"]["params"]["type"] = "Default" app = core.App() PipelineStack(app, stack_input) @@ -47,17 +47,19 @@ def test_pipeline_creation_outputs_as_expected_when_source_is_s3_and_build_is_co account_id = "123456789012" stack_input = { - "input": {"params": {}, "default_providers": {}, "regions": {}}, + "pipeline_input": {"params": {}, "default_providers": {}, "regions": {}}, "ssm_params": {"fake-region": {}}, + "deployment_map_source": "S3", + "deployment_map_name": "deployment_map.yml", } - stack_input["input"]["name"] = "test-stack" + stack_input["pipeline_input"]["name"] = "test-stack" - stack_input["input"]["default_providers"]["source"] = { + stack_input["pipeline_input"]["default_providers"]["source"] = { "provider": "s3", "properties": {"account_id": "123456789012"}, } - stack_input["input"]["default_providers"]["build"] = { + stack_input["pipeline_input"]["default_providers"]["build"] = { "provider": "codebuild", "properties": {"account_id": "123456789012"}, } @@ -102,17 +104,19 @@ def test_pipeline_creation_outputs_as_expected_when_source_is_codecommit_and_bui account_id = "123456789012" stack_input = { - "input": {"params": {}, "default_providers": {}, "regions": {}}, + "pipeline_input": {"params": {}, "default_providers": {}, "regions": {}}, "ssm_params": {"fake-region": {}}, + "deployment_map_source": "S3", + "deployment_map_name": "deployment_map.yml", } - stack_input["input"]["name"] = "test-stack" + stack_input["pipeline_input"]["name"] = "test-stack" - stack_input["input"]["default_providers"]["source"] = { + stack_input["pipeline_input"]["default_providers"]["source"] = { "provider": "codecommit", "properties": {"account_id": "123456789012"}, } - stack_input["input"]["default_providers"]["build"] = { + stack_input["pipeline_input"]["default_providers"]["build"] = { "provider": "codebuild", "properties": {"account_id": "123456789012"}, } @@ -159,20 +163,22 @@ def test_pipeline_creation_outputs_as_expected_when_source_is_codecommit_with_co account_id = "123456789012" stack_input = { - "input": {"params": {}, "default_providers": {}, "regions": {}}, + "pipeline_input": {"params": {}, "default_providers": {}, "regions": {}}, "ssm_params": {"fake-region": {}}, + "deployment_map_source": "S3", + "deployment_map_name": "deployment_map.yml", } - stack_input["input"]["name"] = "test-stack" + stack_input["pipeline_input"]["name"] = "test-stack" - stack_input["input"]["default_providers"]["source"] = { + stack_input["pipeline_input"]["default_providers"]["source"] = { "provider": "codecommit", "properties": { "account_id": "123456789012", "output_artifact_format": "CODEBUILD_CLONE_REF", }, } - stack_input["input"]["default_providers"]["build"] = { + 
stack_input["pipeline_input"]["default_providers"]["build"] = { "provider": "codebuild", "properties": {"account_id": "123456789012"}, } @@ -220,7 +226,7 @@ def test_pipeline_creation_outputs_with_codeartifact_trigger(): account_id = "123456789012" stack_input = { - "input": { + "pipeline_input": { "params": {}, "default_providers": {}, "regions": {}, @@ -235,15 +241,17 @@ def test_pipeline_creation_outputs_with_codeartifact_trigger(): "ssm_params": { "fake-region": {}, }, + "deployment_map_source": "S3", + "deployment_map_name": "deployment_map.yml", } - stack_input["input"]["name"] = "test-stack" + stack_input["pipeline_input"]["name"] = "test-stack" - stack_input["input"]["default_providers"]["source"] = { + stack_input["pipeline_input"]["default_providers"]["source"] = { "provider": "codecommit", "properties": {"account_id": "123456789012"}, } - stack_input["input"]["default_providers"]["build"] = { + stack_input["pipeline_input"]["default_providers"]["build"] = { "provider": "codebuild", "properties": {"account_id": "123456789012"}, } @@ -278,7 +286,7 @@ def test_pipeline_creation_outputs_with_codeartifact_trigger_with_package_name() account_id = "123456789012" stack_input = { - "input": { + "pipeline_input": { "params": {}, "default_providers": {}, "regions": {}, @@ -294,15 +302,17 @@ def test_pipeline_creation_outputs_with_codeartifact_trigger_with_package_name() "ssm_params": { "fake-region": {}, }, + "deployment_map_source": "S3", + "deployment_map_name": "deployment_map.yml", } - stack_input["input"]["name"] = "test-stack" + stack_input["pipeline_input"]["name"] = "test-stack" - stack_input["input"]["default_providers"]["source"] = { + stack_input["pipeline_input"]["default_providers"]["source"] = { "provider": "codecommit", "properties": {"account_id": "123456789012"}, } - stack_input["input"]["default_providers"]["build"] = { + stack_input["pipeline_input"]["default_providers"]["build"] = { "provider": "codebuild", "properties": {"account_id": "123456789012"}, } @@ -342,7 +352,7 @@ def test_pipeline_creation_outputs_with_invalid_trigger_type(): account_id = "123456789012" stack_input = { - "input": { + "pipeline_input": { "params": {}, "default_providers": {}, "regions": {}, @@ -360,15 +370,17 @@ def test_pipeline_creation_outputs_with_invalid_trigger_type(): "ssm_params": { "fake-region": {}, }, + "deployment_map_source": "S3", + "deployment_map_name": "deployment_map.yml", } - stack_input["input"]["name"] = "test-stack" + stack_input["pipeline_input"]["name"] = "test-stack" - stack_input["input"]["default_providers"]["source"] = { + stack_input["pipeline_input"]["default_providers"]["source"] = { "provider": "codecommit", "properties": {"account_id": "123456789012"}, } - stack_input["input"]["default_providers"]["build"] = { + stack_input["pipeline_input"]["default_providers"]["build"] = { "provider": "codebuild", "properties": {"account_id": "123456789012"}, } @@ -398,7 +410,7 @@ def test_pipeline_creation_outputs_as_expected_when_notification_endpoint_is_cha account_id = "123456789012" stack_input = { - "input": { + "pipeline_input": { "params": { "notification_endpoint": { "target": "fake-config", @@ -411,15 +423,17 @@ def test_pipeline_creation_outputs_as_expected_when_notification_endpoint_is_cha "ssm_params": { "fake-region": {}, }, + "deployment_map_source": "S3", + "deployment_map_name": "deployment_map.yml", } - stack_input["input"]["name"] = "test-stack" + stack_input["pipeline_input"]["name"] = "test-stack" - stack_input["input"]["default_providers"]["source"] = { 
+ stack_input["pipeline_input"]["default_providers"]["source"] = { "provider": "codecommit", "properties": {"account_id": "123456789012"}, } - stack_input["input"]["default_providers"]["build"] = { + stack_input["pipeline_input"]["default_providers"]["build"] = { "provider": "codebuild", "properties": {"account_id": "123456789012"}, } diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/execute_pipeline_stacks.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/execute_pipeline_stacks.py index 728e94cb1..8e6ae7315 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/execute_pipeline_stacks.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/execute_pipeline_stacks.py @@ -38,7 +38,10 @@ def upload_pipeline(template_path, name, s3): and returning the URL that can be referenced in the CloudFormation create_stack call. """ - s3_object_path = s3.put_object(f"pipelines/{name}/global.yml", template_path) + s3_object_path = s3.put_object( + f"pipelines/{name}/global.yml", + template_path, + ) LOGGER.debug('Uploaded Pipeline Template %s to S3', s3_object_path) return s3_object_path @@ -74,9 +77,9 @@ def main(): name = ( os.path.splitext( template_path.split('/')[-1].split('.template')[0] - )[0] # Just stackname no extension and no .template + )[0] # Just the stack name, no extension and no .template ) - with open(template_path, encoding='utf-8') as _template_path: + with open(template_path, encoding='utf-8'): thread = PropagatingThread(target=worker_thread, args=( template_path, name, @@ -86,7 +89,8 @@ def main(): threads.append(thread) batch_mod = counter % 10 if batch_mod == 9: - # Set to 9, meaning we have hit a set of 10 threads since n % 10 + # Set to 9, meaning we have hit a set of 10 threads + # since n % 10 delay = random.randint(5, 11) LOGGER.debug( 'Waiting for %s seconds before starting next batch ' diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/generate_pipeline_inputs.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/generate_pipeline_inputs.py deleted file mode 100644 index bac31f86e..000000000 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/generate_pipeline_inputs.py +++ /dev/null @@ -1,236 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
-# SPDX-License-Identifier: MIT-0 - -""" -This file is pulled into CodeBuild containers -and used to build the pipeline CloudFormation stack inputs -""" -import json -import os -from thread import PropagatingThread - -import boto3 -from cache import Cache -from deployment_map import DeploymentMap -from errors import ParameterNotFoundError -from logger import configure_logger -from organizations import Organizations -from parameter_store import ParameterStore -from partition import get_partition -from pipeline import Pipeline -from repo import Repo -from rule import Rule -from s3 import S3 -from sts import STS -from target import Target, TargetStructure - -LOGGER = configure_logger(__name__) -DEPLOYMENT_ACCOUNT_REGION = os.environ["AWS_REGION"] -DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] -MASTER_ACCOUNT_ID = os.environ["MASTER_ACCOUNT_ID"] -ORGANIZATION_ID = os.environ["ORGANIZATION_ID"] -ADF_PIPELINE_PREFIX = os.environ["ADF_PIPELINE_PREFIX"] -SHARED_MODULES_BUCKET = os.environ["SHARED_MODULES_BUCKET"] -ADF_VERSION = os.environ["ADF_VERSION"] -ADF_LOG_LEVEL = os.environ["ADF_LOG_LEVEL"] - - -def ensure_event_bus_status(organization_id): - events = boto3.client("events") - events.put_permission( - Action="events:PutEvents", - Principal="*", - StatementId="OrgAccessForEventBus", - Condition={ - "Type": "StringEquals", - "Key": "aws:PrincipalOrgID", - "Value": organization_id, - }, - ) - - -def store_regional_parameter_config(pipeline, parameter_store): - """ - Responsible for storing the region information for specific - pipelines. These regions are defined in the deployment_map - either as top level regions for a pipeline or stage specific regions - """ - if pipeline.top_level_regions: - parameter_store.put_parameter( - f"/deployment/{pipeline.nam}/regions", - str(list(set(pipeline.top_level_regions))), - ) - return - - parameter_store.put_parameter( - f"/deployment/{pipeline.name}/regions", - str(list(set(Pipeline.flatten_list(pipeline.stage_regions)))), - ) - - -def fetch_required_ssm_params(regions): - output = {} - for region in regions: - parameter_store = ParameterStore(region, boto3) - output[region] = { - "s3": parameter_store.fetch_parameter( - f"/cross_region/s3_regional_bucket/{region}", - ), - "kms": parameter_store.fetch_parameter( - f"/cross_region/kms_arn/{region}", - ), - } - if region == DEPLOYMENT_ACCOUNT_REGION: - output[region]["modules"] = parameter_store.fetch_parameter( - "deployment_account_bucket" - ) - output["default_scm_branch"] = parameter_store.fetch_parameter( - "default_scm_branch" - ) - return output - - -def worker_thread( - p, organizations, auto_create_repositories, deployment_map, parameter_store -): - LOGGER.debug("Worker Thread started for %s", p.get("name")) - pipeline = Pipeline(p) - if auto_create_repositories == "enabled": - code_account_id = ( - p.get("default_providers", {}) - .get("source", {}) - .get("properties", {}) - .get("account_id", {}) - ) - has_custom_repo = ( - p.get("default_providers", {}) - .get("source", {}) - .get("properties", {}) - .get("repository", {}) - ) - if ( - auto_create_repositories - and code_account_id - and str(code_account_id).isdigit() - and not has_custom_repo - ): - repo = Repo(code_account_id, p.get("name"), p.get("description")) - repo.create_update() - - regions = [] - for target in p.get("targets", []): - target_structure = TargetStructure(target) - for step in target_structure.target: - regions = step.get("regions", p.get("regions", DEPLOYMENT_ACCOUNT_REGION)) - paths_tags = [] - for path in 
step.get("path", []): - paths_tags.append(path) - if step.get("tags") is not None: - paths_tags.append(step.get("tags", {})) - for path_or_tag in paths_tags: - pipeline.stage_regions.append(regions) - pipeline_target = Target( - path_or_tag, target_structure, organizations, step, regions - ) - pipeline_target.fetch_accounts_for_target() - - pipeline.template_dictionary["targets"].append( - target_structure.generate_waves() - ) - - if DEPLOYMENT_ACCOUNT_REGION not in regions: - pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION) - pipeline.generate_input() - ssm_params = fetch_required_ssm_params( - pipeline.input["regions"] or [DEPLOYMENT_ACCOUNT_REGION] - ) - deployment_map.update_deployment_parameters(pipeline) - store_regional_parameter_config(pipeline, parameter_store) - with open( - f'cdk_inputs/{pipeline.input["name"]}.json', mode="w", encoding="utf-8" - ) as outfile: - data = {} - data["input"] = pipeline.input - data["input"]["default_scm_branch"] = ssm_params.get("default_scm_branch") - data["ssm_params"] = ssm_params - json.dump(data, outfile) - - -def _create_inputs_folder(): - try: - return os.mkdir("cdk_inputs") - except FileExistsError: - return None - - -def main(): - """ - Generate pipeline inputs script. Kicks off multiple threads to - generate the pipeline inputs in parallel. - """ - LOGGER.info("ADF Version %s", ADF_VERSION) - LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL) - - _create_inputs_folder() - parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3) - s3 = S3(DEPLOYMENT_ACCOUNT_REGION, SHARED_MODULES_BUCKET) - deployment_map = DeploymentMap(parameter_store, s3, ADF_PIPELINE_PREFIX) - sts = STS() - partition = get_partition(DEPLOYMENT_ACCOUNT_REGION) - cross_account_access_role = parameter_store.fetch_parameter( - "cross_account_access_role", - ) - role = sts.assume_cross_account_role( - ( - f"arn:{partition}:iam::{MASTER_ACCOUNT_ID}:role/" - f"{cross_account_access_role}-readonly" - ), - "pipeline", - ) - organizations = Organizations(role) - ensure_event_bus_status(ORGANIZATION_ID) - try: - auto_create_repositories = parameter_store.fetch_parameter( - "auto_create_repositories", - ) - except ParameterNotFoundError: - auto_create_repositories = "enabled" - threads = [] - cache = Cache() - for pipeline in deployment_map.map_contents.get("pipelines", []): - source_account_id = ( - pipeline.get("default_providers", {}) - .get("source", {}) - .get("properties", {}) - .get("account_id") - ) - need_to_create_rules = ( - source_account_id - and int(source_account_id) != int(DEPLOYMENT_ACCOUNT_ID) - and not cache.exists(source_account_id) - ) - if need_to_create_rules: - rule = Rule(source_account_id) - rule.create_update() - cache.add(source_account_id, True) - thread = PropagatingThread( - target=worker_thread, - args=( - pipeline, - organizations, - auto_create_repositories, - deployment_map, - parameter_store, - ), - ) - thread.start() - threads.append(thread) - - for thread in threads: - thread.join() - - -if __name__ == "__main__": - main() diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/generate_params.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/generate_params.py index 66ec5b79e..cafadd221 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/generate_params.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/generate_params.py @@ -152,7 +152,7 @@ def _retrieve_pipeline_targets(self) -> PipelineTargets: 
         pipeline_targets = {}
         pipeline_definition = self._retrieve_pipeline_definition()
         input_targets: TargetWavesWithNestedWaveTargets = (
-            pipeline_definition['input']['environments']['targets']
+            pipeline_definition['pipeline_input']['environments']['targets']
         )
         # Since the input_targets returns a list of waves that each contain
         # a list of wave_targets, we need to flatten them to iterate:
@@ -195,11 +195,17 @@ def _retrieve_pipeline_targets(self) -> PipelineTargets:
         #   },
         #   ...
         # ]
+        LOGGER.debug(
+            "Found the following pipeline targets: %s",
+            pipeline_targets,
+        )
         return pipeline_targets

     def _create_params_folder(self) -> None:
         try:
-            os.mkdir(f'{self.cwd}/params')
+            dir_path = f'{self.cwd}/params'
+            os.mkdir(dir_path)
+            LOGGER.debug("Created directory: %s", dir_path)
         except FileExistsError:
             pass

@@ -232,6 +238,11 @@ def create_parameter_files(self) -> None:
         """
         for target in self._retrieve_pipeline_targets().values():
             for region in target['regions']:
+                LOGGER.debug(
+                    "Generating parameters for the %s account in %s",
+                    target['account_name'],
+                    region,
+                )
                 current_params = deepcopy(EMPTY_PARAMS_DICT)
                 current_params = self._merge_params(
                     Parameters._parse(
@@ -290,7 +301,7 @@ def create_parameter_files(self) -> None:
                     ),
                     current_params
                 )
-            if current_params is not None:
+            if current_params:
                 self._write_params(
                     current_params,
                     f"{target['account_name']}_{region}",
@@ -336,15 +347,31 @@ def _parse(
         file_path = f"{params_root_path}/params/{clean_file_name}"
         try:
             with open(f"{file_path}.json", encoding='utf-8') as file:
-                return json.load(file)
+                json_content = json.load(file)
+                LOGGER.debug(
+                    "Read %s.json: %s",
+                    file_path,
+                    json_content,
+                )
+                return json_content
         except FileNotFoundError:
             try:
                 with open(f"{file_path}.yml", encoding='utf-8') as file:
-                    return yaml.load(file, Loader=yaml.FullLoader)
+                    yaml_content = yaml.load(file, Loader=yaml.FullLoader)
+                    LOGGER.debug(
+                        "Read %s.yml: %s",
+                        file_path,
+                        yaml_content,
+                    )
+                    return yaml_content
             except yaml.scanner.ScannerError:
                 LOGGER.exception('Invalid Yaml for %s.yml', file_path)
                 raise
             except FileNotFoundError:
+                LOGGER.debug(
+                    "File not found for %s.{json or yml}, defaulting to empty",
+                    file_path,
+                )
                 return {'Parameters': {}, 'Tags': {}}

     def _write_params(
@@ -362,6 +389,11 @@ def _write_params(
             folder.
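Editor's note, not part of the diff: the added debug statements in Parameters._parse above sit inside an existing fallback chain, where the parameter file is read as <name>.json first, then as <name>.yml, and an empty stub is returned when neither exists. A stripped-down sketch of that chain follows; the helper name is made up and the handling of malformed YAML is omitted.

import json

import yaml  # PyYAML, as already used by generate_params.py


def read_params(file_path):
    # Prefer <file_path>.json, fall back to <file_path>.yml,
    # and default to an empty parameter structure otherwise.
    try:
        with open(f"{file_path}.json", encoding="utf-8") as file:
            return json.load(file)
    except FileNotFoundError:
        try:
            with open(f"{file_path}.yml", encoding="utf-8") as file:
                return yaml.load(file, Loader=yaml.FullLoader)
        except FileNotFoundError:
            return {"Parameters": {}, "Tags": {}}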
""" filepath = f"{self.cwd}/params/{filename}.json" + LOGGER.debug( + "Writing to parameter file: %s: %s", + filepath, + new_params, + ) with open(filepath, mode='w', encoding='utf-8') as outfile: json.dump(new_params, outfile) @@ -400,6 +432,10 @@ def _merge_params( self.file_name, ) ) + LOGGER.debug( + "Merged result %s", + merged_params, + ) return merged_params diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/package_transform.sh b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/package_transform.sh index 39f63ab08..2f53d7150 100755 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/package_transform.sh +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/package_transform.sh @@ -34,7 +34,7 @@ fi # Get list of regions supported by this application echo "Determine which regions need to be prepared" -app_regions=`aws ssm get-parameters --names /deployment/$ADF_PROJECT_NAME/regions --with-decryption --output=text --query='Parameters[0].Value'` +app_regions=`aws ssm get-parameters --names /deployment/$ADF_DEPLOYMENT_MAP_SOURCE/$ADF_PROJECT_NAME/regions --with-decryption --output=text --query='Parameters[0].Value'` # Convert json list to bash list (space delimited regions) regions="`echo $app_regions | sed -e 's/\[\([^]]*\)\]/\1/g' | sed 's/,/ /g' | sed "s/'//g"`" diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/list_utils.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/list_utils.py new file mode 100644 index 000000000..88d0ed81d --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/list_utils.py @@ -0,0 +1,37 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +""" +List utilities to ease list management. +""" + + +def _flatten_list(input_list): + result = [] + for item in input_list: + if isinstance(item, list): + if len(item) > 0: + result.extend( + _flatten_list(item), + ) + else: + result.append(item) + return result + + +def flatten_to_unique_sorted(input_list): + """ + Flatten nested lists and return a unique and sorted list of items. + This will recursively iterate over the lists and flatten them together + into one list. It will then remove redundant items, followed by sorting + them. + + Args: + input_list (list): The input list that could hold multiple levels of + nested lists. + + Returns: + List with unique and sorted list of items. 
+ """ + result = _flatten_list(input_list) + return sorted(list(set(result))) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/parameter_store.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/parameter_store.py index 2fa0757ad..bb9f0ceb8 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/parameter_store.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/parameter_store.py @@ -31,7 +31,12 @@ def put_parameter(self, name, value, tier='Standard'): try: current_value = self.fetch_parameter(name) assert current_value == value - LOGGER.debug('No need to update parameter %s with value %s since they are the same', name, value) + LOGGER.debug( + 'No need to update parameter %s with value %s since they ' + 'are the same', + name, + value, + ) except (ParameterNotFoundError, AssertionError): LOGGER.debug('Putting SSM Parameter %s with value %s', name, value) self.client.put_parameter( @@ -50,7 +55,10 @@ def delete_parameter(self, name): Name=name ) except self.client.exceptions.ParameterNotFound: - LOGGER.debug('Attempted to delete Parameter %s but it was not found', name) + LOGGER.debug( + 'Attempted to delete Parameter %s but it was not found', + name, + ) def fetch_parameters_by_path(self, path): """Gets a Parameter(s) by Path from Parameter Store (Recursively) @@ -64,8 +72,9 @@ def fetch_parameters_by_path(self, path): WithDecryption=False ) except self.client.exceptions.ParameterNotFound as error: - raise ParameterNotFoundError(f'Parameter Path {path} Not Found') from error - + raise ParameterNotFoundError( + f'Parameter Path {path} Not Found', + ) from error def fetch_parameter(self, name, with_decryption=False): """Gets a Parameter from Parameter Store (Returns the Value) @@ -78,4 +87,6 @@ def fetch_parameter(self, name, with_decryption=False): ) return response['Parameter']['Value'] except self.client.exceptions.ParameterNotFound as error: - raise ParameterNotFoundError(f'Parameter {name} Not Found') from error + raise ParameterNotFoundError( + f'Parameter {name} Not Found', + ) from error diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/pipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/pipeline.py index 197b53f02..7441de8b1 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/pipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/pipeline.py @@ -7,6 +7,8 @@ """ import os +from copy import deepcopy +from list_utils import flatten_to_unique_sorted DEPLOYMENT_ACCOUNT_REGION = os.environ["AWS_REGION"] @@ -14,31 +16,26 @@ class Pipeline: def __init__(self, pipeline): self.name = pipeline.get('name') - self.default_providers = pipeline.get('default_providers', {}) + self.default_providers = self._set_default_provider_defaults( + pipeline.get('default_providers'), + ) self.parameters = pipeline.get('params', {}) - self.input = {} self.template_dictionary = {"targets": []} - self.notification_endpoint = self.parameters.get('notification_endpoint', None) + self.notification_endpoint = self.parameters.get( + 'notification_endpoint', + ) self.stage_regions = [] self.top_level_regions = pipeline.get('regions', []) self.completion_trigger = pipeline.get('completion_trigger', {}) self.tags = pipeline.get('tags', {}) self.schedule = self.parameters.get('schedule', {}) if not 
isinstance(self.completion_trigger.get('pipelines', []), list):
-            self.completion_trigger['pipelines'] = [self.completion_trigger['pipelines']]
+            self.completion_trigger['pipelines'] = [
+                self.completion_trigger['pipelines'],
+            ]
         if not isinstance(self.top_level_regions, list):
             self.top_level_regions = [self.top_level_regions]

-    @staticmethod
-    def flatten_list(input_list):
-        result = []
-        for item in input_list:
-            if isinstance(item, list):
-                result.extend(Pipeline.flatten_list(item))
-            else:
-                result.append(item)
-        return sorted(result)
-
     def _create_pipelines_folder(self):
         try:
             return os.makedirs(f"pipelines/{self.name}")
@@ -50,26 +47,87 @@ def _write_output(self, output_template):
         with open(output_path, mode='w', encoding='utf-8') as file_handler:
             file_handler.write(output_template)

+    def get_all_regions(self):
+        """
+        Get all the regions specified for this pipeline.
+        This includes the regions that are defined at the top level of the
+        pipeline, i.e. `$.regions`, as well as the `$.targets.[].regions`.
+
+        Returns:
+            list(str): The list of regions that this pipeline has configured.
+        """
+        return flatten_to_unique_sorted(
+            [
+                self.top_level_regions or [],
+                self.stage_regions,
+            ],
+        )
+
     @staticmethod
-    def _input_type_validation(params):
-        if not params.get('default_providers', {}).get('build', {}):
-            params['default_providers']['build'] = {}
-            params['default_providers']['build']['provider'] = 'codebuild'
-        if not params.get('default_providers', {}).get('deploy', {}):
-            params['default_providers']['deploy'] = {}
-            params['default_providers']['deploy']['provider'] = 'cloudformation'
-        return params
+    def _set_default_provider_defaults(default_providers):
+        providers = default_providers or {}
+        return {
+            'source': {
+                'provider': 'codecommit',
+                **providers.get('source', {}),
+            },
+            'build': {
+                'provider': 'codebuild',
+                **providers.get('build', {}),
+            },
+            'deploy': {
+                'provider': 'cloudformation',
+                **providers.get('deploy', {}),
+            },
+        }
+
+    def merge_in_deploy_defaults(self, deploy_target_config):
+        """
+        Pass the step or target deployment configuration here to
+        get the default configuration applied if the provider or its
+        properties are not configured.
+
+        Args:
+            deploy_target_config (dict): The target deployment configuration
+                dict holding the provider type attribute and its properties.
+
+        Returns:
+            dict: The updated target deployment configuration, including the
+                defaults for anything that was not overridden.
+        """
+        new_config = deepcopy(deploy_target_config)
+        default_deploy = self.default_providers.get('deploy')
+        if not new_config.get('provider'):
+            new_config['provider'] = (
+                default_deploy.get('provider')
+            )
+        new_config['properties'] = {
+            **default_deploy.get('properties', {}),
+            **new_config.get('properties', {}),
+        }
+        if new_config.get('regions') is None:
+            new_config['regions'] = (
+                self.top_level_regions
+                or [DEPLOYMENT_ACCOUNT_REGION]
+            )
+        return new_config

     def generate_input(self):
-        self.input = self._input_type_validation({
+        """
+        Generate the pipeline input data.
+
+        Returns:
+            dict: The pipeline input data.
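Editor's note, not part of the diff: a short, hypothetical walk-through of the two helpers above. _set_default_provider_defaults fills in the provider types that a deployment map omits, and merge_in_deploy_defaults layers the pipeline-level deploy settings under a target's own configuration. The deployment map snippet and region values below are made up for illustration.

import os

# pipeline.py reads AWS_REGION at import time, so it has to be set before
# the import; the value here is only a placeholder.
os.environ.setdefault("AWS_REGION", "eu-central-1")

from pipeline import Pipeline

pipeline = Pipeline({
    "name": "sample",
    "params": {},
    "regions": ["eu-west-1"],
    "default_providers": {
        "deploy": {
            "provider": "cloudformation",
            "properties": {"action": "REPLACE_ON_FAILURE"},
        },
    },
})

# Source and build providers fall back to their defaults:
assert pipeline.default_providers["source"]["provider"] == "codecommit"
assert pipeline.default_providers["build"]["provider"] == "codebuild"

# A target that only sets one property inherits the rest:
merged = pipeline.merge_in_deploy_defaults(
    {"properties": {"stack_name": "demo"}},
)
assert merged == {
    "provider": "cloudformation",
    "properties": {"action": "REPLACE_ON_FAILURE", "stack_name": "demo"},
    "regions": ["eu-west-1"],
}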
+ """ + pipeline_input = { "environments": self.template_dictionary, "name": self.name, "params": self.parameters, "tags": self.tags, "default_providers": self.default_providers, - "top_level_regions": sorted(self.flatten_list(list(set(self.top_level_regions)))), - "regions": sorted(list(set(self.flatten_list(self.stage_regions)))), + "regions": self.get_all_regions(), "deployment_account_region": DEPLOYMENT_ACCOUNT_REGION, "completion_trigger": self.completion_trigger, - "schedule": self.schedule - }) + "schedule": self.schedule, + } + return pipeline_input diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/target.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/target.py index 7cc2d4bc3..36d4e20bf 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/target.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/target.py @@ -23,15 +23,23 @@ class TargetStructure: def __init__(self, target): self.target = TargetStructure._define_target_type(target) self.account_list = [] - self.wave = target.get('wave', {}) if isinstance(target, dict) else {} - self.exclude = target.get('exclude', []) if isinstance(target, dict) else [] + self.wave = ( + target.get('wave', {}) + if isinstance(target, dict) + else {} + ) + self.exclude = ( + target.get('exclude', []) + if isinstance(target, dict) + else [] + ) @staticmethod def _define_target_type(target): if isinstance(target, list): output = [] - for t in target: - output.append({"path": [t]}) + for target_path in target: + output.append({"path": [target_path]}) target = output if isinstance(target, (int, str)): target = [{"path": [target]}] @@ -40,7 +48,9 @@ def _define_target_type(target): target["path"] = target.get('target') if not target.get('path') and not target.get('tags'): target["path"] = '/deployment' - LOGGER.debug('No path/target detected, defaulting to /deployment') + LOGGER.debug( + 'No path/target detected, defaulting to /deployment', + ) if not isinstance(target.get('path', []), list): target["path"] = [target.get('path')] if not isinstance(target, list): @@ -49,21 +59,39 @@ def _define_target_type(target): def generate_waves(self): wave_size = self.wave.get('size', 50) - wave = [] + waves = [] length = len(self.account_list) - for index in range(0, length, wave_size): - wave.append(self.account_list[index:min(index + wave_size, length)]) - return wave - + for start_index in range(0, length, wave_size): + end_index = min( + start_index + wave_size, + length, + ) + waves.append( + self.account_list[start_index:end_index], + ) + return waves class Target: - def __init__(self, path, target_structure, organizations, step, regions): + """ + Target deployment configuration. 
+ """ + def __init__( + self, + path, + target_structure, + organizations, + step, + ): self.path = path self.step_name = step.get('name', '') - self.provider = step.get('provider', {}) + self.provider = step.get('provider', 'cloudformation') self.properties = step.get('properties', {}) - self.regions = [regions] if not isinstance(regions, list) else regions + self.regions = ( + [step.get('regions')] + if not isinstance(step.get('regions'), list) + else step.get('regions') + ) self.target_structure = target_structure self.organizations = organizations @@ -91,18 +119,22 @@ def _target_is_approval(self): ) def _create_response_object(self, responses): - _entities = 0 + accounts_found = 0 for response in responses: - _entities += 1 - if Target._account_is_active(response) and not response.get('Id') in self.target_structure.exclude: + is_active_not_excluded = ( + Target._account_is_active(response) + and not response.get('Id') in self.target_structure.exclude + ) + if is_active_not_excluded: + accounts_found += 1 self.target_structure.account_list.append( self._create_target_info( response.get('Name'), str(response.get('Id')) ) ) - if _entities == 0: - raise NoAccountsFoundError(f"No Accounts found in {self.path}") + if accounts_found == 0: + raise NoAccountsFoundError(f"No accounts found in {self.path}") def _target_is_account_id(self): responses = self.organizations.client.describe_account( @@ -115,10 +147,15 @@ def _target_is_tags(self): accounts = [] for response in responses: if response.startswith('ou-'): - accounts.extend(self.organizations.get_accounts_for_parent(response)) + accounts.extend( + self.organizations.get_accounts_for_parent(response), + ) else: - account = self.organizations.client.describe_account(AccountId=response).get('Account') - accounts.append(account) + accounts.append( + self.organizations.client + .describe_account(AccountId=response) + .get('Account'), + ) self._create_response_object(accounts) def _target_is_ou_id(self): @@ -132,7 +169,8 @@ def _target_is_ou_path(self): self._create_response_object(responses) def _target_is_null_path(self): - self.path = '/deployment' # TODO we will fetch this from parameter store + # TODO we need to fetch this default path from parameter store + self.path = '/deployment' responses = self.organizations.dir_to_ou(self.path) self._create_response_object(responses) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_list_utils.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_list_utils.py new file mode 100644 index 000000000..1f942cea5 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_list_utils.py @@ -0,0 +1,29 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: MIT-0 + +# pylint: skip-file + +from ..list_utils import flatten_to_unique_sorted + + +def test_flatten_to_unique_sorted(): + """ + Flatten and sort the list + """ + result = flatten_to_unique_sorted( + [ + # Nested lists: + ['val9', 'val0', 'val1'], + ['val1', 'val2'], + # Empty list + [], + # Double nested list: + [ + ['val8', 'val2'], + 'val4', + ], + # Single item + 'val3', + ], + ) + assert result == ['val0', 'val1', 'val2', 'val3', 'val4', 'val8', 'val9'] diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_pipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_pipeline.py index 31cbd2040..8d99173a5 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_pipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_pipeline.py @@ -16,20 +16,16 @@ def cls(): return Pipeline( pipeline={ - "name": "pipeline", - "params": {"key": "value"}, - "targets": [], - "default_providers": { - "source": { - "name": "codecommit", - "properties" : { - "account_id": 111111111111, + "name": "pipeline", + "params": {"key": "value"}, + "targets": [], + "default_providers": { + "source": { + "name": "codecommit", + "properties": { + "account_id": 111111111111, + } } } - } - }) - - -def test_flatten_list(): - assertions = Pipeline.flatten_list([['val0', 'val1'], ['val2']]) - assert assertions == ['val0', 'val1', 'val2'] + }, + ) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_target.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_target.py index a8ac765ae..f7490fc8b 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_target.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_target.py @@ -29,10 +29,11 @@ def dir_to_ou(self, path): def cls(): cls = Target( path="/thing/path", - regions=["region1", "region2"], target_structure=MockTargetStructure(), organizations=None, - step={}, + step={ + "regions": ["region1", "region2"], + }, ) return cls @@ -45,10 +46,11 @@ def test_account_is_active(): def test_fetch_accounts_for_target_ou_path(): cls = Target( path="/thing/path", - regions=["region1", "region2"], target_structure=MockTargetStructure(), organizations=None, - step={}, + step={ + "regions": ["region1", "region2"], + }, ) with patch.object(cls, "_target_is_ou_path") as mock: @@ -59,10 +61,11 @@ def test_fetch_accounts_for_target_ou_path(): def test_fetch_accounts_for_target_account_id(): cls = Target( path="111111111111", - regions=["region1", "region2"], target_structure=MockTargetStructure(), organizations=None, - step={}, + step={ + "regions": ["region1", "region2"], + }, ) with patch.object(cls, "_target_is_account_id") as mock: cls.fetch_accounts_for_target() @@ -72,10 +75,11 @@ def test_fetch_accounts_for_target_account_id(): def test_fetch_accounts_for_target_ou_id(): cls = Target( path="ou-123fake", - regions=["region1", "region2"], target_structure=MockTargetStructure(), organizations=None, - step={}, + step={ + "regions": ["region1", "region2"], + }, ) with patch.object(cls, "_target_is_ou_id") as mock: cls.fetch_accounts_for_target() @@ -85,10 +89,11 @@ def test_fetch_accounts_for_target_ou_id(): def test_fetch_accounts_for_approval(): cls = Target( path="approval", - 
regions=["region1", "region2"], target_structure=MockTargetStructure(), organizations=None, - step={}, + step={ + "regions": ["region1", "region2"], + }, ) with patch.object(cls, "_target_is_approval") as mock: cls.fetch_accounts_for_target() @@ -98,10 +103,11 @@ def test_fetch_accounts_for_approval(): def test_fetch_account_error(): cls = Target( path="some_string", - regions=["region1", "region2"], target_structure=MockTargetStructure(), organizations=Mock(), - step={}, + step={ + "regions": ["region1", "region2"], + }, ) with raises(InvalidDeploymentMapError): cls.fetch_accounts_for_target() @@ -110,10 +116,11 @@ def test_fetch_account_error(): def test_fetch_account_error_invalid_account_id(): cls = Target( path="12345678901", # 11 digits rather than 12 (invalid account id) - regions=["region1", "region2"], target_structure=MockTargetStructure(), organizations=Mock(), - step={}, + step={ + "regions": ["region1", "region2"], + }, ) with raises(InvalidDeploymentMapError): cls.fetch_accounts_for_target() @@ -137,8 +144,11 @@ def test_target_structure_respects_wave(): {"Name": "test-account-5", "Id": "5", "Status": "ACTIVE"}, ] ), - step=step, - regions=["region1"], + step={ + **step, + "provider": "codedeploy", + "regions": ["region1"], + } ) target.fetch_accounts_for_target() waves = list(target.target_structure.generate_waves()) @@ -151,7 +161,7 @@ def test_target_structure_respects_wave(): "name": "test-account-1", "path": "/some/random/ou", "properties": {}, - "provider": {}, + "provider": "codedeploy", "regions": ["region1"], "step_name": "", }, @@ -160,7 +170,7 @@ def test_target_structure_respects_wave(): "name": "test-account-2", "path": "/some/random/ou", "properties": {}, - "provider": {}, + "provider": "codedeploy", "regions": ["region1"], "step_name": "", }, @@ -173,7 +183,7 @@ def test_target_structure_respects_wave(): "name": "test-account-3", "path": "/some/random/ou", "properties": {}, - "provider": {}, + "provider": "codedeploy", "regions": ["region1"], "step_name": "", }, @@ -182,7 +192,7 @@ def test_target_structure_respects_wave(): "name": "test-account-4", "path": "/some/random/ou", "properties": {}, - "provider": {}, + "provider": "codedeploy", "regions": ["region1"], "step_name": "", }, @@ -195,7 +205,7 @@ def test_target_structure_respects_wave(): "name": "test-account-5", "path": "/some/random/ou", "properties": {}, - "provider": {}, + "provider": "codedeploy", "regions": ["region1"], "step_name": "", }, @@ -225,8 +235,10 @@ def test_target_wave_structure_respects_exclude_config(): {"Name": "test-account-6", "Id": "6", "Status": "ACTIVE"}, ] ), - step=step, - regions=["region1"], + step={ + **step, + "regions": "region1", + } ) target.fetch_accounts_for_target() waves = list(target.target_structure.generate_waves()) @@ -239,7 +251,7 @@ def test_target_wave_structure_respects_exclude_config(): "name": "test-account-1", "path": "/some/random/ou", "properties": {}, - "provider": {}, + "provider": "cloudformation", "regions": ["region1"], "step_name": "", }, @@ -248,7 +260,7 @@ def test_target_wave_structure_respects_exclude_config(): "name": "test-account-2", "path": "/some/random/ou", "properties": {}, - "provider": {}, + "provider": "cloudformation", "regions": ["region1"], "step_name": "", }, @@ -261,7 +273,7 @@ def test_target_wave_structure_respects_exclude_config(): "name": "test-account-3", "path": "/some/random/ou", "properties": {}, - "provider": {}, + "provider": "cloudformation", "regions": ["region1"], "step_name": "", }, @@ -270,7 +282,7 @@ def 
test_target_wave_structure_respects_exclude_config(): "name": "test-account-4", "path": "/some/random/ou", "properties": {}, - "provider": {}, + "provider": "cloudformation", "regions": ["region1"], "step_name": "", }, @@ -283,7 +295,7 @@ def test_target_wave_structure_respects_exclude_config(): "name": "test-account-6", "path": "/some/random/ou", "properties": {}, - "provider": {}, + "provider": "cloudformation", "regions": ["region1"], "step_name": "", }, diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_generate_params.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_generate_params.py index ef86bf458..06943d88a 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_generate_params.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_generate_params.py @@ -111,7 +111,7 @@ def cls(): parameter_store = Mock() definition_s3 = Mock() definition_s3.read_object.return_value = json.dumps({ - 'input': { + 'pipeline_input': { 'environments': { 'targets': [], } @@ -149,7 +149,7 @@ def test_retrieve_pipeline_targets_empty(cls): def test_retrieve_pipeline_targets(cls, input_definition_targets): cls.definition_s3.read_object.return_value = json.dumps({ - 'input': { + 'pipeline_input': { 'environments': { 'targets': input_definition_targets, } @@ -283,7 +283,7 @@ def test_merge_params_with_preset(cls): def test_create_parameter_files(cls, input_definition_targets): cls.definition_s3.read_object.return_value = json.dumps({ - 'input': { + 'pipeline_input': { 'environments': { 'targets': input_definition_targets, } @@ -309,7 +309,7 @@ def test_create_parameter_files(cls, input_definition_targets): def test_ensure_parameter_default_contents(cls, input_definition_targets): cls.definition_s3.read_object.return_value = json.dumps({ - 'input': { + 'pipeline_input': { 'environments': { 'targets': input_definition_targets, } @@ -350,7 +350,7 @@ def test_ensure_parameter_overrides( input_wave_target_two ): cls.definition_s3.read_object.return_value = json.dumps({ - 'input': { + 'pipeline_input': { 'environments': { 'targets': [ [