From 953149326721d6513b2ee8f8d43263a8c58d1645 Mon Sep 17 00:00:00 2001 From: Josh Jaffe Date: Mon, 14 Oct 2019 15:31:40 -0400 Subject: [PATCH 1/3] adding codepipeline docker example --- python/codepipeline-docker-build/Base.py | 71 +++++++++++++++++++ python/codepipeline-docker-build/Pipeline.py | 58 +++++++++++++++ python/codepipeline-docker-build/ReadMe.md | 6 ++ python/codepipeline-docker-build/app.py | 26 +++++++ python/codepipeline-docker-build/cdk.json | 3 + .../pipeline_delivery/Dockerfile | 2 + .../docker_build_buildspec.yml | 15 ++++ python/codepipeline-docker-build/push.sh | 11 +++ .../requirements.txt | 49 +++++++++++++ 9 files changed, 241 insertions(+) create mode 100644 python/codepipeline-docker-build/Base.py create mode 100644 python/codepipeline-docker-build/Pipeline.py create mode 100644 python/codepipeline-docker-build/ReadMe.md create mode 100644 python/codepipeline-docker-build/app.py create mode 100644 python/codepipeline-docker-build/cdk.json create mode 100644 python/codepipeline-docker-build/pipeline_delivery/Dockerfile create mode 100644 python/codepipeline-docker-build/pipeline_delivery/docker_build_buildspec.yml create mode 100755 python/codepipeline-docker-build/push.sh create mode 100644 python/codepipeline-docker-build/requirements.txt diff --git a/python/codepipeline-docker-build/Base.py b/python/codepipeline-docker-build/Base.py new file mode 100644 index 000000000..3cd9aface --- /dev/null +++ b/python/codepipeline-docker-build/Base.py @@ -0,0 +1,71 @@ +from aws_cdk import ( + aws_s3 as aws_s3, + aws_ecr, + aws_codebuild, + aws_ssm, + core, +) + + +class Base(core.Stack): + def __init__(self, app: core.App, id: str, props, **kwargs) -> None: + super().__init__(app, id, **kwargs) + + # pipeline requires versioned bucket + bucket = aws_s3.Bucket( + self, "SourceBucket", + bucket_name=f"{props.namespace.lower()}-{core.Aws.ACCOUNT_ID}", + versioned=True, + removal_policy=core.RemovalPolicy.DESTROY) + # ssm parameter to get bucket name laster + bucket_param = aws_ssm.StringParameter( + self, "ParameterB", + parameter_name=f"{props.namespace}-bucket", + string_value=bucket.bucket_name, + description='cdk pipeline bucket' + ) + # ecr repo to push docker container into + ecr = aws_ecr.Repository( + self, "ECR", + repository_name=f"{props.namespace}", + removal_policy=core.RemovalPolicy.DESTROY + ) + + # codebuild project meant to run in pipeline + cb_docker_build = aws_codebuild.PipelineProject( + self, "DockerBuild", + project_name=f"{props.namespace}-Docker-Build", + build_spec=aws_codebuild.BuildSpec.from_source_filename( + filename='pipeline_delivery/docker_build_buildspec.yml'), + environment=aws_codebuild.BuildEnvironment( + privileged=True, + + ), + # pass the ecr repo uri into the codebuild project so codebuild knows where to push + environment_variables={ + 'ecr': aws_codebuild.BuildEnvironmentVariable( + value=ecr.repository_uri), + 'tag': aws_codebuild.BuildEnvironmentVariable( + value='cdk') + }, + description='Pipeline for CodeBuild', + timeout=core.Duration.minutes(60), + ) + # codebuild iam permissions to read write s3 + bucket.grant_read_write(cb_docker_build) + + # codebuild permissions to interact with ecr + ecr.grant_pull_push(cb_docker_build) + + # update props to pass objects to another stack + props.bucket_name = bucket.bucket_name + props.bucket_arn = bucket.bucket_arn + props.bucket_obj = bucket + props.cb_docker_build = cb_docker_build + self.output_props = props + + # pass objects to another stack + @property + def outputs(self): + props = 
self.output_props + return props diff --git a/python/codepipeline-docker-build/Pipeline.py b/python/codepipeline-docker-build/Pipeline.py new file mode 100644 index 000000000..30e8dfdf9 --- /dev/null +++ b/python/codepipeline-docker-build/Pipeline.py @@ -0,0 +1,58 @@ +from aws_cdk import ( + + aws_codepipeline, + aws_codepipeline_actions, + aws_ssm, + core, +) + + +class Pipeline(core.Stack): + def __init__(self, app: core.App, id: str, props, **kwargs) -> None: + super().__init__(app, id, **kwargs) + # define the s3 artifact + source_output = aws_codepipeline.Artifact(artifact_name='source') + + # define the pipeline + pipeline = aws_codepipeline.Pipeline( + self, "Pipeline", + pipeline_name=f"{props.namespace}", + artifact_bucket=props.bucket_obj, + stages=[ + aws_codepipeline.StageProps( + stage_name='Source', + actions=[ + aws_codepipeline_actions.S3SourceAction( + + bucket=props.bucket_obj, + bucket_key='source.zip', + action_name='S3Source', + run_order=1, + output=source_output, + + ), + ] + ), + aws_codepipeline.StageProps( + + stage_name='Build', + actions=[aws_codepipeline_actions.CodeBuildAction( + action_name='DockerBuildImages', + # role=codepipeline_role, + input=source_output, + project=props.cb_docker_build, + run_order=1, + + ) + ] + ) + ] + + ) + # pipeline param to get the + pipeline_param = aws_ssm.StringParameter( + self, "ParameterP", + parameter_name=f"{props.namespace}-pipeline", + string_value=pipeline.pipeline_name, + description='cdk pipeline bucket' + ) diff --git a/python/codepipeline-docker-build/ReadMe.md b/python/codepipeline-docker-build/ReadMe.md new file mode 100644 index 000000000..e29fc1d82 --- /dev/null +++ b/python/codepipeline-docker-build/ReadMe.md @@ -0,0 +1,6 @@ +# CDK Python Codepipeline Example +* This is an example of a CodePipeline project that uses CodeBuild to Build a Docker Image and push to ECR. +* This example uses multiple stacks for the purpose of demonstrating ways of passing in objects from different stacks +* push.sh will trigger the pipeline via an S3 Upload. +* Parameter Store is used to store the value of the Pipeline and S3 Bucket so it can be retrieved later in push.sh. 
+* Parameter Store can be replaced with CloudFormation Outputs or Exports.
\ No newline at end of file
diff --git a/python/codepipeline-docker-build/app.py b/python/codepipeline-docker-build/app.py
new file mode 100644
index 000000000..cdfa03331
--- /dev/null
+++ b/python/codepipeline-docker-build/app.py
@@ -0,0 +1,26 @@
+from aws_cdk import (
+    core,
+)
+
+from Base import Base
+from Pipeline import Pipeline
+
+
+# using props to pass in objects between stacks
+class Props():
+    def __init__(self):
+        self.namespace = 'cdk-example-pipeline'
+        self.region = 'us-east-1'
+
+
+props = Props()
+app = core.App()
+
+# stack for ecr, bucket, codebuild
+base = Base(app, f"{props.namespace}-base", props, )
+props = base.outputs
+
+# pipeline stack
+pipeline = Pipeline(app, f"{props.namespace}-pipeline", props)
+pipeline.add_dependency(base)
+app.synth()
diff --git a/python/codepipeline-docker-build/cdk.json b/python/codepipeline-docker-build/cdk.json
new file mode 100644
index 000000000..787a71dd6
--- /dev/null
+++ b/python/codepipeline-docker-build/cdk.json
@@ -0,0 +1,3 @@
+{
+  "app": "python3 app.py"
+}
diff --git a/python/codepipeline-docker-build/pipeline_delivery/Dockerfile b/python/codepipeline-docker-build/pipeline_delivery/Dockerfile
new file mode 100644
index 000000000..ed5fa7ae0
--- /dev/null
+++ b/python/codepipeline-docker-build/pipeline_delivery/Dockerfile
@@ -0,0 +1,2 @@
+FROM python:3.7.2-alpine
+RUN pip install awscli
diff --git a/python/codepipeline-docker-build/pipeline_delivery/docker_build_buildspec.yml b/python/codepipeline-docker-build/pipeline_delivery/docker_build_buildspec.yml
new file mode 100644
index 000000000..cc466bdc2
--- /dev/null
+++ b/python/codepipeline-docker-build/pipeline_delivery/docker_build_buildspec.yml
@@ -0,0 +1,15 @@
+version: 0.2
+
+phases:
+  pre_build:
+    commands:
+      - echo logging in to Amazon ECR
+      - $(aws ecr get-login --no-include-email --region $AWS_DEFAULT_REGION)
+  build:
+    commands:
+      - echo Build started on `date`
+      - docker build -t ${tag}:latest pipeline_delivery/
+      - docker tag $tag:latest $ecr:$tag
+      - docker push $ecr
+      - echo Build completed on `date`
+
diff --git a/python/codepipeline-docker-build/push.sh b/python/codepipeline-docker-build/push.sh
new file mode 100755
index 000000000..789e2f729
--- /dev/null
+++ b/python/codepipeline-docker-build/push.sh
@@ -0,0 +1,11 @@
+#!/usr/bin/env bash
+
+
+export account_id=$(aws sts get-caller-identity | jq -r .Account)
+export source_bucket=$(aws ssm get-parameter --name 'cdk-example-pipeline-bucket' | jq -r .Parameter.Value)
+export pipeline_name=$(aws ssm get-parameter --name 'cdk-example-pipeline-pipeline' | jq -r .Parameter.Value)
+export REGION='us-east-1'
+
+zip -r source.zip .
+aws s3 cp source.zip s3://${source_bucket}/source.zip +aws codepipeline start-pipeline-execution --name ${pipeline_name} diff --git a/python/codepipeline-docker-build/requirements.txt b/python/codepipeline-docker-build/requirements.txt new file mode 100644 index 000000000..b6b3f0131 --- /dev/null +++ b/python/codepipeline-docker-build/requirements.txt @@ -0,0 +1,49 @@ +attrs==19.2.0 +aws-cdk.assets==1.12.0 +aws-cdk.aws-apigateway==1.12.0 +aws-cdk.aws-applicationautoscaling==1.12.0 +aws-cdk.aws-autoscaling==1.12.0 +aws-cdk.aws-autoscaling-common==1.12.0 +aws-cdk.aws-autoscaling-hooktargets==1.12.0 +aws-cdk.aws-certificatemanager==1.12.0 +aws-cdk.aws-cloudformation==1.12.0 +aws-cdk.aws-cloudfront==1.12.0 +aws-cdk.aws-cloudwatch==1.12.0 +aws-cdk.aws-codebuild==1.12.0 +aws-cdk.aws-codecommit==1.12.0 +aws-cdk.aws-codedeploy==1.12.0 +aws-cdk.aws-codepipeline==1.12.0 +aws-cdk.aws-codepipeline-actions==1.12.0 +aws-cdk.aws-ec2==1.12.0 +aws-cdk.aws-ecr==1.12.0 +aws-cdk.aws-ecr-assets==1.12.0 +aws-cdk.aws-ecs==1.12.0 +aws-cdk.aws-elasticloadbalancing==1.12.0 +aws-cdk.aws-elasticloadbalancingv2==1.12.0 +aws-cdk.aws-events==1.12.0 +aws-cdk.aws-events-targets==1.12.0 +aws-cdk.aws-iam==1.12.0 +aws-cdk.aws-kms==1.12.0 +aws-cdk.aws-lambda==1.12.0 +aws-cdk.aws-logs==1.12.0 +aws-cdk.aws-route53==1.12.0 +aws-cdk.aws-route53-targets==1.12.0 +aws-cdk.aws-s3==1.12.0 +aws-cdk.aws-s3-assets==1.12.0 +aws-cdk.aws-secretsmanager==1.12.0 +aws-cdk.aws-servicediscovery==1.12.0 +aws-cdk.aws-sns==1.12.0 +aws-cdk.aws-sns-subscriptions==1.12.0 +aws-cdk.aws-sqs==1.12.0 +aws-cdk.aws-ssm==1.12.0 +aws-cdk.aws-stepfunctions==1.12.0 +aws-cdk.core==1.12.0 +aws-cdk.cx-api==1.12.0 +aws-cdk.region-info==1.12.0 +cattrs==0.9.0 +importlib-resources==1.0.2 +jsii==0.18.0 +publication==0.0.3 +python-dateutil==2.8.0 +six==1.12.0 +typing-extensions==3.7.4 From 4ed4d9305136dbb7b2081c2343d86b01b471ad8c Mon Sep 17 00:00:00 2001 From: Josh Jaffe Date: Wed, 16 Oct 2019 19:03:15 -0400 Subject: [PATCH 2/3] pushing fixes, response to code review --- python/codepipeline-docker-build/Base.py | 38 ++++++----- python/codepipeline-docker-build/Pipeline.py | 37 ++++++----- python/codepipeline-docker-build/ReadMe.md | 2 +- python/codepipeline-docker-build/app.py | 15 +---- python/codepipeline-docker-build/push.sh | 2 +- .../requirements.txt | 64 +++++-------------- 6 files changed, 63 insertions(+), 95 deletions(-) diff --git a/python/codepipeline-docker-build/Base.py b/python/codepipeline-docker-build/Base.py index 3cd9aface..6afae2173 100644 --- a/python/codepipeline-docker-build/Base.py +++ b/python/codepipeline-docker-build/Base.py @@ -6,7 +6,6 @@ core, ) - class Base(core.Stack): def __init__(self, app: core.App, id: str, props, **kwargs) -> None: super().__init__(app, id, **kwargs) @@ -14,27 +13,26 @@ def __init__(self, app: core.App, id: str, props, **kwargs) -> None: # pipeline requires versioned bucket bucket = aws_s3.Bucket( self, "SourceBucket", - bucket_name=f"{props.namespace.lower()}-{core.Aws.ACCOUNT_ID}", + bucket_name=f"{props['namespace'].lower()}-{core.Aws.ACCOUNT_ID}", versioned=True, removal_policy=core.RemovalPolicy.DESTROY) - # ssm parameter to get bucket name laster + # ssm parameter to get bucket name later bucket_param = aws_ssm.StringParameter( self, "ParameterB", - parameter_name=f"{props.namespace}-bucket", + parameter_name=f"{props['namespace']}-bucket", string_value=bucket.bucket_name, description='cdk pipeline bucket' ) # ecr repo to push docker container into ecr = aws_ecr.Repository( self, "ECR", - 
repository_name=f"{props.namespace}", + repository_name=f"{props['namespace']}", removal_policy=core.RemovalPolicy.DESTROY ) - # codebuild project meant to run in pipeline cb_docker_build = aws_codebuild.PipelineProject( self, "DockerBuild", - project_name=f"{props.namespace}-Docker-Build", + project_name=f"{props['namespace']}-Docker-Build", build_spec=aws_codebuild.BuildSpec.from_source_filename( filename='pipeline_delivery/docker_build_buildspec.yml'), environment=aws_codebuild.BuildEnvironment( @@ -57,15 +55,25 @@ def __init__(self, app: core.App, id: str, props, **kwargs) -> None: # codebuild permissions to interact with ecr ecr.grant_pull_push(cb_docker_build) - # update props to pass objects to another stack - props.bucket_name = bucket.bucket_name - props.bucket_arn = bucket.bucket_arn - props.bucket_obj = bucket - props.cb_docker_build = cb_docker_build - self.output_props = props + core.CfnOutput( + self, "ECRURI", + description="ECR URI", + value=ecr.repository_uri, + ) + core.CfnOutput( + self, "S3Bucket", + description="S3 Bucket", + value=bucket.bucket_name + ) + + self.output_props = props.copy() + + self.output_props['bucket']= bucket + self.output_props['cb_docker_build'] = cb_docker_build + + # pass objects to another stack @property def outputs(self): - props = self.output_props - return props + return self.output_props diff --git a/python/codepipeline-docker-build/Pipeline.py b/python/codepipeline-docker-build/Pipeline.py index 30e8dfdf9..c79a172b2 100644 --- a/python/codepipeline-docker-build/Pipeline.py +++ b/python/codepipeline-docker-build/Pipeline.py @@ -1,5 +1,4 @@ from aws_cdk import ( - aws_codepipeline, aws_codepipeline_actions, aws_ssm, @@ -12,47 +11,51 @@ def __init__(self, app: core.App, id: str, props, **kwargs) -> None: super().__init__(app, id, **kwargs) # define the s3 artifact source_output = aws_codepipeline.Artifact(artifact_name='source') - # define the pipeline pipeline = aws_codepipeline.Pipeline( self, "Pipeline", - pipeline_name=f"{props.namespace}", - artifact_bucket=props.bucket_obj, + pipeline_name=f"{props['namespace']}", + artifact_bucket=props['bucket'], stages=[ aws_codepipeline.StageProps( stage_name='Source', actions=[ aws_codepipeline_actions.S3SourceAction( - - bucket=props.bucket_obj, + bucket=props['bucket'], bucket_key='source.zip', action_name='S3Source', run_order=1, output=source_output, - + trigger=aws_codepipeline_actions.S3Trigger.POLL ), ] ), aws_codepipeline.StageProps( - stage_name='Build', - actions=[aws_codepipeline_actions.CodeBuildAction( - action_name='DockerBuildImages', - # role=codepipeline_role, - input=source_output, - project=props.cb_docker_build, - run_order=1, - - ) + actions=[ + aws_codepipeline_actions.CodeBuildAction( + action_name='DockerBuildImages', + input=source_output, + project=props['cb_docker_build'], + run_order=1, + ) ] ) ] ) + print(props['bucket']) + props['bucket'].grant_read_write(pipeline.role) + # pipeline param to get the pipeline_param = aws_ssm.StringParameter( self, "ParameterP", - parameter_name=f"{props.namespace}-pipeline", + parameter_name=f"{props['namespace']}-pipeline", string_value=pipeline.pipeline_name, description='cdk pipeline bucket' ) + core.CfnOutput( + self, "PipelineOut", + description="Pipeline", + value=pipeline.pipeline_name + ) \ No newline at end of file diff --git a/python/codepipeline-docker-build/ReadMe.md b/python/codepipeline-docker-build/ReadMe.md index e29fc1d82..27b2d6782 100644 --- a/python/codepipeline-docker-build/ReadMe.md +++ 
b/python/codepipeline-docker-build/ReadMe.md @@ -1,4 +1,4 @@ -# CDK Python Codepipeline Example +# CDK Python CodePipeline Example * This is an example of a CodePipeline project that uses CodeBuild to Build a Docker Image and push to ECR. * This example uses multiple stacks for the purpose of demonstrating ways of passing in objects from different stacks * push.sh will trigger the pipeline via an S3 Upload. diff --git a/python/codepipeline-docker-build/app.py b/python/codepipeline-docker-build/app.py index cdfa03331..2508e81a4 100644 --- a/python/codepipeline-docker-build/app.py +++ b/python/codepipeline-docker-build/app.py @@ -5,22 +5,13 @@ from Base import Base from Pipeline import Pipeline - -# using props to pass in objects between stacks -class Props(): - def __init__(self): - self.namespace = 'cdk-example-pipeline' - self.region = 'us-east-1' - - -props = Props() +props = {'namespace': 'cdk-example-pipeline'} app = core.App() # stack for ecr, bucket, codebuild -base = Base(app, f"{props.namespace}-base", props, ) -props = base.outputs +base = Base(app, f"{props['namespace']}-base", props) # pipeline stack -pipeline = Pipeline(app, f"{props.namespace}-pipeline", props) +pipeline = Pipeline(app, f"{props['namespace']}-pipeline", base.outputs) pipeline.add_dependency(base) app.synth() diff --git a/python/codepipeline-docker-build/push.sh b/python/codepipeline-docker-build/push.sh index 789e2f729..f9e1bd1a9 100755 --- a/python/codepipeline-docker-build/push.sh +++ b/python/codepipeline-docker-build/push.sh @@ -8,4 +8,4 @@ export REGION='us-east-1' zip -r source.zip . aws s3 cp source.zip s3://${source_bucket}/source.zip -aws codepipeline start-pipeline-execution --name ${pipeline_name} +#aws codepipeline start-pipeline-execution --name ${pipeline_name} diff --git a/python/codepipeline-docker-build/requirements.txt b/python/codepipeline-docker-build/requirements.txt index b6b3f0131..6b992d117 100644 --- a/python/codepipeline-docker-build/requirements.txt +++ b/python/codepipeline-docker-build/requirements.txt @@ -1,49 +1,15 @@ -attrs==19.2.0 -aws-cdk.assets==1.12.0 -aws-cdk.aws-apigateway==1.12.0 -aws-cdk.aws-applicationautoscaling==1.12.0 -aws-cdk.aws-autoscaling==1.12.0 -aws-cdk.aws-autoscaling-common==1.12.0 -aws-cdk.aws-autoscaling-hooktargets==1.12.0 -aws-cdk.aws-certificatemanager==1.12.0 -aws-cdk.aws-cloudformation==1.12.0 -aws-cdk.aws-cloudfront==1.12.0 -aws-cdk.aws-cloudwatch==1.12.0 -aws-cdk.aws-codebuild==1.12.0 -aws-cdk.aws-codecommit==1.12.0 -aws-cdk.aws-codedeploy==1.12.0 -aws-cdk.aws-codepipeline==1.12.0 -aws-cdk.aws-codepipeline-actions==1.12.0 -aws-cdk.aws-ec2==1.12.0 -aws-cdk.aws-ecr==1.12.0 -aws-cdk.aws-ecr-assets==1.12.0 -aws-cdk.aws-ecs==1.12.0 -aws-cdk.aws-elasticloadbalancing==1.12.0 -aws-cdk.aws-elasticloadbalancingv2==1.12.0 -aws-cdk.aws-events==1.12.0 -aws-cdk.aws-events-targets==1.12.0 -aws-cdk.aws-iam==1.12.0 -aws-cdk.aws-kms==1.12.0 -aws-cdk.aws-lambda==1.12.0 -aws-cdk.aws-logs==1.12.0 -aws-cdk.aws-route53==1.12.0 -aws-cdk.aws-route53-targets==1.12.0 -aws-cdk.aws-s3==1.12.0 -aws-cdk.aws-s3-assets==1.12.0 -aws-cdk.aws-secretsmanager==1.12.0 -aws-cdk.aws-servicediscovery==1.12.0 -aws-cdk.aws-sns==1.12.0 -aws-cdk.aws-sns-subscriptions==1.12.0 -aws-cdk.aws-sqs==1.12.0 -aws-cdk.aws-ssm==1.12.0 -aws-cdk.aws-stepfunctions==1.12.0 -aws-cdk.core==1.12.0 -aws-cdk.cx-api==1.12.0 -aws-cdk.region-info==1.12.0 -cattrs==0.9.0 -importlib-resources==1.0.2 -jsii==0.18.0 -publication==0.0.3 -python-dateutil==2.8.0 -six==1.12.0 -typing-extensions==3.7.4 
+aws-cdk.aws-cloudformation
+aws-cdk.aws-codepipeline
+aws-cdk.aws-codepipeline-actions
+aws-cdk.aws-ecr
+aws-cdk.aws-ecr-assets
+aws-cdk.aws-s3
+aws-cdk.aws-s3-assets
+
+aws-cdk.aws-sqs
+aws-cdk.aws-ssm
+
+aws-cdk.core
+aws-cdk.cx-api
+aws-cdk.region-info
+

From 75a056fa7cf74a74427b54f4cd5d89b1bd7062c8 Mon Sep 17 00:00:00 2001
From: Josh Jaffe
Date: Sun, 20 Oct 2019 15:50:37 -0400
Subject: [PATCH 3/3] switching from class to dict, final fixes

---
 python/codepipeline-docker-build/Base.py     | 4 ----
 python/codepipeline-docker-build/Pipeline.py | 7 ++++---
 2 files changed, 4 insertions(+), 7 deletions(-)

diff --git a/python/codepipeline-docker-build/Base.py b/python/codepipeline-docker-build/Base.py
index 6afae2173..b539c1593 100644
--- a/python/codepipeline-docker-build/Base.py
+++ b/python/codepipeline-docker-build/Base.py
@@ -37,7 +37,6 @@ def __init__(self, app: core.App, id: str, props, **kwargs) -> None:
                 filename='pipeline_delivery/docker_build_buildspec.yml'),
             environment=aws_codebuild.BuildEnvironment(
                 privileged=True,
-
             ),
             # pass the ecr repo uri into the codebuild project so codebuild knows where to push
             environment_variables={
@@ -67,12 +66,9 @@ def __init__(self, app: core.App, id: str, props, **kwargs) -> None:
         )

         self.output_props = props.copy()
-
         self.output_props['bucket']= bucket
         self.output_props['cb_docker_build'] = cb_docker_build

-
-
     # pass objects to another stack
     @property
     def outputs(self):
diff --git a/python/codepipeline-docker-build/Pipeline.py b/python/codepipeline-docker-build/Pipeline.py
index c79a172b2..016d9f944 100644
--- a/python/codepipeline-docker-build/Pipeline.py
+++ b/python/codepipeline-docker-build/Pipeline.py
@@ -44,16 +44,17 @@ def __init__(self, app: core.App, id: str, props, **kwargs) -> None:
             ]

         )
-        print(props['bucket'])
+        # give the pipeline role read/write access to the artifact bucket
         props['bucket'].grant_read_write(pipeline.role)

-        # pipeline param to get the
+        # ssm parameter so the pipeline name can be looked up later in push.sh
         pipeline_param = aws_ssm.StringParameter(
-            self, "ParameterP",
+            self, "PPipeline",
             parameter_name=f"{props['namespace']}-pipeline",
             string_value=pipeline.pipeline_name,
             description='cdk pipeline bucket'
         )
+        # cfn output
         core.CfnOutput(
             self, "PipelineOut",
             description="Pipeline",
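
A minimal way to exercise the example end to end is sketched below. It assumes the default namespace cdk-example-pipeline from app.py, AWS credentials with a default region configured (the example files assume us-east-1), and that the CDK CLI, jq, and zip are installed. Note that Base.py imports aws_codebuild but requirements.txt does not pin aws-cdk.aws-codebuild; it is expected to arrive as a transitive dependency of aws-cdk.aws-codepipeline-actions, so install it explicitly if it does not.

    # install the CDK libraries used by the two stacks
    python3 -m venv .venv && source .venv/bin/activate
    pip install -r requirements.txt

    # deploy the base stack (bucket, ECR repo, CodeBuild project), then the pipeline stack
    cdk deploy cdk-example-pipeline-base
    cdk deploy cdk-example-pipeline-pipeline

    # zip the repo and upload source.zip; the S3 poll trigger starts the pipeline
    ./push.sh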