Skip to content

Commit

Permalink
Update to CDK v1.137, pylint v2.12, and others to latest available (#417)

Browse files Browse the repository at this point in the history

* Update to CDK v1.137, SAM CLI v1.36, and others to latest available

Bumping versions of CDK, SAM CLI, Boto3, and other libraries that we
depend on.

* Update pylint to v2.12 plus refactoring flagged code

The update of pylint returned a list of errors.
These errors have been fixed as part of this commit.

Additionally, refactored code that could use a refreshing haircut.
  • Loading branch information
sbkok authored Jan 7, 2022
1 parent cdfa538 commit 6cabcf7
Show file tree
Hide file tree
Showing 45 changed files with 436 additions and 439 deletions.
16 changes: 8 additions & 8 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
astroid~=2.6.4
botocore==1.21.2
boto3==1.18.2
isort==5.9.2
astroid~=2.9.0
botocore==1.23.26
boto3==1.20.26
isort==5.10.1
mock~=4.0.3
pylint~=2.9.3
pytest~=6.2.4
pylint~=2.12.2
pytest~=6.2.5
pyyaml>=5.4.1
schema~=0.7.4
tox==3.24.0
schema~=0.7.5
tox==3.24.4
2 changes: 1 addition & 1 deletion src/lambda_codebase/account/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ def wait_on_account_creation(request_id: str) -> Tuple[AccountId, bool]:
)
if account_status["CreateAccountStatus"]["State"] == "FAILED":
reason = account_status["CreateAccountStatus"]["FailureReason"]
raise Exception("Failed to create account because %s" % reason)
raise Exception(f"Failed to create account because {reason}")
if account_status["CreateAccountStatus"]["State"] == "IN_PROGRESS":
LOGGER.info(
"Account creation still in progress, waiting.. "
Expand Down
8 changes: 4 additions & 4 deletions src/lambda_codebase/account_bootstrap.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,13 +51,13 @@ def configure_generic_account(sts, event, region, role):
region,
role
)
kms_arn = parameter_store_deployment_account.fetch_parameter('/cross_region/kms_arn/{0}'.format(region))
bucket_name = parameter_store_deployment_account.fetch_parameter('/cross_region/s3_regional_bucket/{0}'.format(region))
kms_arn = parameter_store_deployment_account.fetch_parameter(f'/cross_region/kms_arn/{region}')
bucket_name = parameter_store_deployment_account.fetch_parameter(f'/cross_region/s3_regional_bucket/{region}')
except (ClientError, ParameterNotFoundError):
raise GenericAccountConfigureError(
'Account {0} cannot yet be bootstrapped '
f'Account {event["account_id"]} cannot yet be bootstrapped '
'as the Deployment Account has not yet been bootstrapped. '
'Have you moved your Deployment account into the deployment OU?'.format(event['account_id'])
'Have you moved your Deployment account into the deployment OU?'
) from None
parameter_store_target_account.put_parameter('kms_arn', kms_arn)
parameter_store_target_account.put_parameter('bucket_name', bucket_name)
Expand Down
4 changes: 2 additions & 2 deletions src/lambda_codebase/event.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,11 +22,11 @@ class Event:
"""
def __init__(self, event, parameter_store, organizations, account_id):
self.parameter_store = parameter_store
self.config = ast.literal_eval('{0}'.format(
self.config = ast.literal_eval(
parameter_store.fetch_parameter(
'config'
)
))
)
self.account_id = account_id
self.organizations = organizations
self.protected_ou_list = self.config.get('protected', [])
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -672,7 +672,7 @@ Resources:
nodejs: 12
pre_build:
commands:
- npm install cdk@1.114 -g -y --quiet --no-progress
- npm install cdk@1.137 -g -y --quiet --no-progress
- aws s3 cp s3://$SHARED_MODULES_BUCKET/adf-build/ ./adf-build/ --recursive --quiet
- pip install -r adf-build/requirements.txt -q -t ./adf-build
build:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,11 +59,11 @@ def lambda_handler(event, _):
s3_buckets = []
for region in list(set([event.get('deployment_account_region')] + event.get("regions", []))):
kms_key_arn = parameter_store.fetch_parameter(
"/cross_region/kms_arn/{0}".format(region)
f"/cross_region/kms_arn/{region}"
)
kms_key_arns.append(kms_key_arn)
s3_bucket = parameter_store.fetch_parameter(
"/cross_region/s3_regional_bucket/{0}".format(region)
f"/cross_region/s3_regional_bucket/{region}"
)
s3_buckets.append(s3_bucket)
try:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ def chunks(list_to_chunk, number_to_chunk_into):

def generate_pull_request_input(event, repo_name):
return {
"title": 'ADF {0} Automated Update PR'.format(event.ResourceProperties.Version),
"title": f'ADF {event.ResourceProperties.Version} Automated Update PR',
"description": PR_DESCRIPTION.format(event.ResourceProperties.Version),
"targets": [
{
Expand All @@ -149,12 +149,13 @@ def generate_pull_request_input(event, repo_name):
}

def generate_commit_input(repo_name, index, branch="master", parent_commit_id=None, puts=None, deletes=None):
commit_action = "Delete" if deletes else "Create"
output = {
"repositoryName": repo_name,
"branchName": branch,
"authorName": "AWS ADF Builders Team",
"email": "[email protected]",
"commitMessage": "Automated Commit - {0} Part {1}".format("Delete" if deletes else "Create", index),
"commitMessage": f"Automated Commit - {commit_action} Part {index}",
"putFiles": puts if puts else [],
"deleteFiles": deletes if deletes else []
}
Expand Down Expand Up @@ -329,6 +330,6 @@ def create_adf_config_file(props: CustomResourceProperties) -> FileToCommit:
.encode()
)

with open("/tmp/adfconfig.yml", "wb") as f:
f.write(adf_config)
with open("/tmp/adfconfig.yml", mode="wb") as file:
file.write(adf_config)
return FileToCommit("adfconfig.yml", FileMode.NORMAL, adf_config)
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
Jinja2~=3.0.1
boto3==1.18.2
Jinja2~=3.0.3
boto3==1.20.26
cfn-custom-resource~=1.0.1
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@

from parameter_store import ParameterStore


def extract_pipeline(message):
"""
Try extract the pipeline name from the message (approval/success/failure)
Expand All @@ -22,19 +23,22 @@ def extract_pipeline(message):
try:
name = message.get('approval', {}).get('pipelineName', None) or message.get("detail", {}).get("pipeline", None)
return {
"name": name.split("{0}".format(os.environ.get("ADF_PIPELINE_PREFIX")))[-1],
"name": name.split(str(os.environ.get("ADF_PIPELINE_PREFIX")))[-1],
"state": message.get("detail", {}).get("state"),
"time": message.get("time"),
"account_id": message.get("account")
}
except AttributeError:
return {
"name": message.split("{0}".format(os.environ.get("ADF_PIPELINE_PREFIX")))[-1].split(' from account')[0],
"name": message.split(
str(os.environ.get("ADF_PIPELINE_PREFIX"))
)[-1].split(' from account')[0],
"state": message.split('has ')[-1].split(' at')[0],
"time": message.split('at ')[-1],
"account_id": message.split('account ')[-1].split(' has')[0]
}


def is_approval(message):
"""
Determines if the message sent in was for an approval action
Expand All @@ -43,6 +47,7 @@ def is_approval(message):
return False
return message.get('approval', None)


def is_bootstrap(event):
"""
    Determines if the message sent in was for a bootstrap action -
Expand All @@ -58,6 +63,7 @@ def is_bootstrap(event):
except ValueError:
return True


def extract_message(event):
"""
Takes the message out of the incoming event and attempts to load it into JSON
Expand All @@ -70,29 +76,31 @@ def extract_message(event):
except ValueError:
return message


def create_approval(channel, message):
"""
Creates a dict that will be sent to send_message for approvals
"""
return {
"text": ":clock1: Pipeline {0} in {1} requires approval".format(
message["approval"]["pipelineName"],
message["approval"]["customData"]
"text": (
f":clock1: Pipeline {message['approval']['pipelineName']} "
f"in {message['approval']['customData']} requires approval"
),
"channel": channel,
"attachments": [
{
"fallback": "Approve or Deny Deployment at {0}".format(message["consoleLink"]),
"fallback": f"Approve or Deny Deployment at {message['consoleLink']}",
"actions": [
{
"type": "button",
"text": "Approve or Deny Deployment",
"url": "{0}".format(message["consoleLink"])
"url": str(message["consoleLink"])
}
]
}
]
}
}


def create_pipeline_message_text(channel, pipeline):
"""
Expand All @@ -101,14 +109,13 @@ def create_pipeline_message_text(channel, pipeline):
emote = ":red_circle:" if pipeline.get("state") == "FAILED" else ":white_check_mark:"
return {
"channel": channel,
"text": "{0} Pipeline {1} on {2} has {3}".format(
emote,
pipeline["name"],
pipeline["account_id"],
pipeline["state"]
)
"text": (
f"{emote} Pipeline {pipeline['name']} on {pipeline['account_id']} "
f"has {pipeline['state']}"
),
}


def create_bootstrap_message_text(channel, message):
"""
Creates a dict that will be sent to send_message for bootstrapping completion
Expand All @@ -120,9 +127,10 @@ def create_bootstrap_message_text(channel, message):
emote = ":red_circle:" if any(x in message for x in ['error', 'Failed']) else ":white_check_mark:"
return {
"channel": channel,
"text": "{0} {1}".format(emote, message)
"text": f"{emote} {message}"
}


def send_message(url, payload):
"""
Sends the message to the designated slack webhook
Expand All @@ -136,17 +144,21 @@ def send_message(url, payload):
with urllib.request.urlopen(req) as response:
return response.read()


def lambda_handler(event, _):
message = extract_message(event)
pipeline = extract_pipeline(message)
parameter_store = ParameterStore(os.environ["AWS_REGION"], boto3)
secrets_manager = boto3.client('secretsmanager', region_name=os.environ["AWS_REGION"])
channel = parameter_store.fetch_parameter(
name='/notification_endpoint/{0}'.format(pipeline["name"]),
with_decryption=False
name=f'/notification_endpoint/{pipeline["name"]}',
with_decryption=False,
)
    # All Slack URLs must be stored in /adf/slack/channel_name since ADF only has access to the /adf/ prefix by default
url = json.loads(secrets_manager.get_secret_value(SecretId='/adf/slack/{0}'.format(channel))['SecretString'])
    # All Slack URLs must be stored in /adf/slack/channel_name since ADF only
# has access to the /adf/ prefix by default
url = json.loads(secrets_manager.get_secret_value(
SecretId=f'/adf/slack/{channel}'
)['SecretString'])
if is_approval(message):
send_message(url[channel], create_approval(channel, message))
return
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,13 @@ def generate_notify_message(event):
update_status = 1
return {
"update_only": update_status,
"message": "Account {0} has now been bootstrapped into {1}".format(event["account_ids"][0], event["full_path"])
"message": (
f"Account {event['account_ids'][0]} has now been bootstrapped "
f"into {event['full_path']}"
)
}


def lambda_handler(event, _):
"""
Responsible for triggering the aws-deployment-framework-pipelines
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ def _load_config_file(self):
"""
Loads the adfconfig.yml file and executes _parse_config
"""
with open(self.config_path) as config:
with open(self.config_path, encoding='utf-8') as config:
self.config_contents = yaml.load(config, Loader=yaml.FullLoader)
self._parse_config()

Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0

"""Main entry point for main.py execution which
"""
Main entry point for main.py execution which
is executed from within AWS CodeBuild in the Master Account
"""

Expand Down Expand Up @@ -46,8 +47,7 @@ def is_account_in_invalid_state(ou_id, config):

protected = config.get('protected', [])
if ou_id in protected:
return "Is in a protected Organizational Unit {0}, it will be skipped.".format(
ou_id)
return f"Is in a protected Organizational Unit {ou_id}, it will be skipped."

return False

Expand Down Expand Up @@ -89,11 +89,11 @@ def update_deployment_account_output_parameters(
kms_and_bucket_dict[region]['s3_regional_bucket'] = outputs['s3_regional_bucket']
for key, value in outputs.items():
deployment_account_parameter_store.put_parameter(
"/cross_region/{0}/{1}".format(key, region),
f"/cross_region/{key}/{region}",
value
)
parameter_store.put_parameter(
"/cross_region/{0}/{1}".format(key, region),
f"/cross_region/{key}/{region}",
value
)

Expand Down Expand Up @@ -231,9 +231,12 @@ def worker_thread(
except GenericAccountConfigureError as error:
if 'Unable to fetch parameters' in str(error):
LOGGER.error(
'%s - Failed to update its base stack due to missing parameters (deployment_account_id or kms_arn), '
'ensure this account has been bootstrapped correctly by being moved from the root '
'into an Organizational Unit within AWS Organizations.', account_id)
'%s - Failed to update its base stack due to missing parameters '
'(deployment_account_id or kms_arn), ensure this account has been '
'bootstrapped correctly by being moved from the root into an '
'Organizational Unit within AWS Organizations.',
account_id,
)
raise Exception from error

except GenericAccountConfigureError as generic_account_error:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ def __init__(self):
@staticmethod
def _find_all(policy):
_files = list(glob.iglob(
'./adf-bootstrap/**/{0}.json'.format(policy),
f'./adf-bootstrap/**/{policy}.json',
recursive=True,
))
return [f.replace('./adf-bootstrap', '.') for f in _files]
Expand Down Expand Up @@ -193,7 +193,7 @@ def apply(self, organizations, parameter_store, config): # pylint: disable=R091
policy,
organization_mapping[path])
policy_id = organizations.list_policies(
'adf-{0}-{1}'.format(policy, path), _type)
f'adf-{policy}-{path}', _type)
organizations.attach_policy(
policy_id, organization_mapping[path])
parameter_store.put_parameter(policy, str(_policies))
Original file line number Diff line number Diff line change
Expand Up @@ -30,16 +30,16 @@ def read_config_files(folder):
def _read_config_file(filename):
accounts = []
try:
with open(filename, 'r') as stream:
with open(filename, mode='r', encoding='utf-8') as stream:
config = yaml.safe_load(stream)
for account in config.get('accounts', []):
accounts.append(Account.load_from_config(account))
return accounts
except Exception as error:
LOGGER.error(
"Could not process %s due to an error: %s. ",
"Could not process %s due to an error: %s",
filename,
error
error,
)
LOGGER.error(
"Make sure the content of YAML files (.yml) are not empty and "
Expand Down
Loading

0 comments on commit 6cabcf7

Please sign in to comment.