[DO NOT APPROVE] New deploys #670

Open · wants to merge 18 commits into base: temporal
24 changes: 24 additions & 0 deletions .idea/codeStyles/Project.xml

Some generated files are not rendered by default.

18 changes: 17 additions & 1 deletion api/controller/aws/controllers.py
@@ -255,6 +255,7 @@ class InfraTearDown(BaseHandler):
@authenticated
@gen.coroutine
def post(self):

teardown_nodes = self.json["teardown_nodes"]

credentials = self.get_authenticated_user_cloud_configuration()
@@ -367,6 +368,8 @@ def rollback_deployment(self, deployment_diagram, credentials, exceptions, code=
def do_diagram_deployment(self, project_name, project_id, diagram_data, project_config, force_redeploy):
credentials = self.get_authenticated_user_cloud_configuration()

org_id = self.get_authenticated_user_org().id

latest_deployment = self.dbsession.query(Deployment).filter_by(
project_id=project_id
).order_by(
@@ -377,6 +380,12 @@ def do_diagram_deployment(self, project_name, project_id, diagram_data, project_
if not force_redeploy and latest_deployment is not None:
latest_deployment_json = json.loads(latest_deployment.deployment_json)

# Kill the current session because deployment can take a very long time.
# A new session will be automatically opened when the session is grabbed again.
self.dbsession.close()

self._dbsession = None

# Model a deployment in memory to handle the deployment of each state
deployment_diagram: AwsDeployment = AwsDeployment(
project_id,
@@ -440,8 +449,15 @@ def do_diagram_deployment(self, project_name, project_id, diagram_data, project_

raise gen.Return()

serialized_deployment = deployment_diagram.serialize()

new_deployment = Deployment(id=deployment_diagram.deployment_id)
new_deployment.organization_id = org_id
new_deployment.project_id = project_id
new_deployment.deployment_json = json.dumps(
serialized_deployment
@@ -452,7 +468,7 @@ def do_diagram_deployment(self, project_name, project_id, diagram_data, project_
)

deployment_log = DeploymentLog()
deployment_log.org_id = org.id
deployment_log.org_id = org_id

self.dbsession.add(deployment_log)
self.dbsession.commit()
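Note on the session handling added in this hunk: closing self.dbsession and clearing self._dbsession only works if the handler re-creates its session lazily on the next access. A minimal sketch of that pattern, assuming a SQLAlchemy sessionmaker; the class and factory names below are hypothetical and not taken from this codebase:

from sqlalchemy.orm import sessionmaker

Session = sessionmaker()  # hypothetical factory, bound to an engine elsewhere

class SessionOwnerSketch:
    _dbsession = None

    @property
    def dbsession(self):
        # After _dbsession is set to None around a long-running deployment,
        # the next access transparently opens a fresh session.
        if self._dbsession is None:
            self._dbsession = Session()
        return self._dbsession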
2 changes: 1 addition & 1 deletion api/controller/services/controllers.py
@@ -674,7 +674,7 @@ def get(self, account_id=None):
# Convert terraform plan terminal output to HTML
ansiconverter = Ansi2HTMLConverter()
terraform_output_html = ansiconverter.convert(
terraform_plan_output
terraform_plan_output.decode("utf-8")
)

rendered_html_output = response_html_template.format(
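The decode fix above matters because the Terraform plan output arrives as bytes, while Ansi2HTMLConverter.convert expects str. A minimal reproduction, with sample bytes invented for illustration and assuming the ansi2html package this controller already uses:

from ansi2html import Ansi2HTMLConverter

terraform_plan_output = b"\x1b[32m+ aws_lambda_function.example\x1b[0m\n"
terraform_output_html = Ansi2HTMLConverter().convert(
    terraform_plan_output.decode("utf-8")
)
print(terraform_output_html[:80])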
42 changes: 42 additions & 0 deletions api/data_types/lambda_config.py
@@ -0,0 +1,42 @@
from functools import cached_property
from hashlib import sha256
from json import dumps


class LambdaConfig:
def __init__(self, name, runtime, code, libraries, env, shared_files, role, memory, handler, is_inline_execution, max_execution_time, tags, layers):
self.name = name
self.runtime = runtime
self.code = code
self.libraries = libraries
self.env = env
self.shared_files = shared_files
self.role = role
self.handler = handler
self.is_inline_execution = is_inline_execution
self.max_execution_time = int(max_execution_time)
self.memory = int(memory)
self.tags = tags
self.layers = layers
self.description = "A Lambda deployed by refinery"

@cached_property
def uid(self):
attrs = [
self.name,
self.runtime,
self.code,
dumps(self.libraries, sort_keys=True),
dumps(self.env, sort_keys=True),
dumps(self.shared_files, sort_keys=True),
self.role,
self.handler,
self.is_inline_execution,
self.max_execution_time,
self.memory,
self.tags,
self.layers,
self.description
]
template = '{}' * len(attrs)
return sha256(template.format(*attrs).encode("UTF-8")).hexdigest()
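A quick sketch of how uid is intended to behave: configs built from the same inputs hash to the same digest, so callers can detect an unchanged Lambda. All values below are invented for illustration only:

example_kwargs = dict(
    name="example", runtime="python3.6", code="print('hi')",
    libraries=["requests"], env={"STAGE": "dev"}, shared_files=[],
    role="arn:aws:iam::123456789012:role/example", memory=128,
    handler="lambda_function.handler", is_inline_execution=False,
    max_execution_time=30, tags={"team": "example"}, layers=[],
)
assert LambdaConfig(**example_kwargs).uid == LambdaConfig(**example_kwargs).uid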
23 changes: 23 additions & 0 deletions api/resources/aws_api_endpoint.py
@@ -0,0 +1,23 @@
from data_types.aws_api_gateway_config import AwsApiGatewayConfig
from resources.base import Resource
from tasks.api_gateway import create_resource


class AWSAPIEndpoint(Resource):
def __init__(self, credentials, config: AwsApiGatewayConfig):
self.credentials = credentials
self.config = config

def deploy(self):
return create_resource(
self.credentials,
self.config.gateway_id,
self.config.parent_id,
self.config.path_part
)

def teardown(self):
pass

def uid(self):
return self.config.uid
148 changes: 148 additions & 0 deletions api/resources/aws_lambda.py
@@ -0,0 +1,148 @@
from assistants.decorators import aws_exponential_backoff
from botocore.exceptions import ClientError
from data_types.lambda_config import LambdaConfig
from functools import cached_property
from hashlib import sha256
from json import dumps
from resources.base import Resource
from tasks.build.temporal.python import Python36Builder
from tasks.build.temporal.nodejs import NodeJs12Builder
from tasks.s3 import s3_object_exists
from utils.shared_files import (
add_shared_files_symlink_to_zip, add_shared_files_to_zip
)
from utils.wrapped_aws_functions import lambda_delete_function


BUILDERS = [
Python36Builder,
NodeJs12Builder
]
RUNTIME_TO_BUILDER = {b.RUNTIME_PRETTY_NAME: b for b in BUILDERS}


class AWSLambda(Resource):
Reviewer comment (Collaborator): I like that all modifications of self are done in the properties here, and only really as a performance optimization. That's nice.

def __init__(self, app_config, aws_client_factory, credentials, lambda_config: LambdaConfig):
self.app_config = app_config
self.aws_client_factory = aws_client_factory
self.credentials = credentials
self.lambda_config = lambda_config

@property
def builder(self):
return RUNTIME_TO_BUILDER[self.lambda_config.runtime]

@cached_property
def s3_path(self):
uid_input = bytes("{}{}{}{}".format(
self.lambda_config.runtime,
self.lambda_config.code,
dumps(self.lambda_config.shared_files, sort_keys=True),
sorted(self.lambda_config.libraries),
), encoding='UTF-8')

return sha256(uid_input).hexdigest() + ".zip"

@property
def exists_in_s3(self):
return s3_object_exists(
self.aws_client_factory,
self.credentials,
self.credentials["lambda_packages_bucket"],
self.s3_path
)

@cached_property
def s3_client(self):
return self.aws_client_factory.get_aws_client(
"s3",
self.credentials
)

@cached_property
def lambda_client(self):
return self.aws_client_factory.get_aws_client(
"lambda",
self.credentials
)

@property
def uid(self):
return self.lambda_config.uid

def deploy(self):
zip_data = self.get_zip_data()
self.upload_to_s3(zip_data)

return self.deploy_lambda()

def get_zip_data(self):
builder = self.builder(
self.app_config,
self.aws_client_factory,
self.credentials,
self.lambda_config.code,
self.lambda_config.libraries
)
zip_data = builder.build()

return self.apply_shared_files(zip_data)

def upload_to_s3(self, zip_data):
if self.exists_in_s3:
return

# Write the built package to the S3 cache
self.s3_client.put_object(
Key=self.s3_path,
Bucket=self.credentials["lambda_packages_bucket"],
Body=zip_data,
)

def apply_shared_files(self, zip_data):
if self.lambda_config.is_inline_execution:
# For inline executions we don't add the shared files folder to the
# package because we'll be live injecting them into /tmp/, so only a
# symlink is added.
return add_shared_files_symlink_to_zip(zip_data)
else:
# Otherwise, add the shared files to the Lambda package as well.
return add_shared_files_to_zip(zip_data, self.lambda_config.shared_files)

@aws_exponential_backoff(allowed_errors=["ResourceConflictException"])
def deploy_lambda(self):
try:
return self.create_lambda_function()
except ClientError as e:
if e.response["Error"]["Code"] == "ResourceConflictException":
# Delete the existing lambda, then re-raise so the backoff decorator retries the create
lambda_delete_function(self.lambda_client, self.lambda_config.name)

raise

def create_lambda_function(self):
# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/lambda.html#Lambda.Client.create_function
response = self.lambda_client.create_function(
FunctionName=self.lambda_config.name,
Runtime=self.lambda_config.runtime,
Role=self.lambda_config.role,
Handler=self.lambda_config.handler,
Code={
"S3Bucket": self.credentials["lambda_packages_bucket"],
"S3Key": self.s3_path,
},
Description=self.lambda_config.description,
Timeout=self.lambda_config.max_execution_time,
MemorySize=self.lambda_config.memory,
Publish=True,
VpcConfig={},
Environment={
"Variables": self.lambda_config.env.copy()
},
Tags=self.lambda_config.tags,
Layers=self.lambda_config.layers
)

return response['FunctionArn']

def teardown(self):
lambda_delete_function(self.lambda_client, self.lambda_config.name)
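Regarding the reviewer note above about confining mutation of self to the properties: functools.cached_property gives exactly that memoization without explicit attribute bookkeeping. A self-contained toy sketch of the pattern (not code from this PR):

from functools import cached_property

class ClientHolder:
    @cached_property
    def client(self):
        print("building client")  # runs only on first access
        return object()

holder = ClientHolder()
assert holder.client is holder.client  # "building client" is printed once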
5 changes: 5 additions & 0 deletions api/resources/aws_queue.py
@@ -0,0 +1,5 @@
from resources.base import Resource


class AWSQueue(Resource):
pass
5 changes: 5 additions & 0 deletions api/resources/aws_timer.py
@@ -0,0 +1,5 @@
from resources.base import Resource


class AWSTimer(Resource):
pass
15 changes: 15 additions & 0 deletions api/resources/base.py
@@ -0,0 +1,15 @@
from abc import ABC, abstractmethod

class Resource(ABC):
@abstractmethod
def deploy(self):
pass

@abstractmethod
def teardown(self):
pass

@property
@abstractmethod
def uid(self):
pass
Empty file.
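One thing the abstract base class above buys: a subclass that omits any of the abstract members cannot be instantiated, which surfaces incomplete resources early. A small illustration with a deliberately broken, hypothetical subclass:

from resources.base import Resource

class BrokenResource(Resource):
    def deploy(self):
        return None
    # teardown() and the uid property are intentionally missing.

try:
    BrokenResource()
except TypeError as error:
    print(error)  # Can't instantiate abstract class BrokenResource ...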
6 changes: 5 additions & 1 deletion api/tasks/aws_lambda.py
@@ -756,8 +756,13 @@ def clean_lambda_iam_policies(aws_client_factory, credentials, lambda_name):
for statement in existing_lambda_statements:
# Try to extract API gateway
try:
# example source_arn: "arn:aws:execute-api:us-west-2:944870815613:i46abzd8cg/*/POST/L2BSF/AcceptanceConsent/SignRequestWebhook"
source_arn = statement["Condition"]["ArnLike"]["AWS:SourceArn"]
arn_parts = source_arn.split(":")
arn_name = arn_parts[5]

arn_name_parts = arn_name.split("/")
api_gateway_id = arn_name_parts[0]
except BaseException:
continue

@@ -766,7 +771,6 @@ def clean_lambda_iam_policies(aws_client_factory, credentials, lambda_name):
continue

try:
api_gateway_id = arn_parts[5]
api_gateway_data = api_gateway_get_rest_api(
api_gateway_client,
rest_api_id=api_gateway_id
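Working through the example ARN from the comment in this hunk: splitting on ':' puts the resource portion at index 5, and the API Gateway ID is everything before the first '/'.

source_arn = "arn:aws:execute-api:us-west-2:944870815613:i46abzd8cg/*/POST/L2BSF/AcceptanceConsent/SignRequestWebhook"
arn_name = source_arn.split(":")[5]      # "i46abzd8cg/*/POST/..."
api_gateway_id = arn_name.split("/")[0]  # "i46abzd8cg"
assert api_gateway_id == "i46abzd8cg"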
15 changes: 15 additions & 0 deletions api/utils/ipc.py
@@ -0,0 +1,15 @@
from subprocess import Popen, PIPE


def popen_communicate(cmd, cwd=None):
process_handler = Popen(
cmd,
stdout=PIPE,
stderr=PIPE,
shell=False,
universal_newlines=True,
cwd=cwd,
)

# Returns tuple (stdout, stderr)
return process_handler.communicate()
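A usage sketch for popen_communicate; the command and working directory are chosen only for illustration. Because universal_newlines=True both streams come back as text, and shell=False means the command must be passed as an argument list:

from utils.ipc import popen_communicate

stdout, stderr = popen_communicate(["git", "status", "--short"], cwd="/tmp")
if stderr:
    print("command reported errors:", stderr)
print(stdout)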
6 changes: 4 additions & 2 deletions api/utils/ngrok.py
@@ -39,14 +39,16 @@ def start_ngrok_tunnel(self, port):
shell=False,
universal_newlines=True,
)
stdout, tmp = process_handler.communicate()
stdout, stderr = process_handler.communicate()
logit("ngrok stdout: ", stdout)
logit("ngrok stderr: ", stderr)

@run_on_executor
def get_ngrok_tunnel_hostname(self):
ngrok_url = False

while not ngrok_url:
logit("Querying the ngrok API server for exposed endpoint URL...")
logit("Querying the ngrok API server for exposed endpoint URL... (if you see this many times, check ngrok)")

try:
response = requests.get(