From 8ee9dcc36a905073263c144f1098a516fcaf9c63 Mon Sep 17 00:00:00 2001
From: Madison Swain-Bowden
Date: Fri, 25 Nov 2022 09:59:45 -0700
Subject: [PATCH] Add basic deployment DAG (#14)

* Add SSH & HTTP connections
* Rename env template
* Add Airflow logs bucket to startup
* Add basic deployment DAG
* Add CODEOWNERS
* Add other services
* Add IS_PROD variable setting
---
 env.template => .env.template                     |  7 +++
 .github/CODEOWNERS                                |  1 +
 docker-compose.dev.yml                            |  2 +-
 justfile                                          |  2 +-
 techbloc_airflow/dags/constants.py                |  5 ++
 .../dags/deployments/deployment_dags.py           | 57 +++++++++++++++++++
 6 files changed, 72 insertions(+), 2 deletions(-)
 rename env.template => .env.template (91%)
 create mode 100644 .github/CODEOWNERS
 create mode 100644 techbloc_airflow/dags/constants.py
 create mode 100644 techbloc_airflow/dags/deployments/deployment_dags.py

diff --git a/env.template b/.env.template
similarity index 91%
rename from env.template
rename to .env.template
index 869dcca..f095059 100644
--- a/env.template
+++ b/.env.template
@@ -33,6 +33,13 @@ AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=sqlite:////opt/airflow/db/airflow.db
 # Replace "access_key" and "secret+key" with the real values. Secret key must be URL-encoded
 AIRFLOW_CONN_AWS_DEFAULT=aws://test_key:test_secret@?region_name=us-east-1&endpoint_url=http://s3:5000
 
+# SSH connections
+AIRFLOW_CONN_SSH_MASTODON_CONN_ID=ssh://user@service
+AIRFLOW_CONN_SSH_MONOLITH_CONN_ID=ssh://user@service
+# HTTP connections
+AIRFLOW_CONN_MATRIX_WEBHOOK=https://matrix-webhook
+AIRFLOW_VAR_MATRIX_WEBHOOK_API_KEY=api_key
+
 S3_LOCAL_ENDPOINT=http://s3:5000
 
 AWS_CONN_ID=aws_default
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 0000000..74dfdfc
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1 @@
+* @OrcaCollective/team
diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml
index 69a6390..6961cbf 100644
--- a/docker-compose.dev.yml
+++ b/docker-compose.dev.yml
@@ -34,7 +34,7 @@ services:
       MINIO_ROOT_USER: ${AWS_ACCESS_KEY}
       MINIO_ROOT_PASSWORD: ${AWS_SECRET_KEY}
       # Comma separated list of buckets to create on startup
-      BUCKETS_TO_CREATE: spd-lookup,airflow
+      BUCKETS_TO_CREATE: spd-lookup,airflow,techbloc-airflow-logs
       # Create empty buckets on every container startup
       # Note: $0 is included in the exec because "/bin/bash -c" swallows the first
       # argument, so it must be re-added at the beginning of the exec call
diff --git a/justfile b/justfile
index 95f88fc..8b078d6 100644
--- a/justfile
+++ b/justfile
@@ -40,7 +40,7 @@ install: check-py-version
 
 # Create the .env file from the template
 dotenv:
-    @([ ! -f .env ] && cp env.template .env) || true
+    @([ ! -f .env ] && cp .env.template .env) || true
 
 # Run docker compose with the specified command
 _dc *args:
diff --git a/techbloc_airflow/dags/constants.py b/techbloc_airflow/dags/constants.py
new file mode 100644
index 0000000..3909652
--- /dev/null
+++ b/techbloc_airflow/dags/constants.py
@@ -0,0 +1,5 @@
+SSH_MASTODON_CONN_ID = "ssh_mastodon"
+SSH_MONOLITH_CONN_ID = "ssh_monolith"
+
+MATRIX_WEBHOOK_CONN_ID = "matrix_webhook"
+MATRIX_WEBHOOK_API_KEY = "matrix_webhook_api_key"
diff --git a/techbloc_airflow/dags/deployments/deployment_dags.py b/techbloc_airflow/dags/deployments/deployment_dags.py
new file mode 100644
index 0000000..66a8fc3
--- /dev/null
+++ b/techbloc_airflow/dags/deployments/deployment_dags.py
@@ -0,0 +1,57 @@
+import json
+from datetime import datetime
+
+import constants
+from airflow.decorators import dag
+from airflow.providers.http.operators.http import SimpleHttpOperator
+from airflow.providers.ssh.operators.ssh import SSHOperator
+
+
+SERVICES = [
+    "1-312-hows-my-driving",
+    "spd-data-watch",
+    "OpenOversight",
+    "spd-lookup",
+]
+
+
+for service in SERVICES:
+    service_name = service.replace("-", "_")
+    dag_id = f"deploy_{service_name}"
+
+    @dag(
+        dag_id=dag_id,
+        start_date=datetime(2022, 11, 24),
+        catchup=False,
+        schedule=None,
+        tags=["deployment"],
+    )
+    def deployment_dag():
+
+        ssh_deploy = SSHOperator(
+            task_id=f"deploy_{service_name}",
+            ssh_conn_id=constants.SSH_MONOLITH_CONN_ID,
+            command="cd {{ params.service }} && just deploy",
+            # Note that AcceptEnv has to be set for IS_PROD on the host
+            # or this will fail silently!!
+            # https://airflow.apache.org/docs/apache-airflow-providers-ssh/stable/_api/airflow/providers/ssh/operators/ssh/index.html#airflow.providers.ssh.operators.ssh.SSHOperator # noqa
+            environment={"IS_PROD": "true"},
+            params={
+                "service": service,
+            },
+        )
+
+        matrix_alert = SimpleHttpOperator(
+            task_id=f"notify_{service_name}_deploy",
+            http_conn_id=constants.MATRIX_WEBHOOK_CONN_ID,
+            data=json.dumps(
+                {
+                    "key": "{{ var.value.matrix_webhook_api_key }}",
+                    "body": f"Deployment complete for `{service}`",
+                }
+            ),
+        )
+
+        ssh_deploy >> matrix_alert
+
+    deployment_dag()
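
The AcceptEnv caveat in the SSHOperator comment above is an OpenSSH server setting: the deploy host has to explicitly accept the IS_PROD variable that the operator sends, otherwise `just deploy` runs without it and nothing errors. A minimal sketch of the relevant entry in the host's sshd_config (file path and reload command are assumptions, not part of this patch):

    # /etc/ssh/sshd_config on the deploy host (assumed location)
    # Allow the IS_PROD environment variable sent by Airflow's SSHOperator
    AcceptEnv IS_PROD

After changing it, sshd needs a reload (e.g. `systemctl reload sshd` on a systemd host).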