-
Notifications
You must be signed in to change notification settings - Fork 1
190 lines (174 loc) · 6.95 KB
/
deploy_terraform.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
---
# Reusable workflow (workflow_call): applies the Data-Platform Terraform for a
# caller-selected environment, then runs post-deploy tasks (Redshift
# configuration and ECR image builds) depending on which build_path was deployed.
name: Deploy Data-Platform Terraform

env:
  # Single region used by every deployment; passed through to the
  # terraform-deploy composite action below.
  aws_deploy_region: "eu-west-2"

on:
  workflow_call:
    inputs:
      terraform_infra_import:
        description: "Terraform import statements"
        required: false
        default: ""
        type: string
      terraform_infra_remove:
        description: "Terraform state rm statements"
        required: false
        default: ""
        type: string
      environment:
        description: "Environment to deploy to"
        required: true
        type: string
      automation_build_url:
        description: "Link to where the build pipeline is defined"
        type: string
        required: true
      # Terraform working directory to deploy, e.g. ./terraform/core or
      # ./terraform/etl — also gates the conditional post-deploy steps.
      build_path:
        required: true
        type: string
      terraform_state_s3_key_prefix:
        required: true
        type: string
      terraform_state_file_name:
        required: true
        type: string
    secrets:
      GOOGLE_PROJECT_ID:
        required: true
      AWS_DEPLOY_ACCOUNT_ID:
        description: "ID of Account being deployed to"
        required: true
      INFRASTRUCTURE_PRIVATE_KEY:
        required: true
      AWS_ACCESS_KEY_ID:
        required: true
      AWS_SECRET_ACCESS_KEY:
        required: true
      AWS_API_ACCOUNT_PROD:
        required: true
      AWS_MOSAIC_PROD_ACCOUNT_ID:
        required: true
      AWS_DATA_PLATFORM_ACCOUNT_ID:
        required: true
      AWS_HACKIT_ACCOUNT_ID:
        required: true
      AWS_SANDBOX_ACCOUNT_ID:
        required: true
      AWS_ROLE_TO_ASSUME:
        required: true
      AWS_API_VPC_ID:
        required: true
      AWS_HOUSING_VPC_ID:
        required: true
      AWS_MOSAIC_VPC_ID:
        required: true
      AWS_DP_VPC_ID:
        required: true
      GOOGLE_CREDENTIALS:
        required: true
      COPY_LIBERATOR_TO_PRE_PROD_LAMBDA_EXECUTION_ROLE:
        required: true
      PRE_PRODUCTION_LIBERATOR_DATA_STORAGE_KMS_KEY_ARN:
        required: true
      PRODUCTION_FIREWALL_IP:
        required: true
      TERRAFORM_SECRET_TOKEN:
        required: true

jobs:
  deploy:
    name: Terraform Apply
    # NOTE(review): ubuntu-20.04 hosted runners are deprecated by GitHub;
    # consider moving to ubuntu-22.04 or ubuntu-latest (behavior-affecting,
    # so not changed here).
    runs-on: ubuntu-20.04
    # Binds the job to the caller-supplied GitHub environment so its
    # protection rules and environment secrets apply.
    environment: ${{ inputs.environment }}
    steps:
      - name: Checkout Source
        uses: actions/checkout@v3

      - name: Set Github Auth
        # Rewrite HTTPS github.com URLs to embed the token, so private
        # module/repo fetches authenticate without interactive credentials.
        run: git config --global url."https://oauth2:${{ secrets.TERRAFORM_SECRET_TOKEN }}@github.com".insteadOf https://github.com
        shell: bash

      - name: Install Terraform
        uses: hashicorp/[email protected]
        with:
          # Disable the wrapper so later steps get raw terraform output
          # (the Redshift step parses `terraform output -json`).
          terraform_wrapper: false
          terraform_version: 1.2.0

      - name: Add a key to allow access to Infastructure
        run: |
          mkdir -p ~/.ssh
          ssh-keyscan github.com >> ~/.ssh/known_hosts
          echo "${{ secrets.INFRASTRUCTURE_PRIVATE_KEY }}" > ~/.ssh/id_rsa
          chmod 400 ~/.ssh/id_rsa ~/.ssh/known_hosts

      - name: Setup Node.js 14
        uses: actions/setup-node@v3
        with:
          node-version: "14"

      - name: Install pipenv
        run: |
          sudo apt-get update
          sudo apt-get install pipenv

      - name: Set up Google Cloud Credentials
        # Materialise the service-account JSON for Terraform's Google provider.
        run: |
          echo $GOOGLE_CREDENTIALS >> ./google_service_account_creds.json
        shell: bash
        env:
          GOOGLE_CREDENTIALS: ${{ secrets.GOOGLE_CREDENTIALS }}

      - name: Install Dependencies for rds-database-snapshot-replicator Lambda
        working-directory: "./lambdas/rds-database-snapshot-replicator"
        run: npm install

      # - name: Install Dependencies for g-drive-to-s3 Lambda
      #   working-directory: "./lambdas/g_drive_to_s3"
      #   run: make install-requirements

      - name: Download External Python Dependencies
        working-directory: "./external-lib"
        run: make all

      - name: Package AWS Glue Job Helpers Module & Python modules
        working-directory: "."
        run: make package-helpers

      - name: Setup deployment user profile
        # Build an AWS CLI profile that assumes the deploy role via the
        # default credentials configured from the repo secrets.
        run: |
          aws configure set default.region eu-west-2 > /dev/null 2>&1
          echo "[profile deploy_role]" >> ~/.aws/config
          echo "role_arn=arn:aws:iam::${{ secrets.AWS_DEPLOY_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_TO_ASSUME }}" >> ~/.aws/config
          echo "source_profile=default" >> ~/.aws/config
          echo "role_session_name=deploy" >> ~/.aws/config
          echo "region=eu-west-2" >> ~/.aws/config

      - name: Deploy Data Platform in ${{ inputs.build_path }}
        uses: ./.github/actions/terraform-deploy
        with:
          aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws_deploy_region: ${{ env.aws_deploy_region }}
          terraform_state_s3_key_prefix: ${{ inputs.terraform_state_s3_key_prefix }}
          terraform_state_file_name: ${{ inputs.terraform_state_file_name }}
          build_path: ${{ inputs.build_path }}
          automation_build_url: ${{ inputs.automation_build_url }}
          environment: ${{ inputs.environment }}
          google_project_id: ${{ secrets.GOOGLE_PROJECT_ID }}
          aws_deploy_account_id: ${{ secrets.AWS_DEPLOY_ACCOUNT_ID }}
          aws_api_account_id: ${{ secrets.AWS_API_ACCOUNT_PROD }}
          aws_mosaic_prod_account_id: ${{ secrets.AWS_MOSAIC_PROD_ACCOUNT_ID }}
          aws_data_platform_account_id: ${{ secrets.AWS_DATA_PLATFORM_ACCOUNT_ID }}
          aws_sandbox_account_id: ${{ secrets.AWS_SANDBOX_ACCOUNT_ID }}
          aws_api_vpc_id: ${{ secrets.AWS_API_VPC_ID }}
          aws_housing_vpc_id: ${{ secrets.AWS_HOUSING_VPC_ID }}
          aws_mosaic_vpc_id: ${{ secrets.AWS_MOSAIC_VPC_ID }}
          aws_dp_vpc_id: ${{ secrets.AWS_DP_VPC_ID }}
          aws_hackit_account_id: ${{ secrets.AWS_HACKIT_ACCOUNT_ID }}
          aws_deploy_iam_role_name: ${{ secrets.AWS_ROLE_TO_ASSUME }}
          copy_liberator_to_pre_prod_lambda_execution_role: ${{ secrets.COPY_LIBERATOR_TO_PRE_PROD_LAMBDA_EXECUTION_ROLE }}
          pre_production_liberator_data_storage_kms_key_arn: ${{ secrets.PRE_PRODUCTION_LIBERATOR_DATA_STORAGE_KMS_KEY_ARN }}
          production_firewall_ip: ${{ secrets.PRODUCTION_FIREWALL_IP }}
          branch: "main"

      - name: Configure Redshift users, external schemas and user permissions for external schema
        working-directory: "./terraform/etl"
        if: ${{ (inputs.build_path == './terraform/etl') && (success()) }}
        # NOTE(review): .venv/bin/activate is sourced but its creation is not
        # visible in this workflow — presumably produced by an earlier make
        # target; confirm before relying on it.
        run: |
          terraform_outputs=$(terraform output -json)
          cd ../../
          source .venv/bin/activate
          AWS_PROFILE=deploy_role python ./scripts/configure_redshift.py "$terraform_outputs"

      - name: Build and Deploy SQL to Parquet Image to ECR
        if: ${{ (inputs.build_path == './terraform/core') && (success()) }}
        run: |
          AWS_PROFILE=deploy_role ./docker/sql-to-parquet/deploy.sh

      - name: Build and Deploy Pre-Production clean up job Image to ECR
        if: ${{ (inputs.build_path == './terraform/core') && (success()) }}
        run: |
          ENVIRONMENT=${{ inputs.environment }} AWS_PROFILE=deploy_role ./docker/pre-production-data-cleanup/deploy.sh