Release 5.1.0 - CSP changes #1591

Merged · 5 commits · Dec 21, 2022
40 changes: 35 additions & 5 deletions ansible/artifacts-download.yml
@@ -3,8 +3,38 @@
become: yes
vars_files:
- "{{inventory_dir}}/secrets.yml"
environment:
AZURE_STORAGE_ACCOUNT: "{{sunbird_artifact_storage_account_name}}"
AZURE_STORAGE_SAS_TOKEN: "{{sunbird_artifact_storage_account_sas}}"
roles:
- artifacts-download-azure
tasks:
- name: download artifact from azure storage
include_role:
name: azure-cloud-storage
tasks_from: blob-download.yml
vars:
blob_container_name: "{{ cloud_storage_artifacts_bucketname }}"
blob_file_name: "{{ artifact }}"
local_file_or_folder_path: "{{ artifact_path }}"
storage_account_name: "{{ cloud_artifact_storage_accountname }}"
storage_account_key: "{{ cloud_artifact_storage_secret }}"
when: cloud_service_provider == "azure"

- name: download artifact from gcloud storage
include_role:
name: gcp-cloud-storage
tasks_from: download.yml
vars:
gcp_bucket_name: "{{ cloud_storage_artifacts_bucketname }}"
gcp_path: "{{ artifact }}"
local_file_or_folder_path: "{{ artifact_path }}"
when: cloud_service_provider == "gcloud"

- name: download artifact from aws s3
include_role:
name: aws-cloud-storage
tasks_from: download.yml
vars:
local_file_or_folder_path: "{{ artifact_path }}"
s3_bucket_name: "{{ cloud_storage_artifacts_bucketname }}"
s3_path: "{{ artifact }}"
aws_default_region: "{{ cloud_public_storage_region }}"
aws_access_key_id: "{{ cloud_artifact_storage_accountname }}"
aws_secret_access_key: "{{ cloud_artifact_storage_secret }}"
when: cloud_service_provider == "aws"
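Note: the download play now branches on cloud_service_provider instead of hard-coding Azure. A minimal sketch of the inputs it expects, using the variable names from the diff above; every value below is a placeholder, not a default from this repo.

# Illustrative extra-vars for ansible/artifacts-download.yml after this change
cloud_service_provider: "aws"                       # or "azure" / "gcloud"
artifact: "player.zip"                              # object/blob name to fetch
artifact_path: "/tmp/player.zip"                    # local destination path
cloud_storage_artifacts_bucketname: "my-artifacts"  # bucket or container name
cloud_artifact_storage_accountname: "my-account"    # storage account name / access key id
cloud_artifact_storage_secret: "my-secret"          # account key / secret access key
cloud_public_storage_region: "ap-south-1"           # consumed only by the AWS task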
41 changes: 36 additions & 5 deletions ansible/artifacts-upload.yml
@@ -3,8 +3,39 @@
become: yes
vars_files:
- "{{inventory_dir}}/secrets.yml"
environment:
AZURE_STORAGE_ACCOUNT: "{{sunbird_artifact_storage_account_name}}"
AZURE_STORAGE_SAS_TOKEN: "{{sunbird_artifact_storage_account_sas}}"
roles:
- artifacts-upload-azure
tasks:
- name: upload artifact to azure storage
include_role:
name: azure-cloud-storage
tasks_from: blob-upload.yml
vars:
blob_container_name: "{{ cloud_storage_artifacts_bucketname }}"
container_public_access: "off"
blob_file_name: "{{ artifact }}"
local_file_or_folder_path: "{{ artifact_path }}"
storage_account_name: "{{ cloud_artifact_storage_accountname }}"
storage_account_key: "{{ cloud_artifact_storage_secret }}"
when: cloud_service_provider == "azure"

- name: upload artifact to gcloud storage
include_role:
name: gcp-cloud-storage
tasks_from: upload.yml
vars:
gcp_bucket_name: "{{ cloud_storage_artifacts_bucketname }}"
gcp_path: "{{ artifact }}"
local_file_or_folder_path: "{{ artifact_path }}"
when: cloud_service_provider == "gcloud"

- name: upload artifact to aws s3
include_role:
name: aws-cloud-storage
tasks_from: upload.yml
vars:
local_file_or_folder_path: "{{ artifact_path }}"
s3_bucket_name: "{{ cloud_storage_artifacts_bucketname }}"
s3_path: "{{ artifact }}"
aws_default_region: "{{ cloud_public_storage_region }}"
aws_access_key_id: "{{ cloud_artifact_storage_accountname }}"
aws_secret_access_key: "{{ cloud_artifact_storage_secret }}"
when: cloud_service_provider == "aws"
3 changes: 3 additions & 0 deletions ansible/roles/aws-cloud-storage/defaults/main.yml
@@ -0,0 +1,3 @@
s3_bucket_name: ""
s3_path: ""
local_file_or_folder_path: ""
9 changes: 9 additions & 0 deletions ansible/roles/aws-cloud-storage/tasks/delete-folder.yml
@@ -0,0 +1,9 @@
---
- name: delete files and folders recursively
environment:
AWS_DEFAULT_REGION: "{{ aws_default_region }}"
AWS_ACCESS_KEY_ID: "{{ aws_access_key_id }}"
AWS_SECRET_ACCESS_KEY: "{{ aws_secret_access_key }}"
shell: "aws s3 rm s3://{{ s3_bucket_name }}/{{ s3_path }} --recursive"
async: 3600
poll: 10
9 changes: 9 additions & 0 deletions ansible/roles/aws-cloud-storage/tasks/delete.yml
@@ -0,0 +1,9 @@
---
- name: delete files from s3
environment:
AWS_DEFAULT_REGION: "{{ aws_default_region }}"
AWS_ACCESS_KEY_ID: "{{ aws_access_key_id }}"
AWS_SECRET_ACCESS_KEY: "{{ aws_secret_access_key }}"
shell: "aws s3 rm s3://{{ s3_bucket_name }}/{{ s3_path }}"
async: 3600
poll: 10
9 changes: 9 additions & 0 deletions ansible/roles/aws-cloud-storage/tasks/download.yml
@@ -0,0 +1,9 @@
---
- name: download files from s3
environment:
AWS_DEFAULT_REGION: "{{ aws_default_region }}"
AWS_ACCESS_KEY_ID: "{{ aws_access_key_id }}"
AWS_SECRET_ACCESS_KEY: "{{ aws_secret_access_key }}"
shell: "aws s3 cp s3://{{ s3_bucket_name }}/{{ s3_path }} {{ local_file_or_folder_path }}"
async: 3600
poll: 10
18 changes: 18 additions & 0 deletions ansible/roles/aws-cloud-storage/tasks/main.yml
@@ -0,0 +1,18 @@
---
- name: delete files from aws S3 bucket
include: delete.yml

- name: delete folders from aws S3 bucket recursively
include: delete-folder.yml


- name: download file from S3
include: download.yml

- name: upload files from local to aws S3
include: upload.yml

- name: upload files and folder from local directory to aws S3
include: upload-folder.yml
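Note that main.yml chains every task file, so including the whole role would run delete, download and upload in sequence; the playbooks in this PR instead target a single task file with include_role + tasks_from. A minimal sketch of that calling pattern, mirroring artifacts-upload.yml above (values are placeholders):

- name: upload one build artifact to S3 (illustrative values only)
  include_role:
    name: aws-cloud-storage
    tasks_from: upload.yml
  vars:
    local_file_or_folder_path: "/tmp/build.zip"
    s3_bucket_name: "my-artifacts"
    s3_path: "builds/build.zip"
    aws_default_region: "ap-south-1"
    aws_access_key_id: "{{ cloud_artifact_storage_accountname }}"
    aws_secret_access_key: "{{ cloud_artifact_storage_secret }}"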


9 changes: 9 additions & 0 deletions ansible/roles/aws-cloud-storage/tasks/upload-folder.yml
@@ -0,0 +1,9 @@
---
- name: upload folder to s3
environment:
AWS_DEFAULT_REGION: "{{ aws_default_region }}"
AWS_ACCESS_KEY_ID: "{{ aws_access_key_id }}"
AWS_SECRET_ACCESS_KEY: "{{ aws_secret_access_key }}"
shell: "aws s3 cp {{ local_file_or_folder_path }} s3://{{ s3_bucket_name }}/{{ s3_path }} --recursive"
async: 3600
poll: 10
9 changes: 9 additions & 0 deletions ansible/roles/aws-cloud-storage/tasks/upload.yml
@@ -0,0 +1,9 @@
---
- name: upload files to s3
environment:
AWS_DEFAULT_REGION: "{{ aws_default_region }}"
AWS_ACCESS_KEY_ID: "{{ aws_access_key_id }}"
AWS_SECRET_ACCESS_KEY: "{{ aws_secret_access_key }}"
shell: "aws s3 cp {{ local_file_or_folder_path }} s3://{{ s3_bucket_name }}/{{ s3_path }}"
async: 3600
poll: 10
12 changes: 11 additions & 1 deletion ansible/roles/azure-cloud-storage/tasks/delete-using-azcopy.yml
@@ -1,6 +1,16 @@
---
- name: generate SAS token for azcopy
shell: |
sas_expiry=`date -u -d "1 hour" '+%Y-%m-%dT%H:%MZ'`
sas_token=?`az storage container generate-sas -n {{ blob_container_name }} --account-name {{ storage_account_name }} --account-key {{ storage_account_key }} --https-only --permissions dlrw --expiry $sas_expiry -o tsv`
echo $sas_token
register: sas_token

- set_fact:
container_sas_token: "{{ sas_token.stdout}}"

- name: delete files and folders from azure storage using azcopy
shell: "azcopy rm 'https://{{ storage_account_name }}.blob.core.windows.net/{{ blob_container_name }}{{ blob_container_folder_path }}{{ storage_account_sas_token }}' --recursive"
shell: "azcopy rm 'https://{{ storage_account_name }}.blob.core.windows.net/{{ blob_container_name }}{{ blob_container_folder_path }}{{ container_sas_token }}' --recursive"
environment:
AZCOPY_CONCURRENT_FILES: "10"
async: 10800
14 changes: 12 additions & 2 deletions ansible/roles/azure-cloud-storage/tasks/upload-using-azcopy.yml
@@ -1,12 +1,22 @@
---
- name: generate SAS token for azcopy
shell: |
sas_expiry=`date -u -d "1 hour" '+%Y-%m-%dT%H:%MZ'`
sas_token=?`az storage container generate-sas -n {{ blob_container_name }} --account-name {{ storage_account_name }} --account-key {{ storage_account_key }} --https-only --permissions dlrw --expiry $sas_expiry -o tsv`
echo $sas_token
register: sas_token

- set_fact:
container_sas_token: "{{ sas_token.stdout}}"

- name: create container in azure storage if it doesn't exist
include_role:
name: azure-cloud-storage
tasks_from: container-create.yml

- name: upload files and folders to azure storage using azcopy
shell: "azcopy copy {{ local_file_or_folder_path }} 'https://{{ storage_account_name }}.blob.core.windows.net/{{ blob_container_name }}{{ blob_container_folder_path }}{{ storage_account_sas_token }}' --recursive"
shell: "azcopy copy {{ local_file_or_folder_path }} 'https://{{ storage_account_name }}.blob.core.windows.net/{{ blob_container_name }}{{ blob_container_folder_path }}{{ container_sas_token }}' --recursive"
environment:
AZCOPY_CONCURRENT_FILES: "10"
async: 10800
poll: 10
poll: 10
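With this change the azcopy tasks mint a one-hour SAS token from the account key at run time, so callers now pass storage_account_key instead of a pre-provisioned storage_account_sas_token. A minimal caller sketch under that assumption (the cassandra-backup change later in this diff follows the same pattern; values are placeholders):

- name: push a backup archive with azcopy (illustrative values only)
  include_role:
    name: azure-cloud-storage
    tasks_from: upload-using-azcopy.yml
  vars:
    blob_container_name: "my-backups"
    container_public_access: "off"
    blob_container_folder_path: ""
    local_file_or_folder_path: "/data/backups/backup.tar.gz"
    storage_account_name: "{{ cloud_management_storage_accountname }}"
    storage_account_key: "{{ cloud_management_storage_secret }}"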
9 changes: 2 additions & 7 deletions ansible/roles/cassandra-backup/defaults/main.yml
@@ -1,10 +1,5 @@
cassandra_root_dir: /etc/cassandra
cassandra_backup_dir: /data/cassandra/backup
cassandra_backup_azure_container_name: dp-cassandra-backup

# This variable is added for the below reason -
# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
# or other default files and just assign the value to the newly introduced common variable
# 3. After few releases, we will remove the older variables and use only the new variables across the repos
cassandra_backup_storage: "{{ cassandra_backup_azure_container_name }}"
cloud_storage_dpcassandrabackup_bucketname: "{{cloud_storage_management_bucketname}}"
cloud_storage_dpcassandrabackup_foldername: dp-cassandra-backup
24 changes: 18 additions & 6 deletions ansible/roles/cassandra-backup/tasks/main.yml
@@ -30,22 +30,34 @@
name: azure-cloud-storage
tasks_from: upload-using-azcopy.yml
vars:
blob_container_name: "{{ cassandra_backup_storage }}"
blob_container_name: "{{ cloud_storage_dpcassandrabackup_foldername }}"
container_public_access: "off"
blob_container_folder_path: ""
local_file_or_folder_path: "{{ cassandra_backup_gzip_file_path }}"
storage_account_name: "{{ azure_management_storage_account_name }}"
storage_account_sas_token: "{{ azure_management_storage_account_sas }}"
storage_account_name: "{{ cloud_management_storage_accountname }}"
storage_account_key: "{{ cloud_management_storage_secret }}"
when: cloud_service_provider == "azure"

- name: upload backup to S3
include_role:
name: aws-cloud-storage
tasks_from: upload-folder.yml
vars:
local_file_or_folder_path: "{{ cassandra_backup_gzip_file_path }}"
s3_bucket_name: "{{ cloud_storage_dpcassandrabackup_bucketname }}"
s3_path: "{{ cloud_storage_dpcassandrabackup_foldername }}/{{ cassandra_backup_gzip_file_name}}"
aws_default_region: "{{ cloud_public_storage_region }}"
aws_access_key_id: "{{ cloud_management_storage_accountname }}"
aws_secret_access_key: "{{ cloud_management_storage_secret }}"
when: cloud_service_provider == "aws"

- name: upload file to gcloud storage
include_role:
name: gcp-cloud-storage
tasks_from: upload-batch.yml
vars:
gcp_bucket_name: "{{ gcloud_management_bucket_name }}"
dest_folder_name: "{{ cassandra_backup_storage }}"
dest_folder_path: ""
gcp_bucket_name: "{{ cloud_storage_dpcassandrabackup_bucketname }}"
gcp_path: "{{ cloud_storage_dpcassandrabackup_foldername }}/{{ cassandra_backup_gzip_file_name}}"
local_file_or_folder_path: "{{ cassandra_backup_gzip_file_path }}"
when: cloud_service_provider == "gcloud"

8 changes: 2 additions & 6 deletions ansible/roles/cassandra-restore/defaults/main.yml
@@ -1,8 +1,4 @@
user_home: "/home/{{ ansible_ssh_user }}/"

# This variable is added for the below reason -
# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
# or other default files and just assign the value to the newly introduced common variable
# 3. After few releases, we will remove the older variables and use only the new variables across the repos
cassandra_backup_storage: "{{ cassandra_backup_azure_container_name }}"
cloud_storage_dpcassandrabackup_bucketname: "{{cloud_storage_management_bucketname}}"
cloud_storage_dpcassandrabackup_foldername: 'cassandra-backup'
28 changes: 20 additions & 8 deletions ansible/roles/cassandra-restore/tasks/main.yml
@@ -3,7 +3,6 @@

- set_fact:
artifact_path: "/tmp/{{ artifact }}"
artifacts_container: "{{ cassandra_backup_azure_container_name }}"
cassandra_restore_dir: /tmp/cassandra_backup

- name: download a file from azure storage
@@ -12,22 +11,35 @@
name: azure-cloud-storage
tasks_from: blob-download.yml
vars:
blob_container_name: "{{ cassandra_backup_storage }}"
blob_container_name: "{{ cloud_storage_dpcassandrabackup_foldername }}"
blob_file_name: "{{ artifact }}"
local_file_or_folder_path: "{{ artifact_path }}"
storage_account_name: "{{ azure_management_storage_account_name }}"
storage_account_key: "{{ azure_management_storage_account_key }}"
storage_account_name: "{{ cloud_management_storage_accountname }}"
storage_account_key: "{{ cloud_management_storage_secret }}"
when: cloud_service_provider == "azure"

- name: download a file from aws s3
become: true
include_role:
name: aws-cloud-storage
tasks_from: download.yml
vars:
s3_bucket_name: "{{ cloud_storage_dpcassandrabackup_bucketname }}"
aws_access_key_id: "{{ cloud_management_storage_accountname }}"
aws_secret_access_key: "{{ cloud_management_storage_secret }}"
aws_default_region: "{{ cloud_public_storage_region }}"
local_file_or_folder_path: "{{ artifact_path }}"
s3_path: "{{ cloud_storage_dpcassandrabackup_foldername }}/{{ artifact }}"
when: cloud_service_provider == "aws"

- name: download file from gcloud storage
include_role:
name: gcp-cloud-storage
tasks_from: download.yml
vars:
gcp_bucket_name: "{{ gcloud_management_bucket_name }}"
dest_folder_name: "{{ artifacts_container }}"
dest_file_name: "{{ artifact }}"
local_file_or_folder_path: "{{ cassandra_restore_dir }}"
gcp_bucket_name: "{{ cloud_storage_dpcassandrabackup_bucketname }}"
gcp_path: "{{ cloud_storage_dpcassandrabackup_foldername }}/{{ artifact }}"
local_file_or_folder_path: "{{ artifact_path }}"
when: cloud_service_provider == "gcloud"

- name: extract the archive
4 changes: 2 additions & 2 deletions ansible/roles/es-azure-snapshot/tasks/main.yml
@@ -13,8 +13,8 @@
vars:
blob_container_name: "{{ es_backup_storage }}"
container_public_access: "off"
storage_account_name: "{{ azure_management_storage_account_name }}"
storage_account_key: "{{ azure_management_storage_account_key }}"
storage_account_name: "{{ cloud_management_storage_accountname }}"
storage_account_key: "{{ cloud_management_storage_secret }}"

- name: Create Azure Repository
uri:
6 changes: 3 additions & 3 deletions ansible/roles/gcp-cloud-storage/defaults/main.yml
@@ -10,8 +10,8 @@ gcp_storage_key_file: ""

# Folder name in GCP bucket
# Example -
# dest_folder_name: "my-destination-folder"
dest_folder_name: ""
# gcp_path: "my-destination-folder"
gcp_path: ""

# The delete pattern to delete files and folder
# Example -
@@ -36,7 +36,7 @@ dest_file_name: ""

# The folder path in gcloud storage to upload the files starting from the root of the bucket
# This path should start with / if we provide a value for this variable since we are going to append this path as below
# {{ bucket_name }}{{ dest_folder_name }}
# {{ bucket_name }}{{ gcp_path }}
# The above translates to "my-bucket/my-folder-path"
# Example -
# dest_folder_path: "/my-folder/json-files-folder"
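The gsutil tasks now take a single gcp_path (the full object or folder path inside the bucket) where they previously combined dest_folder_name and dest_file_name. A rough illustration of how the download task resolves, assuming placeholder values:

gcp_bucket_name: "my-bucket"                       # placeholder bucket name
gcp_path: "dp-cassandra-backup/backup.tar.gz"      # full path inside the bucket
local_file_or_folder_path: "/tmp/backup.tar.gz"    # local destination
# download.yml below then runs roughly:
#   gsutil cp "gs://my-bucket/dp-cassandra-backup/backup.tar.gz" "/tmp/backup.tar.gz"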
4 changes: 2 additions & 2 deletions ansible/roles/gcp-cloud-storage/tasks/download.yml
@@ -3,9 +3,9 @@
include_tasks: gcloud-auth.yml

- name: Download from gcloud storage
shell: gsutil cp "gs://{{ gcp_bucket_name }}/{{ dest_folder_name }}/{{ dest_file_name }}" "{{ local_file_or_folder_path }}"
shell: gsutil cp "gs://{{ gcp_bucket_name }}/{{ gcp_path }}" "{{ local_file_or_folder_path }}"
async: 3600
poll: 10

- name: Revoke gcloud access
include_tasks: gcloud-revoke.yml
include_tasks: gcloud-revoke.yml
2 changes: 1 addition & 1 deletion ansible/roles/gcp-cloud-storage/tasks/upload-batch.yml
@@ -3,7 +3,7 @@
include_tasks: gcloud-auth.yml

- name: Upload files from a local directory to gcp storage
shell: gsutil -m cp -r "{{ local_file_or_folder_path }}" "gs://{{ gcp_bucket_name }}/{{ dest_folder_name }}/{{ dest_folder_path }}"
shell: gsutil -m cp -r "{{ local_file_or_folder_path }}" "gs://{{ gcp_bucket_name }}/{{ gcp_path}}"
async: 3600
poll: 10

2 changes: 1 addition & 1 deletion ansible/roles/gcp-cloud-storage/tasks/upload.yml
@@ -3,7 +3,7 @@
include_tasks: gcloud-auth.yml

- name: Upload to gcloud storage
shell: gsutil cp "{{ local_file_or_folder_path }}" "gs://{{ gcp_bucket_name }}/{{ dest_folder_name }}/{{ dest_file_name }}"
shell: gsutil cp "{{ local_file_or_folder_path }}" "gs://{{ gcp_bucket_name }}/{{ gcp_path }}"
async: 3600
poll: 10

9 changes: 2 additions & 7 deletions ansible/roles/influxdb_backup/defaults/main.yml
@@ -2,11 +2,6 @@ influxdb_backup_dir: /tmp/influxdb_backup
influxdb_backup_databases: ["monitoring_events"]
influxdb_backup_file_prefix: influxdb_backup
influxdb_backup_file_name: "{{ influxdb_backup_file_prefix }}-{{ ansible_date_time.date }}-{{ ansible_date_time.hour }}-{{ ansible_date_time.minute }}-{{ ansible_date_time.second }}"
azure_influxdb_backup_container: influxdb-backup

# This variable is added for the below reason -
# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
# or other default files and just assign the value to the newly introduced common variable
# 3. After few releases, we will remove the older variables and use only the new variables across the repos
influxdb_backup_storage: "{{ azure_influxdb_backup_container }}"
cloud_storage_influxdbbackup_bucketname: "{{ cloud_storage_management_bucketname }}"
cloud_storage_influxdbbackup_foldername: influxdb-backup