Add workflow to deploy environment with CDR infrastructure (#2457)
gurevichdmitry authored Sep 3, 2024
1 parent 7e2ff45 commit c441c7c
Showing 14 changed files with 258 additions and 31 deletions.
47 changes: 47 additions & 0 deletions .ci/scripts/remote_setup.sh
@@ -0,0 +1,47 @@
#!/bin/bash

# Default values
user="ubuntu"

# Parse options
while getopts ":k:s:u:h:d:c:" opt; do
  case $opt in
    k)
      key="$OPTARG"
      ;;
    s)
      src_file="$OPTARG"
      ;;
    u)
      user="$OPTARG"
      ;;
    h)
      host="$OPTARG"
      ;;
    d)
      dest_file="$OPTARG"
      ;;
    c)
      command="$OPTARG"
      ;;
    \?)
      echo "Invalid option -$OPTARG" >&2
      exit 1
      ;;
  esac
done

# Ensure all mandatory parameters are provided
if [ -z "$key" ] || [ -z "$src_file" ] || [ -z "$host" ] || [ -z "$dest_file" ] || [ -z "$command" ]; then
echo "Usage: $0 -k <key> -s <source_file> -h <host> -d <destination_file> -c <command> [-u <user>]"
exit 1
fi

# Set the permission for the key file
chmod 600 "$key"

# Copy the file to the EC2/VM instance
scp -o StrictHostKeyChecking=no -v -i "$key" "$src_file" "$user@$host:$dest_file"

# Run the command on the remote EC2/VM instance
ssh -o StrictHostKeyChecking=no -v -i "$key" "$user@$host" "$command"
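
For reference, a hypothetical invocation of this helper (all values below are placeholders; the workflow passes its own key, host, and script):

```bash
# Hypothetical example: copy a setup script to a VM and run it remotely.
# -u is optional and defaults to "ubuntu".
./.ci/scripts/remote_setup.sh \
  -k ./my-vm-key.pem \
  -s ./setup.sh \
  -h 203.0.113.10 \
  -d "~/setup.sh" \
  -c "chmod +x setup.sh && ./setup.sh"
```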
26 changes: 26 additions & 0 deletions .ci/scripts/set_cloud_env_params.sh
@@ -47,3 +47,29 @@ echo "KSPM_PUBLIC_IP=$KSPM_PUBLIC_IP" >>"$GITHUB_ENV"
CSPM_PUBLIC_IP=$(terraform output -raw ec2_cspm_public_ip)
echo "::add-mask::$CSPM_PUBLIC_IP"
echo "CSPM_PUBLIC_IP=$CSPM_PUBLIC_IP" >>"$GITHUB_ENV"

if [[ ${TF_VAR_cdr_infra:-} == "true" ]]; then
  ec2_cloudtrail_public_ip=$(terraform output -raw ec2_cloudtrail_public_ip)
  echo "::add-mask::$ec2_cloudtrail_public_ip"
  echo "CLOUDTRAIL_PUBLIC_IP=$ec2_cloudtrail_public_ip" >>"$GITHUB_ENV"

  ec2_cloudtrail_key=$(terraform output -raw ec2_cloudtrail_key)
  echo "::add-mask::$ec2_cloudtrail_key"
  echo "CLOUDTRAIL_KEY=$ec2_cloudtrail_key" >>"$GITHUB_ENV"

  az_vm_activity_logs_public_ip=$(terraform output -raw az_vm_activity_logs_public_ip)
  echo "::add-mask::$az_vm_activity_logs_public_ip"
  echo "ACTIVITY_LOGS_PUBLIC_IP=$az_vm_activity_logs_public_ip" >>"$GITHUB_ENV"

  az_vm_activity_logs_key=$(terraform output -raw az_vm_activity_logs_key)
  echo "::add-mask::$az_vm_activity_logs_key"
  echo "ACTIVITY_LOGS_KEY=$az_vm_activity_logs_key" >>"$GITHUB_ENV"

  gcp_audit_logs_public_ip=$(terraform output -raw gcp_audit_logs_public_ip)
  echo "::add-mask::$gcp_audit_logs_public_ip"
  echo "AUDIT_LOGS_PUBLIC_IP=$gcp_audit_logs_public_ip" >>"$GITHUB_ENV"

  gcp_audit_logs_key=$(terraform output -raw gcp_audit_logs_key)
  echo "::add-mask::$gcp_audit_logs_key"
  echo "AUDIT_LOGS_KEY=$gcp_audit_logs_key" >>"$GITHUB_ENV"
fi
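
A note on the pattern used above: `::add-mask::` only redacts the value in the workflow log, while `$GITHUB_ENV` still carries the real value to later steps. A minimal sketch with a made-up value:

```bash
# Sketch only; the IP below is a placeholder, not a real Terraform output.
cloudtrail_ip="203.0.113.10"
echo "::add-mask::$cloudtrail_ip"                           # later log lines show '***' instead of the IP
echo "CLOUDTRAIL_PUBLIC_IP=$cloudtrail_ip" >>"$GITHUB_ENV"  # subsequent steps still read the real value
```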
41 changes: 41 additions & 0 deletions .github/workflows/cdr-infra.yml
@@ -0,0 +1,41 @@
name: Create Environment with Cloud Logs (CDR)
run-name: Creating ${{ inputs.deployment-name }} by @${{ github.actor }}

on:
  # Ability to execute on demand
  workflow_dispatch:
    inputs:
      deployment-name:
        type: string
        description: |
          Name with letters, numbers, hyphens; start with a letter. Max 20 chars. e.g., 'my-env-123'
        required: true
      serverless_mode:
        description: "Deploy a serverless project instead of an ESS deployment"
        type: boolean
        required: true
        default: false
      elk-stack-version:
        required: true
        description: "The version of the ELK stack: for BC use the version without a hash (8.x.y); for SNAPSHOT use 8.x.y-SNAPSHOT"
        default: "8.16.0"
        type: string
      docker-image-override:
        required: false
        description: "Provide the full Docker image path to override the default image (e.g. for testing BC/SNAPSHOT)"
        type: string

jobs:
  deploy:
    uses: ./.github/workflows/test-environment.yml
    secrets: inherit
    # Required for the 'Deploy' job in 'test-environment.yml' to authenticate with Google Cloud (gcloud).
    permissions:
      contents: 'read'
      id-token: 'write'
    with:
      deployment_name: ${{ inputs.deployment-name }}
      ess-region: 'gcp-us-west2'
      elk-stack-version: ${{ inputs.elk-stack-version }}
      serverless_mode: ${{ fromJSON(inputs.serverless_mode) }}
      cdr-infra: true
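
A sketch of triggering this workflow from the command line, assuming the GitHub CLI is authenticated against the repository (input values are illustrative):

```bash
gh workflow run cdr-infra.yml \
  --ref main \
  -f deployment-name="my-env-123" \
  -f serverless_mode=false \
  -f elk-stack-version="8.16.0"
```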
98 changes: 87 additions & 11 deletions .github/workflows/test-environment.yml
@@ -98,6 +98,11 @@ on:
        type: string
        description: "**Optional** By default, the environment will be created in our Cloud Security Organization. If you want to use your own cloud account, enter your Elastic Cloud API key."
        required: false
      cdr-infra:
        description: "Flag to indicate that the CDR infrastructure is being created"
        type: boolean
        required: false
        default: false
  outputs:
    s3-bucket:
      description: "Terraform state s3 bucket folder"
@@ -117,6 +122,7 @@ env:
  GCP_ZONE: "us-central1-a"
  AZURE_DEFAULT_TAGS: "division=engineering org=security team=cloud-security-posture project=test-environments owner=${{ github.actor }}"
  TF_VAR_ec_api_key: ${{ secrets.EC_API_KEY }}
  TF_VAR_gcp_service_account_json: ${{ secrets.GCP_AGENT_CREDENTIALS }}

jobs:
  Deploy:
@@ -217,6 +223,17 @@ jobs:
          echo "::add-mask::$enrollment_token"
          echo "ENROLLMENT_TOKEN=$enrollment_token" >> $GITHUB_ENV

      - name: Init CDR Infra
        id: init-cdr-infra
        env:
          CDR_INFRA: ${{ inputs.cdr-infra }}
        run: |
          if [[ "${CDR_INFRA:-}" == "true" ]]; then
            echo "TF_VAR_cdr_infra=true" >> $GITHUB_ENV
          else
            echo "TF_VAR_cdr_infra=false" >> $GITHUB_ENV
          fi

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
@@ -253,8 +270,9 @@ jobs:
          workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
          service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }}

      - name: Set TF_STATE_FOLDER
      - name: Update Vars
        run: |
          echo "TF_VAR_gcp_project_id=$GCP_PROJECT" >> $GITHUB_ENV
          echo "TF_STATE_FOLDER=$(date +'%Y-%m-%d_%H-%M-%S')" >> $GITHUB_ENV

      - name: Terraform Init
@@ -291,6 +309,11 @@ jobs:
          echo "aws-cnvm-stack=${CNVM_STACK_NAME}" >> $GITHUB_OUTPUT
          python3 ../../.ci/scripts/create_env_config.py
          aws s3 cp "./env_config.json" "${S3_BUCKET}/env_config.json"
          if [[ ${TF_VAR_cdr_infra:-} == "true" ]]; then
            aws s3 cp "${CLOUDTRAIL_KEY}" "${S3_BUCKET}/cloudtrail.pem"
            aws s3 cp "${ACTIVITY_LOGS_KEY}" "${S3_BUCKET}/az_activity_logs.pem"
            aws s3 cp "${AUDIT_LOGS_KEY}" "${S3_BUCKET}/gcp_audit_logs.pem"
          fi

      - name: Summary
        if: success()
@@ -308,6 +331,61 @@ jobs:
          echo "$summary" >> $GITHUB_STEP_SUMMARY
          echo "$summary" # Print the summary to the workflow log

      - name: Install AWS Cloudtrail integration
        id: cloudtrail-integration
        if: env.TF_VAR_cdr_infra == 'true'
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        env:
          CLOUDTRAIL_S3: ${{ secrets.CLOUDTRAIL_S3 }}
        run: |
          poetry run python ./install_cloudtrail_integration.py

      - name: Deploy AWS Cloudtrail agent
        if: env.TF_VAR_cdr_infra == 'true'
        run: |
          scriptname="cloudtrail-linux.sh"
          src="../../$INTEGRATIONS_SETUP_DIR/$scriptname"
          cmd="chmod +x $scriptname && ./$scriptname"
          ../../.ci/scripts/remote_setup.sh -k "$CLOUDTRAIL_KEY" -s "$src" -h "$CLOUDTRAIL_PUBLIC_IP" -d "~/$scriptname" -c "$cmd"

      - name: Install Azure Activity Logs integration
        id: az-activity-logs-integration
        if: env.TF_VAR_cdr_infra == 'true'
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        env:
          EVENTHUB: "activity-logs"
          CONNECTION_STRING: ${{ secrets.AZURE_EVENTHUB_CONNECTION_STRING }}
          STORAGE_ACCOUNT: "testenvsactivitylogs"
          STORAGE_ACCOUNT_KEY: ${{ secrets.AZURE_STORAGE_ACCOUNT_KEY }}
        run: |
          poetry run python ./install_az_activity_logs_integration.py

      - name: Deploy Azure Activity Logs agent
        if: env.TF_VAR_cdr_infra == 'true'
        run: |
          scriptname="az_activity_logs.sh"
          src="../../$INTEGRATIONS_SETUP_DIR/$scriptname"
          cmd="chmod +x $scriptname && ./$scriptname"
          ../../.ci/scripts/remote_setup.sh -k "$ACTIVITY_LOGS_KEY" -s "$src" -h "$ACTIVITY_LOGS_PUBLIC_IP" -d "~/$scriptname" -c "$cmd"

      - name: Install GCP Audit Logs integration
        id: gcp-audit-logs-integration
        if: env.TF_VAR_cdr_infra == 'true'
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
        env:
          GCP_TOPIC_NAME: "test-envs-topic"
          GCP_SUBSCRIPTION_NAME: "test-envs-topic-sub-id"
        run: |
          poetry run python ./install_gcp_audit_logs_integration.py

      - name: Deploy GCP Audit Logs agent
        if: env.TF_VAR_cdr_infra == 'true'
        run: |
          scriptname="gcp_audit_logs.sh"
          src="../../$INTEGRATIONS_SETUP_DIR/$scriptname"
          cmd="chmod +x $scriptname && ./$scriptname"
          ../../.ci/scripts/remote_setup.sh -k "$AUDIT_LOGS_KEY" -s "$src" -h "$AUDIT_LOGS_PUBLIC_IP" -d "~/$scriptname" -c "$cmd"

      - name: Install CNVM integration
        id: cnvm
        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
@@ -377,11 +455,10 @@ jobs:
      - name: Deploy KSPM Unmanaged agent
        run: |
          chmod 600 ${{ env.EC2_KSPM_KEY }}
          # Copy the manifest file to the EC2 instance
          scp -o StrictHostKeyChecking=no -v -i ${{ env.EC2_KSPM_KEY }} ../../${{ env.INTEGRATIONS_SETUP_DIR }}/kspm_unmanaged.yaml "ubuntu@${{ env.KSPM_PUBLIC_IP }}:~/."
          # Apply the manifest file
          ssh -o StrictHostKeyChecking=no -v -i ${{ env.EC2_KSPM_KEY }} "ubuntu@${{ env.KSPM_PUBLIC_IP }}" "kubectl apply -f kspm_unmanaged.yaml"
          scriptname="kspm_unmanaged.yaml"
          src="../../$INTEGRATIONS_SETUP_DIR/$scriptname"
          cmd="kubectl apply -f $scriptname"
          ../../.ci/scripts/remote_setup.sh -k "$EC2_KSPM_KEY" -s "$src" -h "$KSPM_PUBLIC_IP" -d "~/$scriptname" -c "$cmd"
      - name: Install CSPM integration
        id: cspm

@@ -391,11 +468,10 @@ jobs:
      - name: Deploy CSPM agent
        run: |
          chmod 600 ${{ env.EC2_CSPM_KEY }}
          # Copy the manifest file to the EC2 instance
          scp -o StrictHostKeyChecking=no -v -i ${{ env.EC2_CSPM_KEY }} ../../${{ env.INTEGRATIONS_SETUP_DIR }}/cspm-linux.sh "ubuntu@${{ env.CSPM_PUBLIC_IP }}:~/."
          # Apply the manifest file
          ssh -o StrictHostKeyChecking=no -v -i ${{ env.EC2_CSPM_KEY }} "ubuntu@${{ env.CSPM_PUBLIC_IP }}" "chmod +x cspm-linux.sh && ./cspm-linux.sh"
          scriptname="cspm-linux.sh"
          src="../../$INTEGRATIONS_SETUP_DIR/$scriptname"
          cmd="chmod +x $scriptname && ./$scriptname"
          ../../.ci/scripts/remote_setup.sh -k "$EC2_CSPM_KEY" -s "$src" -h "$CSPM_PUBLIC_IP" -d "~/$scriptname" -c "$cmd"
      - name: Upload Integrations data
        if: always()
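
When the CDR flag is set, the `.pem` files uploaded above can be pulled back from the Terraform state bucket to debug a VM by hand. A hedged sketch (bucket path and IP are placeholders; the real prefix comes from the workflow's `S3_BUCKET` value):

```bash
# Hypothetical values for illustration only.
S3_BUCKET="s3://example-tf-state-bucket/my-env-123_2024-09-03_12-00-00"
aws s3 cp "${S3_BUCKET}/cloudtrail.pem" ./cloudtrail.pem
chmod 600 ./cloudtrail.pem
ssh -i ./cloudtrail.pem ubuntu@<CLOUDTRAIL_PUBLIC_IP>   # IP comes from the deployment outputs
```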
3 changes: 2 additions & 1 deletion deploy/cloud/modules/azure/vm/main.tf
@@ -9,7 +9,7 @@ resource "azurerm_resource_group" "resource_group" {

locals {
  vm_private_key_file = "${path.module}/azure-vm-${random_id.id.hex}.pem"
  vm_username         = "azureuser"
  vm_username         = "ubuntu"
  deploy_name         = "${var.deployment_name}-${random_id.id.hex}"
  tags = merge({
    name = var.deployment_name
@@ -116,6 +116,7 @@ resource "azurerm_linux_virtual_machine" "linux_vm" {
  location            = azurerm_resource_group.resource_group.location
  size                = var.size
  admin_username      = local.vm_username
  depends_on          = [azurerm_network_interface_security_group_association.azure_vm_nsg_association]
  network_interface_ids = [
    azurerm_network_interface.vm_nic.id
  ]
14 changes: 14 additions & 0 deletions deploy/cloud/modules/gcp/vm/main.tf
@@ -46,4 +46,18 @@ resource "google_compute_instance" "vm_instance" {
  service_account {
    scopes = var.scopes
  }

  provisioner "remote-exec" {
    connection {
      type        = "ssh"
      user        = local.vm_username
      private_key = tls_private_key.gcp_vm_key.private_key_pem
      host        = self.network_interface[0].access_config[0].nat_ip
    }

    inline = [
      "echo '${var.gcp_service_account_json}' | base64 --decode > /home/ubuntu/credentials.json",
      "chmod 600 /home/ubuntu/credentials.json"
    ]
  }
}
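
On the provisioned VM, the decoded key can be sanity-checked and activated for the agent. A sketch only, assuming the `gcloud` CLI is available on the image:

```bash
# Run on the GCP VM after provisioning (illustrative, not part of the module).
python3 -m json.tool /home/ubuntu/credentials.json > /dev/null && echo "credentials.json is valid JSON"
gcloud auth activate-service-account --key-file=/home/ubuntu/credentials.json
```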
7 changes: 7 additions & 0 deletions deploy/cloud/modules/gcp/vm/variables.tf
@@ -37,3 +37,10 @@ variable "scopes" {
  type    = list(string)
  default = ["https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/cloudplatformorganizations"]
}

variable "gcp_service_account_json" {
  description = "GCP Service Account JSON"
  type        = string
  default     = "default"
  sensitive   = true
}
14 changes: 7 additions & 7 deletions deploy/test-environments/main.tf
@@ -48,13 +48,13 @@ module "aws_ec2_for_cspm" {
}

module "gcp_audit_logs" {
  count           = var.cdr_infra ? 1 : 0
  providers       = { google : google }
  source          = "../cloud/modules/gcp/vm"

  deployment_name = var.deployment_name
  network         = "default"
  specific_tags   = merge(local.common_tags, { "vm_instance" : "audit-logs" })
  count                    = var.cdr_infra ? 1 : 0
  providers                = { google : google }
  source                   = "../cloud/modules/gcp/vm"
  gcp_service_account_json = var.gcp_service_account_json
  deployment_name          = var.deployment_name
  network                  = "default"
  specific_tags            = merge(local.common_tags, { "vm_instance" : "audit-logs" })

}

12 changes: 6 additions & 6 deletions deploy/test-environments/output.tf
@@ -53,18 +53,18 @@ output "ec2_cloudtrail_key" {
}

output "gcp_audit_logs_ssh_cmd" {
  value = var.cdr_infra ? module.gcp_audit_logs[0].gcp_vm_ssh_cmd : null
  # sensitive = true
  value     = var.cdr_infra ? module.gcp_audit_logs[0].gcp_vm_ssh_cmd : null
  sensitive = true
}

output "gcp_audit_logs_public_ip" {
  value = var.cdr_infra ? module.gcp_audit_logs[0].gcp_vm_puglic_ip : null
  # sensitive = true
  value     = var.cdr_infra ? module.gcp_audit_logs[0].gcp_vm_puglic_ip : null
  sensitive = true
}

output "gcp_audit_logs_key" {
  value = var.cdr_infra ? module.gcp_audit_logs[0].gcp_vm_ssh_key : null
  # sensitive = true
  value     = var.cdr_infra ? module.gcp_audit_logs[0].gcp_vm_ssh_key : null
  sensitive = true
}

output "az_vm_activity_logs_ssh_cmd" {
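
Marking these outputs as sensitive hides them in plan/apply output and in a plain `terraform output`, but `set_cloud_env_params.sh` can still read them explicitly, for example:

```bash
terraform output gcp_audit_logs_public_ip        # prints <sensitive>
terraform output -raw gcp_audit_logs_public_ip   # prints the actual value for scripting
```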
7 changes: 7 additions & 0 deletions deploy/test-environments/variables.tf
@@ -30,6 +30,13 @@ variable "gcp_project_id" {
  default = "default"
}

variable "gcp_service_account_json" {
  description = "GCP Service Account JSON"
  type        = string
  default     = "default"
  sensitive   = true
}

# Elastic Cloud variables
# ===========================================
variable "ec_api_key" {
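
Because the VM provisioner pipes this value through `base64 --decode`, the variable is expected to hold a base64-encoded service-account key. A sketch of wiring it up locally (file name is a placeholder; `-w0` is GNU coreutils syntax and differs on macOS):

```bash
export TF_VAR_gcp_service_account_json="$(base64 -w0 service-account.json)"
terraform plan -var="cdr_infra=true"
```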
8 changes: 8 additions & 0 deletions dev-docs/Cloud-Env-Testing.md
@@ -138,6 +138,14 @@ Follow these steps to connect to your Amazon Elastic Kubernetes Service (EKS) cl

This command should list the pods in the kube-system namespace, confirming that you have successfully connected to your EKS cluster.

## Create Environment with Cloud Logs

The [`Create Environment with Cloud Logs`](https://github.com/elastic/cloudbeat/actions/workflows/cdr-infra.yml) GitHub Action extends the regular [`Create Environment`](https://github.com/elastic/cloudbeat/actions/workflows/test-environment.yml) action by including a pre-configured installation of Cloud Logs. This workflow installs integrations for AWS CloudTrail, GCP Audit Logs, and Azure Activity Logs, including the necessary connectivity and configuration on the respective cloud providers.

### Workflow Inputs

The workflow accepts a subset of the inputs used by the regular `Create Environment` workflow. All required inputs are described [here](#how-to-run-the-workflow).


## Cleanup Procedure

