Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

scoutsuite scanning addition #86

Merged
merged 5 commits into from
Jan 11, 2024
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .project_automation/functional_tests/entrypoint.sh
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,8 @@ run_test() {
echo $AWS_DEFAULT_REGION
unset AWS_DEFAULT_REGION
echo $AWS_DEFAULT_REGION
taskcat test run -t $1
}
taskcat test run -n -t $1
.project_automation/functional_tests/scoutsuite/scoutsuite.sh
}

acct_id=$(aws sts get-caller-identity --output text --query 'Account')

Expand Down
15 changes: 0 additions & 15 deletions .project_automation/functional_tests/entrypoint.sh.rej

This file was deleted.

55 changes: 0 additions & 55 deletions .project_automation/functional_tests/entrypoint_scotsuite.sh

This file was deleted.

Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"about": "This is a simplified ruleset intended for use by CDO Securtiy Certifiers and is maintained by Amazon's CloudSecurity team.",
"about": "This is a simplified ruleset intended for use with AWS ABI test environments.",
"rules": {
"cloudtrail-not-configured.json": [
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def create_scoutsuite_custom_rule_file(file_name):
if not os.path.exists(scoutsuite_cloudtrail_json_file_path):
# If the file does not exist, create it
# Source path
src = '.project_automation/functional_tests/'+file_name
src = '.project_automation/functional_tests/scoutsuite/'+file_name
copy_file(src, scoutsuite_cloudtrail_json_file_path)
else:
logging.info(f'File {scoutsuite_cloudtrail_json_file_path} already exists')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ def create_lambda_s3_presignedurl():
'''Function that creates the Lambda function that generates S3 presigned URLs'''

# Create the ZIP deployment package for Lambda
lambda_zipped_code = zip_folder_to_bytesio('./.project_automation/functional_tests/lambda_functions/source/lambda_s3_presignedurl')
lambda_zipped_code = zip_folder_to_bytesio('./.project_automation/functional_tests/scoutsuite/lambda_functions/source/lambda_s3_presignedurl')
lambda_zipped_code.seek(0) # Reset the cursor of the BytesIO object to the beginning

s3_client = session.client('s3')
Expand All @@ -95,7 +95,7 @@ def create_lambda_s3_presignedurl():
# Use the session to create a client for CloudFormation
cf_client = session.client('cloudformation')

with open('./.project_automation/functional_tests/lambda_s3_presignedurl.yaml', 'r') as file:
with open('./.project_automation/functional_tests/scoutsuite/lambda_s3_presignedurl.yaml', 'r') as file:
template_body = file.read()
# Check if the stack already exists
try:
Expand Down Expand Up @@ -277,6 +277,8 @@ def upload_scoutsuite_results_zip_to_s3(scoutsuite_zip_file_path, zip_name):
s3_file_with_key = time_key + '-' + zip_name
# Upload the Scoutsuite results zip to an S3 bucket
s3.upload_file(scoutsuite_zip_file_path, bucket_name, s3_file_with_key)
with open("scoutsuite_s3_filename.txt", "w") as file:
file.write(str(s3_file_with_key))

except ClientError as error:
logging.exception (error)
Expand Down
40 changes: 40 additions & 0 deletions .project_automation/functional_tests/scoutsuite/scoutsuite.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
#!/bin/bash -ex
# Runs a ScoutSuite security scan of the test account and fails the build
# when the scan reports Danger-level findings.

## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
# managed and local tasks always use these variables for the project and project type path
PROJECT_PATH="${BASE_PATH}/project"
PROJECT_TYPE_PATH="${BASE_PATH}/projecttype"

# Quote the expansion so the script still works if BASE_PATH contains whitespace
cd "${PROJECT_PATH}"

# Retrieve the AWS account ID and store it in a variable
AWS_ACCOUNT_ID=$(aws sts get-caller-identity --query "Account" --output text)

run_scoutsuite() {
    # Generate the custom ScoutSuite rule file used by the scan below
    python3 .project_automation/functional_tests/scoutsuite/create-scoutsuite-custom-rule.py
    # Run the ScoutSuite scan against us-east-1 with the ABI custom ruleset,
    # headless (--no-browser) and throttled (--max-rate / --max-workers 5);
    # -f overwrites any report left by a previous run
    scout aws -r us-east-1 \
        --ruleset .project_automation/functional_tests/scoutsuite/abi-scoutsuite-custom-ruleset.json \
        --no-browser --max-rate 5 --max-workers 5 -f
    # Upload the scan results to the S3 bucket named scoutsuite-results-aws-<AWS-ACCOUNT-ID>
    python3 .project_automation/functional_tests/scoutsuite/process-scoutsuite-report.py
    # Tear down the taskcat e2e test resources
    taskcat test clean ALL
    # Fail the build if Danger-level findings were reported
    process_scoutsuite_report
}

process_scoutsuite_report() {
    # Check Scoutsuite security scan result for Danger level findings (Non-0 exit code).
    # Reads two marker files left behind by the reporting step
    # (presumably written by process-scoutsuite-report.py — verify against that script):
    #   scoutsuite_sysout.txt      - non-zero when the scan flagged issues
    #   scoutsuite_s3_filename.txt - S3 key of the uploaded results zip
    local scoutsuite_sysout_result scoutsuite_s3_filename
    scoutsuite_sysout_result=$(cat scoutsuite_sysout.txt)
    scoutsuite_s3_filename=$(cat scoutsuite_s3_filename.txt)
    # Remove the markers in one call; -f keeps a missing file from aborting
    # the script under `set -e`
    rm -f scoutsuite_sysout.txt scoutsuite_s3_filename.txt
    if [ "$scoutsuite_sysout_result" -ne 0 ]; then
        # The value is non-zero, indicating Scoutsuite report needs to be checked for security issues
        echo "Scoutsuite report contains security issues. For details please check the log messages above or the file $scoutsuite_s3_filename in the S3 bucket named scoutsuite-results-aws-$AWS_ACCOUNT_ID in the AWS test account provided by the ABI team."
        exit 1
    fi
}

# Entry point: execute the ScoutSuite security test
run_scoutsuite
4 changes: 2 additions & 2 deletions .project_automation/publication/entrypoint.sh
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ REPO_NAME=$(git config --get remote.origin.url | cut -d '/' -f5 | cut -d '.' -f1
VERSION=$(cat VERSION)

BASE_URL="this would be the path to s3 bucket/${REPO_NAME}/"
S3_URI="s3://aws-abi-pilot/guide/${REPO_NAME}/"
S3_URI="s3://aws-abi/guide/${REPO_NAME}/"

print_header() {
printf "\n\n%s\n" "$*"
Expand All @@ -36,7 +36,7 @@ printf "\nPublished at ${BASE_URL}\n"

cd ${PROJECT_PATH}

taskcat upload --bucket-name aws-abi-pilot --object-acl public-read
taskcat upload --bucket-name aws-abi --object-acl public-read

# if [ -n "${BASE_PATH}" ]
# then
Expand Down