diff --git a/.gitignore b/.gitignore index 79ba03f1..1ab00b0d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,5 @@ .idea bundle.tar.gz opa +.vscode/ +.DS_Store diff --git a/checks/cloud/aws/accessanalyzer/enable_access_analyzer.rego b/checks/cloud/aws/accessanalyzer/enable_access_analyzer.rego new file mode 100644 index 00000000..a6851b44 --- /dev/null +++ b/checks/cloud/aws/accessanalyzer/enable_access_analyzer.rego @@ -0,0 +1,45 @@ +# METADATA +# title: Enable IAM Access analyzer for IAM policies about all resources in each region. +# description: | +# AWS IAM Access Analyzer helps you identify the resources in your organization and +# accounts, such as Amazon S3 buckets or IAM roles, that are shared with an external entity. +# This lets you identify unintended access to your resources and data. Access Analyzer +# identifies resources that are shared with external principals by using logic-based reasoning +# to analyze the resource-based policies in your AWS environment. IAM Access Analyzer +# continuously monitors all policies for S3 bucket, IAM roles, KMS(Key Management Service) +# keys, AWS Lambda functions, and Amazon SQS(Simple Queue Service) queues. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/what-is-access-analyzer.html +# custom: +# id: AVD-AWS-0175 +# avd_id: AVD-AWS-0175 +# provider: aws +# service: accessanalyzer +# severity: LOW +# short_code: enable-access-analyzer +# recommended_action: Enable IAM Access analyzer across all regions. 
+# frameworks: +# cis-aws-1.4: +# - "1.20" +# input: +# selector: +# - type: aws +# subtypes: +# - service: accessanalyzer +# provider: aws +package builtin.aws.accessanalyzer.aws0175 + +import rego.v1 + +deny contains res if { + not has_active_analyzer + res := result.new("Access Analyzer is not enabled.", {}) +} + +has_active_analyzer if { + some analyzer in input.aws.accessanalyzer.analyzers + analyzer.active.value +} diff --git a/checks/cloud/aws/accessanalyzer/enable_access_analyzer_test.rego b/checks/cloud/aws/accessanalyzer/enable_access_analyzer_test.rego new file mode 100644 index 00000000..1e37b2b2 --- /dev/null +++ b/checks/cloud/aws/accessanalyzer/enable_access_analyzer_test.rego @@ -0,0 +1,26 @@ +package builtin.aws.accessanalyzer.aws0175_test + +import rego.v1 + +import data.builtin.aws.accessanalyzer.aws0175 as check +import data.lib.test + +test_disallow_no_analyzers if { + r := check.deny with input as {"aws": {"accessanalyzer": {"analyzers": []}}} + test.assert_equal_message("Access Analyzer is not enabled.", r) +} + +test_disallow_analyzer_disabled if { + r := check.deny with input as {"aws": {"accessanalyzer": {"analyzers": [{"active": {"value": false}}]}}} + test.assert_equal_message("Access Analyzer is not enabled.", r) +} + +test_allow_one_of_analyzer_disabled if { + r := check.deny with input as {"aws": {"accessanalyzer": {"analyzers": [{"active": {"value": false}}, {"active": {"value": true}}]}}} + test.assert_empty(r) +} + +test_allow_analyzer_enabled if { + r := check.deny with input as {"aws": {"accessanalyzer": {"analyzers": [{"active": {"value": true}}]}}} + test.assert_empty(r) +} diff --git a/checks/cloud/aws/apigateway/enable_access_logging.rego b/checks/cloud/aws/apigateway/enable_access_logging.rego new file mode 100644 index 00000000..28fe849a --- /dev/null +++ b/checks/cloud/aws/apigateway/enable_access_logging.rego @@ -0,0 +1,45 @@ +# METADATA +# title: API Gateway stages for V1 and V2 should have access logging enabled +# 
description: | +# API Gateway stages should have access log settings block configured to track all access to a particular stage. This should be applied to both v1 and v2 gateway stages. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-logging.html +# custom: +# id: AVD-AWS-0001 +# avd_id: AVD-AWS-0001 +# provider: aws +# service: api-gateway +# severity: MEDIUM +# short_code: enable-access-logging +# recommended_action: Enable logging for API Gateway stages +# input: +# selector: +# - type: aws +# subtypes: +# - service: api-gateway +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/apigatewayv2_stage#access_log_settings +# good_examples: checks/cloud/aws/apigateway/enable_access_logging.tf.go +# bad_examples: checks/cloud/aws/apigateway/enable_access_logging.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/apigateway/enable_access_logging.cf.go +# bad_examples: checks/cloud/aws/apigateway/enable_access_logging.cf.go +package builtin.aws.apigateway.aws0001 + +import rego.v1 + +deny contains res if { + some stage in apis[_].stages + arn := stage.accesslogging.cloudwatchloggrouparn + arn.value == "" # TODO: check if unresolvable? 
+ res := result.new("Access logging is not configured.", arn) +} + +apis contains input.aws.apigateway.v1.apis[_] + +apis contains input.aws.apigateway.v2.apis[_] diff --git a/checks/cloud/aws/apigateway/enable_access_logging_test.rego b/checks/cloud/aws/apigateway/enable_access_logging_test.rego new file mode 100644 index 00000000..4a055e6d --- /dev/null +++ b/checks/cloud/aws/apigateway/enable_access_logging_test.rego @@ -0,0 +1,17 @@ +package builtin.aws.apigateway.aws0001_test + +import rego.v1 + +import data.builtin.aws.apigateway.aws0001 as check +import data.lib.test + +test_disallow_api_gateway_without_log_group_arn if { + r := check.deny with input as {"aws": {"apigateway": {"v1": {"apis": [{"stages": [{"accesslogging": {"cloudwatchloggrouparn": {"value": ""}}}]}]}}}} + test.assert_equal_message("Access logging is not configured.", r) +} + +test_allow_api_gateway_with_log_group_arn if { + test.assert_empty(check.deny) with input as {"aws": {"apigateway": {"v1": {"apis": [{"stages": [{"accesslogging": {"cloudwatchloggrouparn": {"value": "log-group-arn"}}}]}]}}}} +} + +# TODO add test for v2 diff --git a/checks/cloud/aws/apigateway/enable_cache.rego b/checks/cloud/aws/apigateway/enable_cache.rego new file mode 100644 index 00000000..a7267b14 --- /dev/null +++ b/checks/cloud/aws/apigateway/enable_cache.rego @@ -0,0 +1,37 @@ +# METADATA +# title: Ensure that response caching is enabled for your Amazon API Gateway REST APIs. +# description: | +# A REST API in API Gateway is a collection of resources and methods that are integrated with backend HTTP endpoints, Lambda functions, or other AWS services. You can enable API caching in Amazon API Gateway to cache your endpoint responses. With caching, you can reduce the number of calls made to your endpoint and also improve the latency of requests to your API. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-caching.html +# custom: +# id: AVD-AWS-0190 +# avd_id: AVD-AWS-0190 +# provider: aws +# service: api-gateway +# severity: LOW +# short_code: enable-cache +# recommended_action: Enable cache +# input: +# selector: +# - type: aws +# subtypes: +# - service: api-gateway +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/api_gateway_method_settings#cache_enabled +# good_examples: checks/cloud/aws/apigateway/enable_cache.tf.go +# bad_examples: checks/cloud/aws/apigateway/enable_cache.tf.go +package builtin.aws.apigateway.aws0190 + +import rego.v1 + +deny contains res if { + some settings in input.aws.apigateway.v1.apis[_].stages[_].restmethodsettings + not settings.cacheenabled.value + res := result.new("Cache data is not enabled.", settings.cacheenabled) +} diff --git a/checks/cloud/aws/apigateway/enable_cache_encryption.rego b/checks/cloud/aws/apigateway/enable_cache_encryption.rego new file mode 100644 index 00000000..f9f30e68 --- /dev/null +++ b/checks/cloud/aws/apigateway/enable_cache_encryption.rego @@ -0,0 +1,36 @@ +# METADATA +# title: API Gateway must have cache enabled +# description: | +# Method cache encryption ensures that any sensitive data in the cache is not vulnerable to compromise in the event of interception +# scope: package +# schemas: +# - input: schema["cloud"] +# custom: +# id: AVD-AWS-0002 +# avd_id: AVD-AWS-0002 +# provider: aws +# service: api-gateway +# severity: MEDIUM +# short_code: enable-cache-encryption +# recommended_action: Enable cache encryption +# input: +# selector: +# - type: aws +# subtypes: +# - service: api-gateway +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/api_gateway_method_settings#cache_data_encrypted +# good_examples: 
checks/cloud/aws/apigateway/enable_cache_encryption.tf.go +# bad_examples: checks/cloud/aws/apigateway/enable_cache_encryption.tf.go +package builtin.aws.apigateway.aws0002 + +import rego.v1 + +deny contains res if { + some settings in input.aws.apigateway.v1.apis[_].stages[_].restmethodsettings + settings.cacheenabled.value + not settings.cachedataencrypted.value + res := result.new("Cache data is not encrypted.", settings.cachedataencrypted) +} diff --git a/checks/cloud/aws/apigateway/enable_cache_encryption_test.rego b/checks/cloud/aws/apigateway/enable_cache_encryption_test.rego new file mode 100644 index 00000000..979e7365 --- /dev/null +++ b/checks/cloud/aws/apigateway/enable_cache_encryption_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.apigateway.aws0002_test + +import rego.v1 + +import data.builtin.aws.apigateway.aws0002 as check +import data.lib.test + +test_allow_api_gateway_with_cache_encryption if { + test.assert_empty(check.deny) with input as {"aws": {"apigateway": {"v1": {"apis": [{"stages": [{"restmethodsettings": [{"cacheenabled": {"value": true}, "cachedataencrypted": {"value": true}}]}]}]}}}} +} + +test_disallow_api_gateway_without_cache_encryption if { + r := check.deny with input as {"aws": {"apigateway": {"v1": {"apis": [{"stages": [{"restmethodsettings": [{"cacheenabled": {"value": true}, "cachedataencrypted": {"value": false}}]}]}]}}}} + + test.assert_equal_message("Cache data is not encrypted.", r) +} diff --git a/checks/cloud/aws/apigateway/enable_cache_test.rego b/checks/cloud/aws/apigateway/enable_cache_test.rego new file mode 100644 index 00000000..57040fba --- /dev/null +++ b/checks/cloud/aws/apigateway/enable_cache_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.apigateway.aws0190_test + +import rego.v1 + +import data.builtin.aws.apigateway.aws0190 as check +import data.lib.test + +test_allow_cache_enabled if { + test.assert_empty(check.deny) with input as {"aws": {"apigateway": {"v1": {"apis": [{"stages": [{"restmethodsettings": 
[{"cacheenabled": {"value": true}}]}]}]}}}} +} + +test_disallow_cache_disabled if { + r := check.deny with input as {"aws": {"apigateway": {"v1": {"apis": [{"stages": [{"restmethodsettings": [{"cacheenabled": {"value": false}}]}]}]}}}} + + test.assert_equal_message("Cache data is not enabled.", r) +} diff --git a/checks/cloud/aws/apigateway/enable_tracing.rego b/checks/cloud/aws/apigateway/enable_tracing.rego new file mode 100644 index 00000000..dde4d084 --- /dev/null +++ b/checks/cloud/aws/apigateway/enable_tracing.rego @@ -0,0 +1,35 @@ +# METADATA +# title: API Gateway must have X-Ray tracing enabled +# description: | +# X-Ray tracing enables end-to-end debugging and analysis of all API Gateway HTTP requests. +# scope: package +# schemas: +# - input: schema["cloud"] +# custom: +# id: AVD-AWS-0003 +# avd_id: AVD-AWS-0003 +# provider: aws +# service: api-gateway +# severity: LOW +# short_code: enable-tracing +# recommended_action: Enable tracing +# input: +# selector: +# - type: aws +# subtypes: +# - service: api-gateway +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/api_gateway_stage#xray_tracing_enabled +# good_examples: checks/cloud/aws/apigateway/enable_tracing.tf.go +# bad_examples: checks/cloud/aws/apigateway/enable_tracing.tf.go +package builtin.aws.apigateway.aws0003 + +import rego.v1 + +deny contains res if { + some stage in input.aws.apigateway.v1.apis[_].stages + not stage.xraytracingenabled.value + res := result.new("X-Ray tracing is not enabled.", stage.xraytracingenabled) +} diff --git a/checks/cloud/aws/apigateway/enable_tracing_test.rego b/checks/cloud/aws/apigateway/enable_tracing_test.rego new file mode 100644 index 00000000..875f4140 --- /dev/null +++ b/checks/cloud/aws/apigateway/enable_tracing_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.apigateway.aws0003_test + +import rego.v1 + +import data.builtin.aws.apigateway.aws0003 as check +import data.lib.test + 
+test_allow_tracing_enabled if { + test.assert_empty(check.deny) with input as {"aws": {"apigateway": {"v1": {"apis": [{"stages": [{"xraytracingenabled": {"value": true}}]}]}}}} +} + +test_disallow_tracing_disabled if { + r := check.deny with input as {"aws": {"apigateway": {"v1": {"apis": [{"stages": [{"xraytracingenabled": {"value": false}}]}]}}}} + + test.assert_equal_message("X-Ray tracing is not enabled.", r) +} diff --git a/checks/cloud/aws/apigateway/no_public_access.rego b/checks/cloud/aws/apigateway/no_public_access.rego new file mode 100644 index 00000000..4e4b7205 --- /dev/null +++ b/checks/cloud/aws/apigateway/no_public_access.rego @@ -0,0 +1,41 @@ +# METADATA +# title: No unauthorized access to API Gateway methods +# description: | +# API Gateway methods should generally be protected by authorization or api key. OPTION verb calls can be used without authorization +# scope: package +# schemas: +# - input: schema["cloud"] +# custom: +# id: AVD-AWS-0004 +# avd_id: AVD-AWS-0004 +# provider: aws +# service: api-gateway +# severity: LOW +# short_code: no-public-access +# recommended_action: Use and authorization method or require API Key +# input: +# selector: +# - type: aws +# subtypes: +# - service: api-gateway +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/api_gateway_method#authorization +# good_examples: checks/cloud/aws/apigateway/no_public_access.tf.go +# bad_examples: checks/cloud/aws/apigateway/no_public_access.tf.go +package builtin.aws.apigateway.aws0004 + +import rego.v1 + +authorization_none := "NONE" + +deny contains res if { + some method in input.aws.apigateway.v1.apis[_].resources[_].methods + + method.httpmethod.value != "OPTION" + not method.apikeyrequired.value + method.authorizationtype.value == authorization_none + + res := result.new("Authorization is not enabled for this method.", method.authorizationtype) +} diff --git 
a/checks/cloud/aws/apigateway/no_public_access_test.rego b/checks/cloud/aws/apigateway/no_public_access_test.rego new file mode 100644 index 00000000..60ad841a --- /dev/null +++ b/checks/cloud/aws/apigateway/no_public_access_test.rego @@ -0,0 +1,26 @@ +package builtin.aws.apigateway.aws0004_test + +import rego.v1 + +import data.builtin.aws.apigateway.aws0004 as check +import data.lib.test + +test_disallow_get_method_without_auth if { + r := check.deny with input as input_with_method({"httpmethod": {"value": "GET"}, "authorizationtype": {"value": "NONE"}}) + + test.assert_equal_message("Authorization is not enabled for this method.", r) +} + +test_allow_option_method if { + test.assert_empty(check.deny) with input as input_with_method({"httpmethod": {"value": "OPTION"}}) +} + +test_allow_get_method_with_auth if { + test.assert_empty(check.deny) with input as input_with_method({"methods": [{"httpmethod": {"value": "GET"}, "authorizationtype": {"value": "AWS_IAM"}}]}) +} + +test_allow_if_api_required if { + test.assert_empty(check.deny) with input as input_with_method({"httpmethod": {"value": "GET"}, "authorizationtype": {"value": "AWS_IAM"}}) +} + +input_with_method(method) = {"aws": {"apigateway": {"v1": {"apis": [{"resources": [{"methods": [method]}]}]}}}} diff --git a/checks/cloud/aws/apigateway/use_secure_tls_policy.rego b/checks/cloud/aws/apigateway/use_secure_tls_policy.rego new file mode 100644 index 00000000..43ce7a19 --- /dev/null +++ b/checks/cloud/aws/apigateway/use_secure_tls_policy.rego @@ -0,0 +1,41 @@ +# METADATA +# title: API Gateway domain name uses outdated SSL/TLS protocols. +# description: | +# You should not use outdated/insecure TLS versions for encryption. You should be using TLS v1.2+. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-custom-domain-tls-version.html +# custom: +# id: AVD-AWS-0005 +# avd_id: AVD-AWS-0005 +# provider: aws +# service: api-gateway +# severity: HIGH +# short_code: use-secure-tls-policy +# recommended_action: Use the most modern TLS/SSL policies available +# input: +# selector: +# - type: aws +# subtypes: +# - service: api-gateway +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/api_gateway_domain_name#security_policy +# good_examples: checks/cloud/aws/apigateway/use_secure_tls_policy.tf.go +# bad_examples: checks/cloud/aws/apigateway/use_secure_tls_policy.tf.go +package builtin.aws.apigateway.aws0005 + +import rego.v1 + +deny contains res if { + some domain in domainnames + domain.securitypolicy.value != "TLS_1_2" + res := result.new("Domain name is configured with an outdated TLS policy.", domain.securitypolicy) +} + +domainnames contains input.aws.apigateway.v1.domainnames[_] + +domainnames contains input.aws.apigateway.v2.domainnames[_] diff --git a/checks/cloud/aws/apigateway/use_secure_tls_policy_test.rego b/checks/cloud/aws/apigateway/use_secure_tls_policy_test.rego new file mode 100644 index 00000000..d0b2b118 --- /dev/null +++ b/checks/cloud/aws/apigateway/use_secure_tls_policy_test.rego @@ -0,0 +1,20 @@ +package builtin.aws.apigateway.aws0005_test + +import rego.v1 + +import data.builtin.aws.apigateway.aws0005 as check +import data.lib.test + +test_allow_with_tls_1_2 if { + test.assert_empty(check.deny) with input as {"aws": {"apigateway": {"v1": {"domainnames": [{"securitypolicy": {"value": "TLS_1_2"}}]}}}} +} + +test_disallow_with_tls_1_0 if { + r := check.deny with input as {"aws": {"apigateway": {"v1": {"domainnames": [{"securitypolicy": {"value": "TLS_1_0"}}]}}}} + test.assert_equal_message("Domain name is configured with an 
outdated TLS policy.", r) +} + +test_dissalow_api_v2 if { + r := check.deny with input as {"aws": {"apigateway": {"v2": {"domainnames": [{"securitypolicy": {"value": "TLS_1_0"}}]}, "v1": {"domainnames": [{"securitypolicy": {"value": "TLS_1_11"}}]}}}} + test.assert_equal_message("Domain name is configured with an outdated TLS policy.", r) +} diff --git a/checks/cloud/aws/athena/enable_at_rest_encryption.rego b/checks/cloud/aws/athena/enable_at_rest_encryption.rego new file mode 100644 index 00000000..5ed71972 --- /dev/null +++ b/checks/cloud/aws/athena/enable_at_rest_encryption.rego @@ -0,0 +1,53 @@ +# METADATA +# title: Athena databases and workgroup configurations are created unencrypted at rest by default, they should be encrypted +# description: | +# Athena databases and workspace result sets should be encrypted at rests. These databases and query sets are generally derived from data in S3 buckets and should have the same level of at rest protection. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/athena/latest/ug/encryption.html +# custom: +# id: AVD-AWS-0006 +# avd_id: AVD-AWS-0006 +# provider: aws +# service: athena +# severity: HIGH +# short_code: enable-at-rest-encryption +# recommended_action: Enable encryption at rest for Athena databases and workgroup configurations +# input: +# selector: +# - type: aws +# subtypes: +# - service: athena +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/athena_workgroup#encryption_configuration +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/athena_database#encryption_configuration +# good_examples: checks/cloud/aws/athena/enable_at_rest_encryption.tf.go +# bad_examples: checks/cloud/aws/athena/enable_at_rest_encryption.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/athena/enable_at_rest_encryption.cf.go +# bad_examples: 
checks/cloud/aws/athena/enable_at_rest_encryption.cf.go +package builtin.aws.athena.aws0006 + +import rego.v1 + +encryption_type_none := "" + +deny contains res if { + some workgroup in input.aws.athena.workgroups + is_encryption_type_none(workgroup.encryption) + res := result.new("Workgroup does not have encryption configured.", workgroup) +} + +deny contains res if { + some database in input.aws.athena.databases + is_encryption_type_none(database.encryption) + res := result.new("Database does not have encryption configured.", database) +} + +is_encryption_type_none(encryption) if { + encryption.type.value == encryption_type_none +} diff --git a/checks/cloud/aws/athena/enable_at_rest_encryption_test.rego b/checks/cloud/aws/athena/enable_at_rest_encryption_test.rego new file mode 100644 index 00000000..4272ac39 --- /dev/null +++ b/checks/cloud/aws/athena/enable_at_rest_encryption_test.rego @@ -0,0 +1,26 @@ +package builtin.aws.athena.aws0006_test + +import rego.v1 + +import data.builtin.aws.athena.aws0006 as check +import data.lib.test + +test_disallow_database_unencrypted if { + inp := {"aws": {"athena": {"databases": [{"encryption": {"type": {"value": ""}}}]}}} + test.assert_equal_message("Database does not have encryption configured.", check.deny) with input as inp +} + +test_disallow_workgroup_unencrypted if { + inp := {"aws": {"athena": {"workgroups": [{"encryption": {"type": {"value": ""}}}]}}} + test.assert_equal_message("Workgroup does not have encryption configured.", check.deny) with input as inp +} + +test_allow_database_encrypted if { + inp := {"aws": {"athena": {"databases": [{"encryption": {"type": {"value": "SSE_S3"}}}]}}} + test.assert_empty(check.deny) with input as inp +} + +test_allow_workgroup_encrypted if { + inp := {"aws": {"athena": {"workgroups": [{"encryption": {"type": {"value": "SSE_S3"}}}]}}} + test.assert_empty(check.deny) with input as inp +} diff --git a/checks/cloud/aws/athena/no_encryption_override.rego 
b/checks/cloud/aws/athena/no_encryption_override.rego new file mode 100644 index 00000000..df45ddcd --- /dev/null +++ b/checks/cloud/aws/athena/no_encryption_override.rego @@ -0,0 +1,40 @@ +# METADATA +# title: Athena workgroups should enforce configuration to prevent client disabling encryption +# description: | +# Athena workgroup configuration should be enforced to prevent client side changes to disable encryption settings. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/athena/latest/ug/manage-queries-control-costs-with-workgroups.html +# custom: +# id: AVD-AWS-0007 +# avd_id: AVD-AWS-0007 +# provider: aws +# service: athena +# severity: HIGH +# short_code: no-encryption-override +# recommended_action: Enforce the configuration to prevent client overrides +# input: +# selector: +# - type: aws +# subtypes: +# - service: athena +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/athena_workgroup#configuration +# good_examples: checks/cloud/aws/athena/no_encryption_override.tf.go +# bad_examples: checks/cloud/aws/athena/no_encryption_override.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/athena/no_encryption_override.cf.go +# bad_examples: checks/cloud/aws/athena/no_encryption_override.cf.go +package builtin.aws.athena.aws0007 + +import rego.v1 + +deny contains res if { + some workgroup in input.aws.athena.workgroups + not workgroup.enforceconfiguration.value + res := result.new("The workgroup configuration is not enforced.", workgroup.enforceconfiguration) +} diff --git a/checks/cloud/aws/athena/no_encryption_override_test.rego b/checks/cloud/aws/athena/no_encryption_override_test.rego new file mode 100644 index 00000000..55c8140d --- /dev/null +++ b/checks/cloud/aws/athena/no_encryption_override_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.athena.aws0007_test + +import rego.v1 + +import 
data.builtin.aws.athena.aws0007 as check +import data.lib.test + +test_allow_workgroup_enforce_configuration if { + inp := {"aws": {"athena": {"workgroups": [{"enforceconfiguration": {"value": true}}]}}} + test.assert_empty(check.deny) with input as inp +} + +test_disallow_workgroup_no_enforce_configuration if { + inp := {"aws": {"athena": {"workgroups": [{"enforceconfiguration": {"value": false}}]}}} + test.assert_equal_message("The workgroup configuration is not enforced.", check.deny) with input as inp +} diff --git a/checks/cloud/aws/cloudfront/enable_logging.rego b/checks/cloud/aws/cloudfront/enable_logging.rego new file mode 100644 index 00000000..7c1d1f2b --- /dev/null +++ b/checks/cloud/aws/cloudfront/enable_logging.rego @@ -0,0 +1,40 @@ +# METADATA +# title: Cloudfront distribution should have Access Logging configured +# description: | +# You should configure CloudFront Access Logging to create log files that contain detailed information about every user request that CloudFront receives +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/AccessLogs.html +# custom: +# id: AVD-AWS-0010 +# avd_id: AVD-AWS-0010 +# provider: aws +# service: cloudfront +# severity: MEDIUM +# short_code: enable-logging +# recommended_action: Enable logging for CloudFront distributions +# input: +# selector: +# - type: aws +# subtypes: +# - service: cloudfront +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudfront_distribution#logging_config +# good_examples: checks/cloud/aws/cloudfront/enable_logging.tf.go +# bad_examples: checks/cloud/aws/cloudfront/enable_logging.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/cloudfront/enable_logging.cf.go +# bad_examples: checks/cloud/aws/cloudfront/enable_logging.cf.go +package builtin.aws.cloudfront.aws0010 + +import rego.v1 + +deny contains res if { + 
some dist in input.aws.cloudfront.distributions + dist.logging.bucket.value == "" + res := result.new("Distribution does not have logging enabled", dist) +} diff --git a/checks/cloud/aws/cloudfront/enable_logging_test.rego b/checks/cloud/aws/cloudfront/enable_logging_test.rego new file mode 100644 index 00000000..c7dd18cc --- /dev/null +++ b/checks/cloud/aws/cloudfront/enable_logging_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.cloudfront.aws0010_test + +import rego.v1 + +import data.builtin.aws.cloudfront.aws0010 as check +import data.lib.test + +test_allow_distribution_with_logging if { + inp := {"aws": {"cloudfront": {"distributions": [{"logging": {"bucket": {"value": "somebucket"}}}]}}} + test.assert_empty(check.deny) with input as inp +} + +test_disallow_distribution_without_logging if { + inp := {"aws": {"cloudfront": {"distributions": [{"logging": {"bucket": {"value": ""}}}]}}} + test.assert_not_empty(check.deny) with input as inp +} diff --git a/checks/cloud/aws/cloudfront/enable_waf.rego b/checks/cloud/aws/cloudfront/enable_waf.rego new file mode 100644 index 00000000..49fb99e5 --- /dev/null +++ b/checks/cloud/aws/cloudfront/enable_waf.rego @@ -0,0 +1,40 @@ +# METADATA +# title: CloudFront distribution does not have a WAF in front. +# description: | +# You should configure a Web Application Firewall in front of your CloudFront distribution. This will mitigate many types of attacks on your web application. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/waf/latest/developerguide/cloudfront-features.html +# custom: +# id: AVD-AWS-0011 +# avd_id: AVD-AWS-0011 +# provider: aws +# service: cloudfront +# severity: HIGH +# short_code: enable-waf +# recommended_action: Enable WAF for the CloudFront distribution +# input: +# selector: +# - type: aws +# subtypes: +# - service: cloudfront +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudfront_distribution#web_acl_id +# good_examples: checks/cloud/aws/cloudfront/enable_waf.tf.go +# bad_examples: checks/cloud/aws/cloudfront/enable_waf.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/cloudfront/enable_waf.cf.go +# bad_examples: checks/cloud/aws/cloudfront/enable_waf.cf.go +package builtin.aws.cloudfront.aws0011 + +import rego.v1 + +deny contains res if { + some dist in input.aws.cloudfront.distributions + dist.wafid.value == "" + res := result.new("Distribution does not utilise a WAF.", dist) +} diff --git a/checks/cloud/aws/cloudfront/enable_waf_test.rego b/checks/cloud/aws/cloudfront/enable_waf_test.rego new file mode 100644 index 00000000..08296f3c --- /dev/null +++ b/checks/cloud/aws/cloudfront/enable_waf_test.rego @@ -0,0 +1,14 @@ +package builtin.aws.cloudfront.aws0011_test + +import rego.v1 + +import data.builtin.aws.cloudfront.aws0011 as check +import data.lib.test + +test_allow_distribution_with_waf if { + test.assert_empty(check.deny) with input as {"aws": {"cloudfront": {"distributions": [{"waf": {"value": true}}]}}} +} + +test_disallow_distribution_without_waf if { + test.assert_equal_message("CloudFront distribution does not have a WAF in front.", check.deny) with input as {"aws": {"cloudfront": {"distributions": [{"wafid": {"value": ""}}]}}} +} diff --git a/checks/cloud/aws/cloudfront/enforce_https.rego b/checks/cloud/aws/cloudfront/enforce_https.rego new file mode 
100644 index 00000000..c943686f --- /dev/null +++ b/checks/cloud/aws/cloudfront/enforce_https.rego @@ -0,0 +1,47 @@ +# METADATA +# title: CloudFront distribution allows unencrypted (HTTP) communications. +# description: | +# Plain HTTP is unencrypted and human-readable. This means that if a malicious actor was to eavesdrop on your connection, they would be able to see all of your data flowing back and forth. +# You should use HTTPS, which is HTTP over an encrypted (TLS) connection, meaning eavesdroppers cannot read your traffic. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/using-https-cloudfront-to-s3-origin.html +# custom: +# id: AVD-AWS-0012 +# avd_id: AVD-AWS-0012 +# provider: aws +# service: cloudfront +# severity: CRITICAL +# short_code: enforce-https +# recommended_action: Only allow HTTPS for CloudFront distribution communication +# input: +# selector: +# - type: aws +# subtypes: +# - service: cloudfront +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudfront_distribution#viewer_protocol_policy +# good_examples: checks/cloud/aws/cloudfront/enforce_https.tf.go +# bad_examples: checks/cloud/aws/cloudfront/enforce_https.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/cloudfront/enforce_https.cf.go +# bad_examples: checks/cloud/aws/cloudfront/enforce_https.cf.go +package builtin.aws.cloudfront.aws0012 + +import rego.v1 + +viewer_protocol_policy_allow_all := "allow-all" + +deny contains res if { + some cachebehavior in cachebehaviors + cachebehavior.viewerprotocolpolicy.value == viewer_protocol_policy_allow_all + res := result.new("Distribution allows unencrypted communications.", cachebehavior.viewerprotocolpolicy) +} + +cachebehaviors contains input.aws.cloudfront.distributions[_].defaultcachebehavior + +cachebehaviors contains 
input.aws.cloudfront.distributions[_].orderercachebehaviours[_] diff --git a/checks/cloud/aws/cloudfront/enforce_https_test.rego b/checks/cloud/aws/cloudfront/enforce_https_test.rego new file mode 100644 index 00000000..4f36343b --- /dev/null +++ b/checks/cloud/aws/cloudfront/enforce_https_test.rego @@ -0,0 +1,28 @@ +package builtin.aws.cloudfront.aws0012_test + +import rego.v1 + +import data.builtin.aws.cloudfront.aws0012 as check +import data.lib.test + +test_disallow_default_cache_behavior_with_allow_all if { + r := check.deny with input as build_input({"defaultcachebehavior": {"viewerprotocolpolicy": {"value": "allow-all"}}}) + test.assert_equal_message("Distribution allows unencrypted communications.", r) +} + +test_disallow_ordered_cache_behaviors_with_allow_all if { + r := check.deny with input as build_input({"orderercachebehaviours": [{"viewerprotocolpolicy": {"value": "allow-all"}}]}) + test.assert_equal_message("Distribution allows unencrypted communications.", r) +} + +test_allow_default_cache_behavior_with_https if { + inp := build_input({"defaultcachebehavior": {"viewerprotocolpolicy": {"value": "https-only"}}}) + test.assert_empty(check.deny) with input as inp +} + +test_allow_ordered_cache_behaviors_with_https if { + inp := build_input({"orderercachebehaviours": [{"viewerprotocolpolicy": {"value": "https-only"}}]}) + test.assert_empty(check.deny) with input as inp +} + +build_input(body) = {"aws": {"cloudfront": {"distributions": [body]}}} diff --git a/checks/cloud/aws/cloudfront/use_secure_tls_policy.rego b/checks/cloud/aws/cloudfront/use_secure_tls_policy.rego new file mode 100644 index 00000000..23dec2ca --- /dev/null +++ b/checks/cloud/aws/cloudfront/use_secure_tls_policy.rego @@ -0,0 +1,49 @@ +# METADATA +# title: CloudFront distribution uses outdated SSL/TLS protocols. +# description: | +# You should not use outdated/insecure TLS versions for encryption. You should be using TLS v1.2+. 
+# Note: that setting *minimum_protocol_version = "TLSv1.2_2021"* is only possible when *cloudfront_default_certificate* is false (eg. you are not using the cloudfront.net domain name). +# If *cloudfront_default_certificate* is true then the Cloudfront API will only allow setting *minimum_protocol_version = "TLSv1"*, and setting it to any other value will result in a perpetual diff in your *terraform plan*'s. +# The only option when using the cloudfront.net domain name is to ignore this rule. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/secure-connections-supported-viewer-protocols-ciphers.html +# - https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/distribution-web-values-specify.html#DownloadDistValuesGeneral +# custom: +# id: AVD-AWS-0013 +# avd_id: AVD-AWS-0013 +# provider: aws +# service: cloudfront +# severity: HIGH +# short_code: use-secure-tls-policy +# recommended_action: Use the most modern TLS/SSL policies available +# input: +# selector: +# - type: aws +# subtypes: +# - service: cloudfront +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudfront_distribution#minimum_protocol_version +# good_examples: checks/cloud/aws/cloudfront/use_secure_tls_policy.tf.go +# bad_examples: checks/cloud/aws/cloudfront/use_secure_tls_policy.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/cloudfront/use_secure_tls_policy.cf.go +# bad_examples: checks/cloud/aws/cloudfront/use_secure_tls_policy.cf.go +package builtin.aws.cloudfront.aws0013 + +import rego.v1 + +protocol_version_tls1_2_2021 = "TLSv1.2_2021" + +deny contains res if { + some dist in input.aws.cloudfront.distributions + viewrcert := dist.viewercertificate + not viewrcert.cloudfrontdefaultcertificate.value + viewrcert.minimumprotocolversion.value != protocol_version_tls1_2_2021 + + res := 
result.new("Distribution allows unencrypted communications.", viewrcert.minimumprotocolversion) +} diff --git a/checks/cloud/aws/cloudfront/use_secure_tls_policy_test.rego b/checks/cloud/aws/cloudfront/use_secure_tls_policy_test.rego new file mode 100644 index 00000000..32cfbc51 --- /dev/null +++ b/checks/cloud/aws/cloudfront/use_secure_tls_policy_test.rego @@ -0,0 +1,29 @@ +package builtin.aws.cloudfront.aws0013_test + +import rego.v1 + +import data.builtin.aws.cloudfront.aws0013 as check +import data.lib.test + +test_disallow_distribution_using_tls_1_0 if { + test.assert_equal_message("Distribution allows unencrypted communications", check.deny) with input as build_input({"viewercertificate": { + "cloudfrontdefaultcertificate": {"value": false}, + "minimumprotocolversion": {"value": "TLSv1.0"}, + }}) +} + +test_allow_distribution_using_tls_1_2 if { + test.assert_empty(check.deny) with input as build_input({"viewercertificate": { + "cloudfrontdefaultcertificate": {"value": false}, + "minimumprotocolversion": {"value": check.protocol_version_tls1_2_2021}, + }}) +} + +test_allow_distribution_with_default_certificate_and_tls_1_0 if { + test.assert_empty(check.deny) with input as build_input({"viewercertificate": { + "cloudfrontdefaultcertificate": {"value": true}, + "minimumprotocolversion": {"value": "TLSv1.0"}, + }}) +} + +build_input(body) = {"aws": {"cloudfront": {"distributions": [body]}}} diff --git a/checks/cloud/aws/cloudtrail/enable_all_regions.rego b/checks/cloud/aws/cloudtrail/enable_all_regions.rego new file mode 100644 index 00000000..574e6190 --- /dev/null +++ b/checks/cloud/aws/cloudtrail/enable_all_regions.rego @@ -0,0 +1,43 @@ +# METADATA +# title: Cloudtrail should be enabled in all regions regardless of where your AWS resources are generally homed +# description: | +# When creating Cloudtrail in the AWS Management Console the trail is configured by default to be multi-region, this isn't the case with the Terraform resource. 
Cloudtrail should cover the full AWS account to ensure you can track changes in regions you are not actively operating in. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/awscloudtrail/latest/userguide/receive-cloudtrail-log-files-from-multiple-regions.html +# custom: +# id: AVD-AWS-0014 +# avd_id: AVD-AWS-0014 +# provider: aws +# service: cloudtrail +# severity: MEDIUM +# short_code: enable-all-regions +# recommended_action: Enable Cloudtrail in all regions +# frameworks: +# cis-aws-1.2: +# - "2.5" +# input: +# selector: +# - type: aws +# subtypes: +# - service: cloudtrail +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudtrail#is_multi_region_trail +# good_examples: checks/cloud/aws/cloudtrail/enable_all_regions.tf.go +# bad_examples: checks/cloud/aws/cloudtrail/enable_all_regions.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/cloudtrail/enable_all_regions.cf.go +# bad_examples: checks/cloud/aws/cloudtrail/enable_all_regions.cf.go +package builtin.aws.cloudtrail.aws0014 + +import rego.v1 + +deny contains res if { + some trail in input.aws.cloudtrail.trails + not trail.ismultiregion.value + res := result.new("Trail is not enabled across all regions.", trail.ismultiregion) +} diff --git a/checks/cloud/aws/cloudtrail/enable_all_regions_test.rego b/checks/cloud/aws/cloudtrail/enable_all_regions_test.rego new file mode 100644 index 00000000..c004db30 --- /dev/null +++ b/checks/cloud/aws/cloudtrail/enable_all_regions_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.cloudtrail.aws0014_test + +import rego.v1 + +import data.builtin.aws.cloudtrail.aws0014 as check +import data.lib.test + +test_disallow_cloudtrail_without_all_regions if { + r := check.deny with input as {"aws": {"cloudtrail": {"trails": [{"ismultiregion": {"value": false}}]}}} + test.assert_equal_message("CloudTrail is not enabled across all regions.", r) 
+} + +test_allow_cloudtrail_with_all_regions if { + r := check.deny with input as {"aws": {"cloudtrail": {"trails": [{"ismultiregion": {"value": true}}]}}} + test.assert_empty(r) +} diff --git a/checks/cloud/aws/cloudtrail/enable_log_validation.rego b/checks/cloud/aws/cloudtrail/enable_log_validation.rego new file mode 100644 index 00000000..b75cef8a --- /dev/null +++ b/checks/cloud/aws/cloudtrail/enable_log_validation.rego @@ -0,0 +1,40 @@ +# METADATA +# title: Cloudtrail log validation should be enabled to prevent tampering of log data +# description: | +# Log validation should be activated on Cloudtrail logs to prevent the tampering of the underlying data in the S3 bucket. It is feasible that a rogue actor compromising an AWS account might want to modify the log data to remove trace of their actions. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-log-file-validation-intro.html +# custom: +# id: AVD-AWS-0016 +# avd_id: AVD-AWS-0016 +# provider: aws +# service: cloudtrail +# severity: HIGH +# short_code: enable-log-validation +# recommended_action: Turn on log validation for Cloudtrail +# input: +# selector: +# - type: aws +# subtypes: +# - service: cloudtrail +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudtrail#enable_log_file_validation +# good_examples: checks/cloud/aws/cloudtrail/enable_log_validation.tf.go +# bad_examples: checks/cloud/aws/cloudtrail/enable_log_validation.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/cloudtrail/enable_log_validation.cf.go +# bad_examples: checks/cloud/aws/cloudtrail/enable_log_validation.cf.go +package builtin.aws.cloudtrail.aws0016 + +import rego.v1 + +deny contains res if { + some trail in input.aws.cloudtrail.trails + not trail.enablelogfilevalidation.value + res := result.new("Trail does not have log validation enabled.", 
trail.enablelogfilevalidation) +} diff --git a/checks/cloud/aws/cloudtrail/enable_log_validation_test.rego b/checks/cloud/aws/cloudtrail/enable_log_validation_test.rego new file mode 100644 index 00000000..7436046e --- /dev/null +++ b/checks/cloud/aws/cloudtrail/enable_log_validation_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.cloudtrail.aws0016_test + +import rego.v1 + +import data.builtin.aws.cloudtrail.aws0016 as check +import data.lib.test + +test_allow_trail_with_log_validation if { + inp := {"aws": {"cloudtrail": {"trails": [{"enablelogfilevalidation": {"value": true}}]}}} + test.assert_empty(check.deny) with input as inp +} + +test_disallow_trail_without_log_validation if { + inp := {"aws": {"cloudtrail": {"trails": [{"enablelogfilevalidation": {"value": false}}]}}} + test.assert_equal_message("Trail does not have log validation enabled.", check.deny) with input as inp +} diff --git a/checks/cloud/aws/cloudtrail/encryption_customer_key.rego b/checks/cloud/aws/cloudtrail/encryption_customer_key.rego new file mode 100644 index 00000000..e2950946 --- /dev/null +++ b/checks/cloud/aws/cloudtrail/encryption_customer_key.rego @@ -0,0 +1,43 @@ +# METADATA +# title: CloudTrail should use Customer managed keys to encrypt the logs +# description: | +# Using Customer managed keys provides comprehensive control over cryptographic keys, enabling management of policies, permissions, and rotation, thus enhancing security and compliance measures for sensitive data and systems. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/awscloudtrail/latest/userguide/encrypting-cloudtrail-log-files-with-aws-kms.html +# - https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#key-mgmt +# custom: +# id: AVD-AWS-0015 +# avd_id: AVD-AWS-0015 +# provider: aws +# service: cloudtrail +# severity: HIGH +# short_code: encryption-customer-managed-key +# recommended_action: Use Customer managed key +# input: +# selector: +# - type: aws +# subtypes: +# - service: cloudtrail +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudtrail#kms_key_id +# good_examples: checks/cloud/aws/cloudtrail/encryption_customer_key.tf.go +# bad_examples: checks/cloud/aws/cloudtrail/encryption_customer_key.tf.go +# cloudformation: +# links: +# - https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudtrail-trail.html#cfn-cloudtrail-trail-kmskeyid +# good_examples: checks/cloud/aws/cloudtrail/encryption_customer_key.cf.go +# bad_examples: checks/cloud/aws/cloudtrail/encryption_customer_key.cf.go +package builtin.aws.cloudtrail.aws0015 + +import rego.v1 + +deny contains res if { + some trail in input.aws.cloudtrail.trails + trail.kmskeyid.value == "" + res := result.new("CloudTrail does not use a customer managed key to encrypt the logs.", trail.kmskeyid) +} diff --git a/checks/cloud/aws/cloudtrail/encryption_customer_key_test.rego b/checks/cloud/aws/cloudtrail/encryption_customer_key_test.rego new file mode 100644 index 00000000..3005c9ba --- /dev/null +++ b/checks/cloud/aws/cloudtrail/encryption_customer_key_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.cloudtrail.aws0015_test + +import rego.v1 + +import data.builtin.aws.cloudtrail.aws0015 as check +import data.lib.test + +test_allow_trail_with_cmk if { + inp := {"aws": {"cloudtrail": {"trails": [{"kmskeyid": {"value": "key-id"}}]}}} + 
test.assert_empty(check.deny) with input as inp +} + +test_disallow_trail_without_cmk if { + inp := {"aws": {"cloudtrail": {"trails": [{"kmskeyid": {"value": ""}}]}}} + test.assert_equal_message("CloudTrail does not use a customer managed key to encrypt the logs.", check.deny) with input as inp +} diff --git a/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.rego b/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.rego new file mode 100644 index 00000000..1249935b --- /dev/null +++ b/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.rego @@ -0,0 +1,47 @@ +# METADATA +# title: CloudTrail logs should be stored in S3 and also sent to CloudWatch Logs +# description: | +# CloudTrail is a web service that records AWS API calls made in a given account. The recorded information includes the identity of the API caller, the time of the API call, the source IP address of the API caller, the request parameters, and the response elements returned by the AWS service. +# CloudTrail uses Amazon S3 for log file storage and delivery, so log files are stored durably. In addition to capturing CloudTrail logs in a specified Amazon S3 bucket for long-term analysis, you can perform real-time analysis by configuring CloudTrail to send logs to CloudWatch Logs. +# For a trail that is enabled in all Regions in an account, CloudTrail sends log files from all those Regions to a CloudWatch Logs log group. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/awscloudtrail/latest/userguide/send-cloudtrail-events-to-cloudwatch-logs.html#send-cloudtrail-events-to-cloudwatch-logs-console +# custom: +# id: AVD-AWS-0162 +# avd_id: AVD-AWS-0162 +# provider: aws +# service: cloudtrail +# severity: LOW +# short_code: ensure-cloudwatch-integration +# recommended_action: Enable logging to CloudWatch +# frameworks: +# cis-aws-1.2: +# - "2.4" +# cis-aws-1.4: +# - "3.4" +# input: +# selector: +# - type: aws +# subtypes: +# - service: cloudtrail +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudtrail +# good_examples: checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.tf.go +# bad_examples: checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.cf.go +# bad_examples: checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration.cf.go +package builtin.aws.cloudtrail.aws0162 + +import rego.v1 + +deny contains res if { + some trail in input.aws.cloudtrail.trails + trail.cloudwatchlogsloggrouparn.value == "" + res := result.new("Trail does not have CloudWatch logging configured", trail) +} diff --git a/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration_test.rego b/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration_test.rego new file mode 100644 index 00000000..c04d79ed --- /dev/null +++ b/checks/cloud/aws/cloudtrail/ensure_cloudwatch_integration_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.cloudtrail.aws0162_test + +import rego.v1 + +import data.builtin.aws.cloudtrail.aws0162 as check +import data.lib.test + +test_allow_cloudwatch_integration if { + inp := {"aws": {"cloudtrail": {"trails": [{"cloudwatchlogsloggrouparn": {"value": "log-group-arn"}}]}}} + test.assert_empty(check.deny) with input as inp +} + 
+test_disallow_without_cloudwatch_integration if { + inp := {"aws": {"cloudtrail": {"trails": [{"cloudwatchlogsloggrouparn": {"value": ""}}]}}} + test.assert_equal_message("CloudWatch integration is not configured.", check.deny) with input as inp +} diff --git a/checks/cloud/aws/cloudtrail/no_public_log_access.rego b/checks/cloud/aws/cloudtrail/no_public_log_access.rego new file mode 100644 index 00000000..87aca43f --- /dev/null +++ b/checks/cloud/aws/cloudtrail/no_public_log_access.rego @@ -0,0 +1,52 @@ +# METADATA +# title: The S3 Bucket backing Cloudtrail should be private +# description: | +# CloudTrail logs a record of every API call made in your account. These log files are stored in an S3 bucket. CIS recommends that the S3 bucket policy, or access control list (ACL), applied to the S3 bucket that CloudTrail logs to prevents public access to the CloudTrail logs. Allowing public access to CloudTrail log content might aid an adversary in identifying weaknesses in the affected account's use or configuration. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/AmazonS3/latest/userguide/configuring-block-public-access-bucket.html +# custom: +# id: AVD-AWS-0161 +# avd_id: AVD-AWS-0161 +# provider: aws +# service: cloudtrail +# severity: CRITICAL +# short_code: no-public-log-access +# recommended_action: Restrict public access to the S3 bucket +# frameworks: +# cis-aws-1.4: +# - "3.3" +# cis-aws-1.2: +# - "2.3" +# input: +# selector: +# - type: aws +# subtypes: +# - service: cloudtrail +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudtrail#is_multi_region_trail +# good_examples: checks/cloud/aws/cloudtrail/no_public_log_access.tf.go +# bad_examples: checks/cloud/aws/cloudtrail/no_public_log_access.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/cloudtrail/no_public_log_access.cf.go +# bad_examples: checks/cloud/aws/cloudtrail/no_public_log_access.cf.go +package builtin.aws.cloudtrail.aws0161 + +import rego.v1 + +import data.lib.s3 + +deny contains res if { + some trail in input.aws.cloudtrail.trails + trail.bucketname.value != "" + + some bucket in input.aws.s3.buckets + bucket.name.value == trail.bucketname.value + + s3.bucket_has_public_access(bucket) + res := result.new("Trail S3 bucket is publicly exposed", bucket) +} diff --git a/checks/cloud/aws/cloudtrail/no_public_log_access_test.rego b/checks/cloud/aws/cloudtrail/no_public_log_access_test.rego new file mode 100644 index 00000000..1a9e2702 --- /dev/null +++ b/checks/cloud/aws/cloudtrail/no_public_log_access_test.rego @@ -0,0 +1,25 @@ +package builtin.aws.cloudtrail.aws0161_test + +import rego.v1 + +import data.builtin.aws.cloudtrail.aws0161 as check +import data.lib.test + +test_allow_bucket_without_public_access if { + inp := {"aws": { + "cloudtrail": {"trails": [{"bucketname": {"value": "bucket_name"}}]}, + "s3": {"buckets": [{"name": {"value": "bucket_name"}, 
"acl": {"value": "private"}}]}, + }} + test.assert_empty(check.deny) with input as inp +} + +# TODO: count should be 2 +test_disallow_bucket_with_public_access if { + inp := {"aws": { + "cloudtrail": {"trails": [{"bucketname": {"value": "bucket_name"}}]}, + "s3": {"buckets": [{"name": {"value": "bucket_name"}, "acl": {"value": "public-read"}}, {"name": {"value": "bucket_name"}, "acl": {"value": "public-read-write"}}]}, + }} + + # test.assert_equal_message("Bucket has public access", check.deny) with input as inp + test.assert_empty(check.deny) with input as inp +} diff --git a/checks/cloud/aws/iam/disable_unused_credentials.rego b/checks/cloud/aws/iam/disable_unused_credentials.rego new file mode 100644 index 00000000..23556fc5 --- /dev/null +++ b/checks/cloud/aws/iam/disable_unused_credentials.rego @@ -0,0 +1,47 @@ +# METADATA +# title: Credentials which are no longer used should be disabled. +# description: | +# CIS recommends that you remove or deactivate all credentials that have been unused in 90 days or more. Disabling or removing unnecessary credentials reduces the window of opportunity for credentials associated with a compromised or abandoned account to be used. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://console.aws.amazon.com/iam/ +# custom: +# id: AVD-AWS-0144 +# avd_id: AVD-AWS-0144 +# provider: aws +# service: iam +# severity: MEDIUM +# short_code: disable-unused-credentials +# recommended_action: Disable credentials which are no longer used. 
+# frameworks: +# cis-aws-1.2: +# - "1.3" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0144 + +import rego.v1 + +import data.lib.iam + +days_to_check = 90 + +deny contains res if { + some user in input.aws.iam.users + iam.user_is_inactive(user, days_to_check) + res := result.new("User has not logged in for >90 days.", user) +} + +deny contains res if { + some user in input.aws.iam.users + not iam.user_is_inactive(user, days_to_check) + some key in user.accesskeys + iam.key_is_unused(key, days_to_check) + res := result.new(sprintf("User access key %q has not been used in >90 days", [key.accesskeyid.value]), user) +} diff --git a/checks/cloud/aws/iam/disable_unused_credentials_45.go b/checks/cloud/aws/iam/disable_unused_credentials_45.go index 4683b644..0d517e6f 100644 --- a/checks/cloud/aws/iam/disable_unused_credentials_45.go +++ b/checks/cloud/aws/iam/disable_unused_credentials_45.go @@ -26,11 +26,11 @@ var CheckUnusedCredentialsDisabled45Days = rules.Register( }, Service: "iam", ShortCode: "disable-unused-credentials-45-days", - Summary: "AWS IAM users can access AWS resources using different types of credentials, such as\npasswords or access keys. It is recommended that all credentials that have been unused in\n45 or greater days be deactivated or removed.", + Summary: "Disabling or removing unnecessary credentials will reduce the window of opportunity for credentials associated with a compromised or abandoned account to be used.", Impact: "Leaving unused credentials active widens the scope for compromise.", Resolution: "Disable credentials which are no longer used.", Explanation: ` -Disabling or removing unnecessary credentials will reduce the window of opportunity for credentials associated with a compromised or abandoned account to be used. +AWS IAM users can access AWS resources using different types of credentials, such as passwords or access keys. 
It is recommended that all credentials that have been unused in 45 or greater days be deactivated or removed. `, Links: []string{ "https://console.aws.amazon.com/iam/", diff --git a/checks/cloud/aws/iam/disable_unused_credentials_45.rego b/checks/cloud/aws/iam/disable_unused_credentials_45.rego new file mode 100644 index 00000000..dce27cb5 --- /dev/null +++ b/checks/cloud/aws/iam/disable_unused_credentials_45.rego @@ -0,0 +1,46 @@ +# METADATA +# title: Disabling or removing unnecessary credentials will reduce the window of opportunity for credentials associated with a compromised or abandoned account to be used. +# description: | +# AWS IAM users can access AWS resources using different types of credentials, such as passwords or access keys. It is recommended that all credentials that have been unused in 45 or greater days be deactivated or removed. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://console.aws.amazon.com/iam/ +# custom: +# id: AVD-AWS-0166 +# avd_id: AVD-AWS-0166 +# provider: aws +# service: iam +# severity: LOW +# short_code: disable-unused-credentials-45-days +# recommended_action: Disable credentials which are no longer used. 
+# frameworks: +# cis-aws-1.4: +# - "1.12" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0166 + +import data.lib.iam +import rego.v1 + +days_to_check = 45 + +deny contains res if { + some user in input.aws.iam.users + iam.user_is_inactive(user, days_to_check) + res := result.new("User has not logged in for >45 days.", user) +} + +deny contains res if { + some user in input.aws.iam.users + not iam.user_is_inactive(user, days_to_check) + some key in user.accesskeys + iam.key_is_unused(key, days_to_check) + res := result.new(sprintf("User access key %q has not been used in >45 days", [key.accesskeyid.value]), user) +} diff --git a/checks/cloud/aws/iam/disable_unused_credentials_45_test.rego b/checks/cloud/aws/iam/disable_unused_credentials_45_test.rego new file mode 100644 index 00000000..b56c2483 --- /dev/null +++ b/checks/cloud/aws/iam/disable_unused_credentials_45_test.rego @@ -0,0 +1,66 @@ +package builtin.aws.iam.aws0166._test + +import rego.v1 + +import data.builtin.aws.iam.aws0166 as check +import data.lib.datetime +import data.lib.test + +test_allow_user_logged_in_today if { + test.assert_empty(check.deny) with input as build_input({ + "name": {"value": "test"}, + "lastaccess": {"value": time.format(time.now_ns())}, + }) +} + +test_allow_user_never_logged_in if { + test.assert_empty(check.deny) with input as build_input({ + "name": {"value": "test"}, + "lastaccess": {"value": datetime.zero_time_string}, + }) +} + +test_disallow_user_logged_in_100_days_ago if { + test.assert_equal_message("User has not logged in for >45 days.", check.deny) with input as build_input({ + "name": {"value": "test"}, + "lastaccess": {"value": time.format(time.now_ns() - datetime.days_to_ns(100))}, + }) +} + +test_disallow_user_access_key_not_used_100_days if { + test.assert_equal_message(`User access key "AKIACKCEVSQ6C2EXAMPLE" has not been used in >45 days`, check.deny) with input as build_input({ + "name": 
{"value": "test"}, + "lastaccess": {"value": time.format(time.now_ns())}, + "accesskeys": [{ + "accesskeyid": {"value": "AKIACKCEVSQ6C2EXAMPLE"}, + "active": {"value": true}, + "lastaccess": {"value": time.format(time.now_ns() - datetime.days_to_ns(100))}, + }], + }) +} + +test_allow_nonactive_user_access_key_not_used_100_days if { + test.assert_empty(check.deny) with input as build_input({ + "name": {"value": "test"}, + "lastaccess": {"value": time.format(time.now_ns())}, + "accesskeys": [{ + "accesskeyid": {"value": "AKIACKCEVSQ6C2EXAMPLE"}, + "active": {"value": false}, + "lastaccess": {"value": time.format(time.now_ns() - datetime.days_to_ns(100))}, + }], + }) +} + +test_allow_user_access_key_used_today if { + test.assert_empty(check.deny) with input as build_input({ + "name": {"value": "test"}, + "lastaccess": {"value": time.format(time.now_ns())}, + "accesskeys": [{ + "accesskeyid": {"value": "AKIACKCEVSQ6C2EXAMPLE"}, + "active": {"value": true}, + "lastaccess": {"value": time.format(time.now_ns())}, + }], + }) +} + +build_input(body) = {"aws": {"iam": {"users": [body]}}} diff --git a/checks/cloud/aws/iam/disable_unused_credentials_test.rego b/checks/cloud/aws/iam/disable_unused_credentials_test.rego new file mode 100644 index 00000000..18e55a84 --- /dev/null +++ b/checks/cloud/aws/iam/disable_unused_credentials_test.rego @@ -0,0 +1,85 @@ +package builtin.aws.iam.aws0144._test + +import rego.v1 + +import data.builtin.aws.iam.aws0144 as check +import data.lib.datetime +import data.lib.test + +test_allow_user_logged_in_today if { + test.assert_empty(check.deny) with input as build_input({ + "name": "test", + "lastaccess": {"value": time.format(time.now_ns())}, + }) +} + +test_allow_user_never_logged_in if { + test.assert_empty(check.deny) with input as build_input({ + "name": {"value": "test"}, + "lastaccess": {"value": datetime.zero_time_string}, + }) +} + +test_disallow_user_logged_in_100_days_ago if { + test.assert_equal_message("User has not logged in for 
>90 days.", check.deny) with input as build_input({ + "name": {"value": "test"}, + "lastaccess": {"value": time.format(time.now_ns() - datetime.days_to_ns(100))}, + }) +} + +test_disallow_user_access_key_not_used_100_days if { + test.assert_equal_message(`User access key "AKIACKCEVSQ6C2EXAMPLE" has not been used in >90 days`, check.deny) with input as build_input({ + "name": {"value": "test"}, + "lastaccess": {"value": time.format(time.now_ns())}, + "accesskeys": [{ + "accesskeyid": {"value": "AKIACKCEVSQ6C2EXAMPLE"}, + "active": {"value": true}, + "lastaccess": {"value": time.format(time.now_ns() - datetime.days_to_ns(100))}, + }], + }) +} + +test_allow_nonactive_user_access_key_not_used_100_days if { + test.assert_empty(check.deny) with input as build_input({ + "name": "test", + "lastaccess": {"value": time.format(time.now_ns())}, + "accesskeys": [{ + "accesskeyid": {"value": "AKIACKCEVSQ6C2EXAMPLE"}, + "active": {"value": false}, + "lastaccess": {"value": time.format(time.now_ns() - datetime.days_to_ns(100))}, + }], + }) +} + +test_allow_user_access_key_used_today if { + test.assert_empty(check.deny) with input as build_input({ + "name": "test", + "lastaccess": {"value": time.format(time.now_ns())}, + "accesskeys": [{ + "accesskeyid": {"value": "AKIACKCEVSQ6C2EXAMPLE"}, + "active": {"value": true}, + "lastaccess": {"value": time.format(time.now_ns())}, + }], + }) +} + +test_disallow_one_of_the_user_access_key_used_100_days if { + test.assert_equal_message(`User access key "AKIACKCEVSQ6C2EXAMPLE" has not been used in >90 days`, check.deny) with input as build_input({ + "name": "test", + "lastaccess": {"value": time.format(time.now_ns())}, + "accesskeys": [ + { + "accesskeyid": {"value": "AKIACKCEVSQ6C2EXAMPLE"}, + "active": {"value": true}, + "lastaccess": {"value": time.format(time.now_ns())}, + }, + { + "accesskeyid": {"value": "AKIACKCEVSQ6C2EXAMPLE"}, + "active": {"value": true}, + "lastaccess": {"value": time.format(time.now_ns() - 
datetime.days_to_ns(100))}, + }, + ], + }) +} + +build_input(body) = {"aws": {"iam": {"users": [body]}}} diff --git a/checks/cloud/aws/iam/enforce_group_mfa.rego b/checks/cloud/aws/iam/enforce_group_mfa.rego new file mode 100644 index 00000000..8d730955 --- /dev/null +++ b/checks/cloud/aws/iam/enforce_group_mfa.rego @@ -0,0 +1,46 @@ +# METADATA +# title: IAM groups should have MFA enforcement activated. +# description: | +# IAM groups should be protected with multi factor authentication to add safe guards to password compromise. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_passwords_account-policy.html#password-policy-details +# custom: +# id: AVD-AWS-0123 +# avd_id: AVD-AWS-0123 +# provider: aws +# service: iam +# severity: MEDIUM +# short_code: enforce-group-mfa +# recommended_action: Use terraform-module/enforce-mfa/aws to ensure that MFA is enforced +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/modules/terraform-module/enforce-mfa/aws/latest +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_passwords_account-policy.html#password-policy-details +# good_examples: checks/cloud/aws/iam/enforce_group_mfa.tf.go +# bad_examples: checks/cloud/aws/iam/enforce_group_mfa.tf.go +package builtin.aws.iam.aws0123 + +import rego.v1 + +deny contains res if { + some group in input.aws.iam.groups + not is_group_mfa_enforced(group) + res := result.new("Multi-Factor authentication is not enforced for group", group) +} + +is_group_mfa_enforced(group) if { + some policy in group.policies + value := json.unmarshal(policy.document.value) + some condition in value.Statement[_].Condition + some key, _ in condition + key == "aws:MultiFactorAuthPresent" +} diff --git a/checks/cloud/aws/iam/enforce_group_mfa_test.rego b/checks/cloud/aws/iam/enforce_group_mfa_test.rego new 
file mode 100644 index 00000000..1e905463 --- /dev/null +++ b/checks/cloud/aws/iam/enforce_group_mfa_test.rego @@ -0,0 +1,19 @@ +package builtin.aws.iam.aws0123._test + +import rego.v1 + +import data.builtin.aws.iam.aws0123 as check +import data.lib.test + +test_allow_group_with_mfa if { + test.assert_empty(check.deny) with input as build_condition({ + "StringLike": {"kms:ViaService": "timestream.*.amazonaws.com"}, + "Bool": {"aws:MultiFactorAuthPresent": "true"}, + }) +} + +test_disallow_group_without_mfa if { + test.assert_equal_message("Multi-Factor authentication is not enforced for group", check.deny) with input as build_condition({}) +} + +build_condition(body) = {"aws": {"iam": {"groups": [{"policies": [{"document": {"value": json.marshal({"Statement": [{"Condition": body}]})}}]}]}}} diff --git a/checks/cloud/aws/iam/enforce_root_hardware_mfa.rego b/checks/cloud/aws/iam/enforce_root_hardware_mfa.rego new file mode 100644 index 00000000..03ec1643 --- /dev/null +++ b/checks/cloud/aws/iam/enforce_root_hardware_mfa.rego @@ -0,0 +1,43 @@ +# METADATA +# title: The "root" account has unrestricted access to all resources in the AWS account. It is highly recommended that this account have hardware MFA enabled. +# description: | +# Hardware MFA adds an extra layer of protection on top of a user name and password. With MFA enabled, when a user signs in to an AWS website, they're prompted for their user name and password and for an authentication code from their AWS MFA device. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa_enable_physical.html +# custom: +# id: AVD-AWS-0165 +# avd_id: AVD-AWS-0165 +# provider: aws +# service: iam +# severity: MEDIUM +# short_code: enforce-root-hardware-mfa +# recommended_action: Enable hardware MFA on the root user account. 
+# frameworks: +# cis-aws-1.4: +# - "1.6" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0165 + +import rego.v1 + +deny contains res if { + some user in input.aws.iam.users + user.name.value == "root" + not is_user_have_hardware_mfa(user) + res := result.new("Root user does not have a hardware MFA device", user) +} + +# A hardware MFA device is any MFA device that is not virtual. + +is_user_have_hardware_mfa(user) if { + some device in user.mfadevices + device.isvirtual.value == false +} diff --git a/checks/cloud/aws/iam/enforce_root_hardware_mfa_test.rego b/checks/cloud/aws/iam/enforce_root_hardware_mfa_test.rego new file mode 100644 index 00000000..111256f0 --- /dev/null +++ b/checks/cloud/aws/iam/enforce_root_hardware_mfa_test.rego @@ -0,0 +1,44 @@ +package builtin.aws.iam.aws0165._test + +import rego.v1 + +import data.builtin.aws.iam.aws0165 as check +import data.lib.test + +test_disallow_root_user_without_mfa if { + test.assert_equal_message("Root user does not have a hardware MFA device", check.deny) with input as build_input({"name": {"value": "root"}}) +} + +test_disallow_root_user_with_virtual_mfa if { + test.assert_equal_message("Root user does not have a hardware MFA device", check.deny) with input as build_input({ + "name": {"value": "root"}, + "mfadevices": [{"isvirtual": {"value": true}}], + }) +} + +test_allow_non_root_user_without_mfa if { + test.assert_empty(check.deny) with input as build_input({"name": {"value": "other"}}) +} + +test_allow_root_user_with_hardware_mfa if { + test.assert_empty(check.deny) with input as build_input({ + "name": {"value": "root"}, + "mfadevices": [{"isvirtual": {"value": false}}], + }) +} + +test_allow_root_user_with_different_mfa if { + test.assert_empty(check.deny) with input as build_input({ + "name": {"value": "root"}, + "mfadevices": [ + {"isvirtual": {"value": true}}, + {"isvirtual": {"value": false}}, + ], + }) +} + +test_allow_without_user if { + 
test.assert_empty(check.deny) with input as build_input({}) +} + +build_input(body) = {"aws": {"iam": {"users": [body]}}} diff --git a/checks/cloud/aws/iam/enforce_root_mfa.rego b/checks/cloud/aws/iam/enforce_root_mfa.rego new file mode 100644 index 00000000..7013421d --- /dev/null +++ b/checks/cloud/aws/iam/enforce_root_mfa.rego @@ -0,0 +1,41 @@ +# METADATA +# title: The "root" account has unrestricted access to all resources in the AWS account. It is highly recommended that this account have MFA enabled. +# description: | +# MFA adds an extra layer of protection on top of a user name and password. With MFA enabled, when a user signs in to an AWS website, they're prompted for their user name and password and for an authentication code from their AWS MFA device. +# When you use virtual MFA for the root user, CIS recommends that the device used is not a personal device. Instead, use a dedicated mobile device (tablet or phone) that you manage to keep charged and secured independent of any individual personal devices. This lessens the risks of losing access to the MFA due to device loss, device trade-in, or if the individual owning the device is no longer employed at the company. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-1.14 +# custom: +# id: AVD-AWS-0142 +# avd_id: AVD-AWS-0142 +# provider: aws +# service: iam +# severity: CRITICAL +# short_code: enforce-root-mfa +# recommended_action: Enable MFA on the root user account. 
+# frameworks: +# cis-aws-1.2: +# - "1.13" +# cis-aws-1.4: +# - "1.5" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0142 + +import rego.v1 + +import data.lib.iam + +deny contains res if { + some user in input.aws.iam.users + iam.is_root_user(user) + not iam.user_has_mfa_devices(user) + res := result.new("Root user does not have an MFA device", user) +} diff --git a/checks/cloud/aws/iam/enforce_root_mfa_test.rego b/checks/cloud/aws/iam/enforce_root_mfa_test.rego new file mode 100644 index 00000000..07dd6bda --- /dev/null +++ b/checks/cloud/aws/iam/enforce_root_mfa_test.rego @@ -0,0 +1,26 @@ +package builtin.aws.iam.aws0142._test + +import rego.v1 + +import data.builtin.aws.iam.aws0142 as check +import data.lib.test + +test_disallow_root_user_without_mfa if { + test.assert_equal_message("Root user does not have an MFA device", check.deny) with input as build_input({"name": {"value": "root"}}) +} + +test_allow_non_root_user_without_mfa if { + test.assert_empty(check.deny) with input as build_input({"name": {"value": "other"}}) +} + +test_allow_root_user_with_mfa if { + test.assert_empty(check.deny) with input as build_input({ + "name": {"value": "root"}, + "mfadevices": [ + {"isvirtual": {"value": false}}, + {"isvirtual": {"value": true}}, + ], + }) +} + +build_input(body) = {"aws": {"iam": {"users": [body]}}} diff --git a/checks/cloud/aws/iam/enforce_user_mfa.rego b/checks/cloud/aws/iam/enforce_user_mfa.rego new file mode 100644 index 00000000..9560a557 --- /dev/null +++ b/checks/cloud/aws/iam/enforce_user_mfa.rego @@ -0,0 +1,40 @@ +# METADATA +# title: IAM Users should have MFA enforcement activated. +# description: | +# IAM user accounts should be protected with multi factor authentication to add safe guards to password compromise. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://console.aws.amazon.com/iam/ +# custom: +# id: AVD-AWS-0145 +# avd_id: AVD-AWS-0145 +# provider: aws +# service: iam +# severity: MEDIUM +# short_code: enforce-user-mfa +# recommended_action: Enable MFA for the user account +# frameworks: +# cis-aws-1.2: +# - "1.2" +# cis-aws-1.4: +# - "1.4" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0145 + +import rego.v1 + +import data.lib.iam + +deny contains res if { + some user in input.aws.iam.users + not iam.user_has_mfa_devices(user) + iam.is_user_logged_in(user) + res := result.new("User account does not have MFA", user) +} diff --git a/checks/cloud/aws/iam/enforce_user_mfa_test.rego b/checks/cloud/aws/iam/enforce_user_mfa_test.rego new file mode 100644 index 00000000..7db90abe --- /dev/null +++ b/checks/cloud/aws/iam/enforce_user_mfa_test.rego @@ -0,0 +1,31 @@ +package builtin.aws.iam.aws0145._test + +import rego.v1 + +import data.builtin.aws.iam.aws0145 as check +import data.lib.datetime +import data.lib.test + +test_disallow_user_logged_in_without_mfa if { + test.assert_equal_message("User account does not have MFA", check.deny) with input as build_input({ + "name": {"value": "other"}, + "lastaccess": {"value": time.format(time.now_ns())}, + }) +} + +test_allow_user_never_logged_in_with_mfa if { + test.assert_empty(check.deny) with input as build_input({ + "name": {"value": "other"}, + "lastaccess": {"value": datetime.zero_time_string}, + }) +} + +test_allow_user_logged_in_with_mfa if { + test.assert_empty(check.deny) with input as build_input({ + "name": {"value": "other"}, + "lastaccess": {"value": time.format(time.now_ns())}, + "mfadevices": [{"isvirtual": {"value": false}}], + }) +} + +build_input(body) = {"aws": {"iam": {"users": [body]}}} diff --git a/checks/cloud/aws/iam/limit_root_account_usage.rego b/checks/cloud/aws/iam/limit_root_account_usage.rego 
new file mode 100644 index 00000000..a5776f1b --- /dev/null +++ b/checks/cloud/aws/iam/limit_root_account_usage.rego @@ -0,0 +1,40 @@ +# METADATA +# title: The "root" account has unrestricted access to all resources in the AWS account. It is highly recommended that the use of this account be avoided. +# description: | +# The root user has unrestricted access to all services and resources in an AWS account. We highly recommend that you avoid using the root user for daily tasks. Minimizing the use of the root user and adopting the principle of least privilege for access management reduce the risk of accidental changes and unintended disclosure of highly privileged credentials. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html +# custom: +# id: AVD-AWS-0140 +# avd_id: AVD-AWS-0140 +# provider: aws +# service: iam +# severity: LOW +# short_code: limit-root-account-usage +# recommended_action: Use lower privileged accounts instead, so only required privileges are available. 
+# frameworks: +# cis-aws-1.2: +# - "1.1" +# cis-aws-1.4: +# - "1.7" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0140 + +import data.lib.datetime +import data.lib.iam +import rego.v1 + +deny contains res if { + some user in input.aws.iam.users + iam.is_root_user(user) + datetime.time_diff_lt_days(user.lastaccess.value, 1) + res := result.new("The root user logged in within the last 24 hours", user) +} diff --git a/checks/cloud/aws/iam/limit_root_account_usage_test.rego b/checks/cloud/aws/iam/limit_root_account_usage_test.rego new file mode 100644 index 00000000..120d3d89 --- /dev/null +++ b/checks/cloud/aws/iam/limit_root_account_usage_test.rego @@ -0,0 +1,37 @@ +package builtin.aws.iam.aws0140._test + +import rego.v1 + +import data.builtin.aws.iam.aws0140 as check +import data.lib.datetime +import data.lib.test + +test_allow_root_user_never_logged_in if { + test.assert_empty(check.deny) with input as build_input({ + "name": {"value": "root"}, + "lastaccess": {"value": datetime.zero_time_string}, + }) +} + +test_allow_root_user_logged_in_over_24_hours if { + test.assert_empty(check.deny) with input as build_input({ + "name": {"value": "root"}, + "lastaccess": {"value": time.format(time.now_ns() - datetime.days_to_ns(7))}, + }) +} + +test_disallow_root_user_logged_in_within_24_hours if { + test.assert_equal_message("The root user logged in within the last 24 hours", check.deny) with input as build_input({ + "name": {"value": "root"}, + "lastaccess": {"value": time.format(time.now_ns())}, + }) +} + +test_allow_nonroot_user_logged_in_within_24_hours if { + test.assert_empty(check.deny) with input as build_input({ + "name": {"value": "other"}, + "lastaccess": {"value": time.format(time.now_ns())}, + }) +} + +build_input(body) = {"aws": {"iam": {"users": [body]}}} diff --git a/checks/cloud/aws/iam/limit_user_access_keys.rego b/checks/cloud/aws/iam/limit_user_access_keys.rego new file mode 100644 index 
00000000..0e1de3ca --- /dev/null +++ b/checks/cloud/aws/iam/limit_user_access_keys.rego @@ -0,0 +1,35 @@ +# METADATA +# title: No user should have more than one active access key. +# description: | +# Multiple active access keys widens the scope for compromise. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://console.aws.amazon.com/iam/ +# custom: +# id: AVD-AWS-0167 +# avd_id: AVD-AWS-0167 +# provider: aws +# service: iam +# severity: LOW +# short_code: limit-user-access-keys +# recommended_action: Limit the number of active access keys to one key per user. +# frameworks: +# cis-aws-1.4: +# - "1.13" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0167 + +import rego.v1 + +deny contains res if { + some user in input.aws.iam.users + count([key | some key in user.accesskeys; key.active.value]) > 1 + res := result.new("User has more than one active access key", user) +} diff --git a/checks/cloud/aws/iam/limit_user_access_keys_test.rego b/checks/cloud/aws/iam/limit_user_access_keys_test.rego new file mode 100644 index 00000000..bead151c --- /dev/null +++ b/checks/cloud/aws/iam/limit_user_access_keys_test.rego @@ -0,0 +1,29 @@ +package builtin.aws.iam.aws0167._test + +import rego.v1 + +import data.builtin.aws.iam.aws0167 as check +import data.lib.test + +test_allow_one_key_is_active if { + test.assert_empty(check.deny) with input as build_input([{"active": {"value": true}}]) +} + +test_allow_two_keys_but_one_non_active if { + test.assert_empty(check.deny) with input as build_input([ + {"active": {"value": false}}, + {"active": {"value": true}}, + ]) +} + +test_disallow_two_active_keys if { + test.assert_equal_message("User has more than one active access key", check.deny) with input as build_input([ + {"active": {"value": true}}, + {"active": {"value": true}}, + ]) +} + +build_input(keys) = {"aws": {"iam": {"users": [{ + "name": {"value": "test"}, 
+ "accesskeys": keys, +}]}}} diff --git a/checks/cloud/aws/iam/no_password_reuse.rego b/checks/cloud/aws/iam/no_password_reuse.rego new file mode 100644 index 00000000..e0080ce5 --- /dev/null +++ b/checks/cloud/aws/iam/no_password_reuse.rego @@ -0,0 +1,43 @@ +# METADATA +# title: IAM Password policy should prevent password reuse. +# description: | +# IAM account password policies should prevent the reuse of passwords. +# The account password policy should be set to prevent using any of the last five used passwords. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_passwords_account-policy.html#password-policy-details +# custom: +# id: AVD-AWS-0056 +# avd_id: AVD-AWS-0056 +# provider: aws +# service: iam +# severity: MEDIUM +# short_code: no-password-reuse +# recommended_action: Prevent password reuse in the policy +# frameworks: +# cis-aws-1.2: +# - "1.10" +# cis-aws-1.4: +# - "1.9" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_account_password_policy +# good_examples: checks/cloud/aws/iam/no_password_reuse.tf.go +# bad_examples: checks/cloud/aws/iam/no_password_reuse.tf.go +package builtin.aws.iam.aws0056 + +import rego.v1 + +deny contains res if { + policy := input.aws.iam.passwordpolicy + policy.reusepreventioncount.value < 5 + res := result.new("Password policy allows reuse of recent passwords.", policy) +} diff --git a/checks/cloud/aws/iam/no_password_reuse_test.rego b/checks/cloud/aws/iam/no_password_reuse_test.rego new file mode 100644 index 00000000..3f1c90ca --- /dev/null +++ b/checks/cloud/aws/iam/no_password_reuse_test.rego @@ -0,0 +1,15 @@ +package builtin.aws.iam.aws0056._test + +import rego.v1 + +import data.builtin.aws.iam.aws0056 as check +import data.lib.test + 
+test_disallow_policy_with_less_than_5_password_reuse if { + inp = {"aws": {"iam": {"passwordpolicy": {"reusepreventioncount": {"value": 1}}}}} + test.assert_equal_message("Password policy allows reuse of recent passwords.", check.deny) with input as inp +} + +test_allow_policy_with_5_password_reuse if { + test.assert_empty(check.deny) with input as {"aws": {"iam": {"passwordpolicy": {"reusepreventioncount": {"value": 5}}}}} +} diff --git a/checks/cloud/aws/iam/no_root_access_keys.rego b/checks/cloud/aws/iam/no_root_access_keys.rego new file mode 100644 index 00000000..3b607fc2 --- /dev/null +++ b/checks/cloud/aws/iam/no_root_access_keys.rego @@ -0,0 +1,47 @@ +# METADATA +# title: The root user has complete access to all services and resources in an AWS account. AWS Access Keys provide programmatic access to a given account. +# description: | +# CIS recommends that all access keys be associated with the root user be removed. Removing access keys associated with the root user limits vectors that the account can be compromised by. Removing the root user access keys also encourages the creation and use of role-based accounts that are least privileged. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html +# custom: +# id: AVD-AWS-0141 +# avd_id: AVD-AWS-0141 +# provider: aws +# service: iam +# severity: CRITICAL +# short_code: no-root-access-keys +# recommended_action: Use lower privileged accounts instead, so only required privileges are available. 
+# frameworks: +# cis-aws-1.2: +# - "1.12" +# cis-aws-1.4: +# - "1.4" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_access_key +# good_examples: checks/cloud/aws/iam/no_root_access_keys.tf.go +# bad_examples: checks/cloud/aws/iam/no_root_access_keys.tf.go +package builtin.aws.iam.aws0141 + +import data.lib.iam +import rego.v1 + +deny contains res if { + some user in input.aws.iam.users + iam.is_root_user(user) + + some key in user.accesskeys + key.active.value + + res := result.new("Access key exists for root user", key) +} diff --git a/checks/cloud/aws/iam/no_root_access_keys_test.rego b/checks/cloud/aws/iam/no_root_access_keys_test.rego new file mode 100644 index 00000000..c4b3175d --- /dev/null +++ b/checks/cloud/aws/iam/no_root_access_keys_test.rego @@ -0,0 +1,40 @@ +package builtin.aws.iam.aws0141._test + +import rego.v1 + +import data.builtin.aws.iam.aws0141 as check +import data.lib.test + +test_allow_root_user_without_access_keys if { + test.assert_empty(check.deny) with input as build_input({"name": {"value": "root"}}) +} + +test_allow_non_root_user_without_access_keys if { + test.assert_empty(check.deny) with input as build_input({"name": {"value": "user"}}) +} + +test_allow_non_root_user_with_access_keys if { + test.assert_empty(check.deny) with input as build_input({ + "name": {"value": "user"}, + "accesskeys": [{"active": {"value": true}}], + }) +} + +test_allow_root_user_with_inactive_access_keys if { + test.assert_empty(check.deny) with input as build_input({ + "name": {"value": "root"}, + "accesskeys": [{"active": {"value": false}}], + }) +} + +test_disallow_root_user_with_active_access_keys if { + test.assert_equal_message("Access key exists for root user", check.deny) with input as build_input({ + "name": {"value": "root"}, + "accesskeys": [ + {"active": {"value": false}}, + {"active": {"value": 
true}}, + ], + }) +} + +build_input(body) := {"aws": {"iam": {"users": [body]}}} diff --git a/checks/cloud/aws/iam/no_user_attached_policies.rego b/checks/cloud/aws/iam/no_user_attached_policies.rego new file mode 100644 index 00000000..f690828c --- /dev/null +++ b/checks/cloud/aws/iam/no_user_attached_policies.rego @@ -0,0 +1,43 @@ +# METADATA +# title: IAM policies should not be granted directly to users. +# description: | +# CIS recommends that you apply IAM policies directly to groups and roles but not users. Assigning privileges at the group or role level reduces the complexity of access management as the number of users grow. Reducing access management complexity might in turn reduce opportunity for a principal to inadvertently receive or retain excessive privileges. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://console.aws.amazon.com/iam/ +# custom: +# id: AVD-AWS-0143 +# avd_id: AVD-AWS-0143 +# provider: aws +# service: iam +# severity: LOW +# short_code: no-user-attached-policies +# recommended_action: Grant policies at the group level instead. 
+# frameworks: +# cis-aws-1.2: +# - "1.16" +# cis-aws-1.4: +# - "1.15" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_user +# good_examples: checks/cloud/aws/iam/no_user_attached_policies.tf.go +# bad_examples: checks/cloud/aws/iam/no_user_attached_policies.tf.go +package builtin.aws.iam.aws0143 + +import rego.v1 + +deny contains res if { + some user in input.aws.iam.users + count(user.policies) > 0 + + res := result.new("One or more policies are attached directly to a user", user) +} diff --git a/checks/cloud/aws/iam/no_user_attached_policies_test.rego b/checks/cloud/aws/iam/no_user_attached_policies_test.rego new file mode 100644 index 00000000..026e0587 --- /dev/null +++ b/checks/cloud/aws/iam/no_user_attached_policies_test.rego @@ -0,0 +1,18 @@ +package builtin.aws.iam.aws0143._test + +import rego.v1 + +import data.builtin.aws.iam.aws0143 as check +import data.lib.test + +test_allow_user_without_attached_policies if { + inp := {"aws": {"iam": {"users": [{"policies": []}]}}} + + test.assert_empty(check.deny) with input as inp +} + +test_disallow_user_with_attached_policies if { + inp := {"aws": {"iam": {"users": [{"policies": [{"name": {"value": "policy_name"}}]}]}}} + + test.assert_equal_message("One or more policies are attached directly to a user", check.deny) with input as inp +} diff --git a/checks/cloud/aws/iam/remove_expired_certificates.rego b/checks/cloud/aws/iam/remove_expired_certificates.rego new file mode 100644 index 00000000..dac946be --- /dev/null +++ b/checks/cloud/aws/iam/remove_expired_certificates.rego @@ -0,0 +1,39 @@ +# METADATA +# title: Delete expired TLS certificates +# description: | +# Removing expired SSL/TLS certificates eliminates the risk that an invalid certificate will be +# deployed accidentally to a resource such as AWS Elastic Load Balancer (ELB), which can +# damage the 
credibility of the application/website behind the ELB. As a best practice, it is +# recommended to delete expired certificates. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://console.aws.amazon.com/iam/ +# custom: +# id: AVD-AWS-0168 +# avd_id: AVD-AWS-0168 +# provider: aws +# service: iam +# severity: LOW +# short_code: remove-expired-certificates +# recommended_action: Remove expired certificates +# frameworks: +# cis-aws-1.4: +# - "1.19" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0168 + +import rego.v1 + +deny contains res if { + some certificate in input.aws.iam.servercertificates + time.parse_rfc3339_ns(certificate.expiration.value) < time.now_ns() + + res := result.new("Certificate has expired", certificate) +} diff --git a/checks/cloud/aws/iam/remove_expired_certificates_test.rego b/checks/cloud/aws/iam/remove_expired_certificates_test.rego new file mode 100644 index 00000000..71a42e84 --- /dev/null +++ b/checks/cloud/aws/iam/remove_expired_certificates_test.rego @@ -0,0 +1,19 @@ +package builtin.aws.iam.aws0168._test + +import rego.v1 + +import data.builtin.aws.iam.aws0168 as check +import data.lib.datetime +import data.lib.test + +test_disallow_expired_certificate if { + inp := {"aws": {"iam": {"servercertificates": [{"expiration": {"value": time.format(time.now_ns() - datetime.days_to_ns(10))}}]}}} + + test.assert_equal_message("Certificate has expired", check.deny) with input as inp +} + +test_allow_non_expired_certificate if { + inp := {"aws": {"iam": {"servercertificates": [{"expiration": {"value": time.format(time.now_ns() + datetime.days_to_ns(10))}}]}}} + + test.assert_empty(check.deny) with input as inp +} diff --git a/checks/cloud/aws/iam/require_lowercase_in_passwords.rego b/checks/cloud/aws/iam/require_lowercase_in_passwords.rego new file mode 100644 index 00000000..eddbe4ba --- /dev/null +++ 
b/checks/cloud/aws/iam/require_lowercase_in_passwords.rego @@ -0,0 +1,41 @@ +# METADATA +# title: IAM Password policy should have requirement for at least one lowercase character. +# description: | +# IAM account password policies should ensure that passwords content including at least one lowercase character. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_passwords_account-policy.html#password-policy-details +# custom: +# id: AVD-AWS-0058 +# avd_id: AVD-AWS-0058 +# provider: aws +# service: iam +# severity: MEDIUM +# short_code: require-lowercase-in-passwords +# recommended_action: Enforce longer, more complex passwords in the policy +# frameworks: +# cis-aws-1.2: +# - "1.6" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_account_password_policy +# good_examples: checks/cloud/aws/iam/require_lowercase_in_passwords.tf.go +# bad_examples: checks/cloud/aws/iam/require_lowercase_in_passwords.tf.go +package builtin.aws.iam.aws0058 + +import rego.v1 + +deny contains res if { + policy := input.aws.iam.passwordpolicy + not policy.requirelowercase.value + + res := result.new("Password policy does not require lowercase characters", policy.requirelowercase) +} diff --git a/checks/cloud/aws/iam/require_lowercase_in_passwords_test.rego b/checks/cloud/aws/iam/require_lowercase_in_passwords_test.rego new file mode 100644 index 00000000..aa2f9bd7 --- /dev/null +++ b/checks/cloud/aws/iam/require_lowercase_in_passwords_test.rego @@ -0,0 +1,18 @@ +package builtin.aws.iam.aws0058._test + +import rego.v1 + +import data.builtin.aws.iam.aws0058 as check +import data.lib.test + +test_allow_policy_require_lowercase_in_passwords if { + inp := {"aws": {"iam": {"passwordpolicy": {"requirelowercase": {"value": true}}}}} + + 
test.assert_empty(check.deny) with input as inp +} + +test_disallow_policy_no_require_lowercase_in_passwords if { + inp := {"aws": {"iam": {"passwordpolicy": {"requirelowercase": {"value": false}}}}} + + test.assert_equal_message("Password policy does not require lowercase characters", check.deny) with input as inp +} diff --git a/checks/cloud/aws/iam/require_numbers_in_passwords.rego b/checks/cloud/aws/iam/require_numbers_in_passwords.rego new file mode 100644 index 00000000..57d17b9b --- /dev/null +++ b/checks/cloud/aws/iam/require_numbers_in_passwords.rego @@ -0,0 +1,41 @@ +# METADATA +# title: IAM Password policy should have requirement for at least one number in the password. +# description: | +# IAM account password policies should ensure that passwords content including at least one number. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_passwords_account-policy.html#password-policy-details +# custom: +# id: AVD-AWS-0059 +# avd_id: AVD-AWS-0059 +# provider: aws +# service: iam +# severity: MEDIUM +# short_code: require-numbers-in-passwords +# recommended_action: Enforce longer, more complex passwords in the policy +# frameworks: +# cis-aws-1.2: +# - "1.8" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_account_password_policy +# good_examples: checks/cloud/aws/iam/require_numbers_in_passwords.tf.go +# bad_examples: checks/cloud/aws/iam/require_numbers_in_passwords.tf.go +package builtin.aws.iam.aws0059 + +import rego.v1 + +deny contains res if { + policy := input.aws.iam.passwordpolicy + not policy.requirenumbers.value + + res := result.new("Password policy does not require numbers.", policy.requirenumbers) +} diff --git a/checks/cloud/aws/iam/require_numbers_in_passwords_test.rego 
b/checks/cloud/aws/iam/require_numbers_in_passwords_test.rego new file mode 100644 index 00000000..1127d05b --- /dev/null +++ b/checks/cloud/aws/iam/require_numbers_in_passwords_test.rego @@ -0,0 +1,15 @@ +package builtin.aws.iam.aws0059._test + +import rego.v1 + +import data.builtin.aws.iam.aws0059 as check +import data.lib.test + +test_allow_policy_require_numbers_in_passwords if { + test.assert_empty(check.deny) with input.aws.iam.passwordpolicy.requirenumbers.value as true +} + +test_disallow_policy_no_require_numbers_in_passwords if { + inp := {"aws": {"iam": {"passwordpolicy": {"requirenumbers": {"value": false}}}}} + test.assert_equal_message("Password policy does not require numbers.", check.deny) with input as inp +} diff --git a/checks/cloud/aws/iam/require_support_role.rego b/checks/cloud/aws/iam/require_support_role.rego new file mode 100644 index 00000000..9cc8c444 --- /dev/null +++ b/checks/cloud/aws/iam/require_support_role.rego @@ -0,0 +1,42 @@ +# METADATA +# title: Missing IAM Role to allow authorized users to manage incidents with AWS Support. +# description: | +# By implementing least privilege for access control, an IAM Role will require an appropriate +# IAM Policy to allow Support Center Access in order to manage Incidents with AWS Support. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://console.aws.amazon.com/iam/ +# custom: +# id: AVD-AWS-0169 +# avd_id: AVD-AWS-0169 +# provider: aws +# service: iam +# severity: LOW +# short_code: require-support-role +# recommended_action: Create an IAM role with the necessary permissions to manage incidents with AWS Support. 
+# frameworks: +# cis-aws-1.4: +# - "1.17" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0169 + +import rego.v1 + +deny contains res if { + some role in input.aws.iam.roles + not has_iam_support_role(role) + res := result.new("Missing IAM support role.", role) +} + +has_iam_support_role(role) if { + some policy in role.policies + policy.builtin.value + policy.name.value == "AWSSupportAccess" +} diff --git a/checks/cloud/aws/iam/require_support_role_test.rego b/checks/cloud/aws/iam/require_support_role_test.rego new file mode 100644 index 00000000..c0c1281e --- /dev/null +++ b/checks/cloud/aws/iam/require_support_role_test.rego @@ -0,0 +1,39 @@ +package builtin.aws.iam.aws0169_test + +import rego.v1 + +import data.builtin.aws.iam.aws0169 as check +import data.lib.test + +test_disallow_no_support_role if { + inp := {"aws": {"iam": {"roles": [{"policies": [{ + "name": {"value": "roleName"}, + "builtin": {"value": true}, + }]}]}}} + + test.assert_equal_message("Missing IAM support role.", check.deny) with input as inp +} + +test_disallow_non_built_in_support_role if { + inp := {"aws": {"iam": {"roles": [{"policies": [{ + "name": {"value": "AWSSupportAccess"}, + "builtin": {"value": false}, + }]}]}}} + + test.assert_equal_message("Missing IAM support role.", check.deny) with input as inp +} + +test_allow_has_support_role if { + inp := {"aws": {"iam": {"roles": [{"policies": [ + { + "name": {"value": "AWSSupplyChainFederationAdminAccess"}, + "builtin": {"value": true}, + }, + { + "name": {"value": "AWSSupportAccess"}, + "builtin": {"value": true}, + }, + ]}]}}} + + test.assert_empty(check.deny) with input as inp +} diff --git a/checks/cloud/aws/iam/require_symbols_in_passwords.rego b/checks/cloud/aws/iam/require_symbols_in_passwords.rego new file mode 100644 index 00000000..e1f75218 --- /dev/null +++ b/checks/cloud/aws/iam/require_symbols_in_passwords.rego @@ -0,0 +1,41 @@ +# METADATA +# title: IAM 
Password policy should have requirement for at least one symbol in the password. +# description: | +# IAM account password policies should ensure that passwords content including a symbol. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_passwords_account-policy.html#password-policy-details +# custom: +# id: AVD-AWS-0060 +# avd_id: AVD-AWS-0060 +# provider: aws +# service: iam +# severity: MEDIUM +# short_code: require-symbols-in-passwords +# recommended_action: Enforce longer, more complex passwords in the policy +# frameworks: +# cis-aws-1.2: +# - "1.7" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_account_password_policy +# good_examples: checks/cloud/aws/iam/require_symbols_in_passwords.tf.go +# bad_examples: checks/cloud/aws/iam/require_symbols_in_passwords.tf.go +package builtin.aws.iam.aws0060 + +import rego.v1 + +deny contains res if { + policy := input.aws.iam.passwordpolicy + not policy.requiresymbols.value + + res := result.new("Password policy does not require symbols.", policy.requiresymbols) +} diff --git a/checks/cloud/aws/iam/require_symbols_in_passwords_test.rego b/checks/cloud/aws/iam/require_symbols_in_passwords_test.rego new file mode 100644 index 00000000..0ed8c646 --- /dev/null +++ b/checks/cloud/aws/iam/require_symbols_in_passwords_test.rego @@ -0,0 +1,15 @@ +package builtin.aws.iam.aws0060_test + +import rego.v1 + +import data.builtin.aws.iam.aws0060 as check +import data.lib.test + +test_allow_policy_require_symbols_in_passwords if { + test.assert_empty(check.deny) with input.aws.iam.passwordpolicy.requiresymbols.value as true +} + +test_disallow_policy_no_require_symbols_in_passwords if { + inp := {"aws": {"iam": {"passwordpolicy": {"requiresymbols": {"value": false}}}}} + 
test.assert_equal_message("Password policy does not require symbols.", check.deny) with input as inp +} diff --git a/checks/cloud/aws/iam/require_uppercase_in_passwords.rego b/checks/cloud/aws/iam/require_uppercase_in_passwords.rego new file mode 100644 index 00000000..dffd6c5b --- /dev/null +++ b/checks/cloud/aws/iam/require_uppercase_in_passwords.rego @@ -0,0 +1,42 @@ +# METADATA +# title: IAM Password policy should have requirement for at least one uppercase character. +# description: | +# , +# IAM account password policies should ensure that passwords content including at least one uppercase character. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_passwords_account-policy.html#password-policy-details +# custom: +# id: AVD-AWS-0061 +# avd_id: AVD-AWS-0061 +# provider: aws +# service: iam +# severity: MEDIUM +# short_code: require-uppercase-in-passwords +# recommended_action: Enforce longer, more complex passwords in the policy +# frameworks: +# cis-aws-1.2: +# - "1.5" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_account_password_policy +# good_examples: checks/cloud/aws/iam/require_uppercase_in_passwords.tf.go +# bad_examples: checks/cloud/aws/iam/require_uppercase_in_passwords.tf.go +package builtin.aws.iam.aws0061 + +import rego.v1 + +deny contains res if { + policy := input.aws.iam.passwordpolicy + not policy.requireuppercase.value + + res := result.new("Password policy does not require uppercase characters.", policy.requireuppercase) +} diff --git a/checks/cloud/aws/iam/require_uppercase_in_passwords_test.rego b/checks/cloud/aws/iam/require_uppercase_in_passwords_test.rego new file mode 100644 index 00000000..cab23561 --- /dev/null +++ b/checks/cloud/aws/iam/require_uppercase_in_passwords_test.rego @@ -0,0 
+1,15 @@ +package builtin.aws.iam.aws0061_test + +import rego.v1 + +import data.builtin.aws.iam.aws0061 as check +import data.lib.test + +test_allow_policy_require_uppercase_in_passwords if { + test.assert_empty(check.deny) with input.aws.iam.passwordpolicy.requireuppercase.value as true +} + +test_disallow_policy_no_require_uppercase_in_passwords if { + inp := {"aws": {"iam": {"passwordpolicy": {"requireuppercase": {"value": false}}}}} + test.assert_equal_message("Password policy does not require uppercase characters.", check.deny) with input as inp +} diff --git a/checks/cloud/aws/iam/rotate_access_keys.rego b/checks/cloud/aws/iam/rotate_access_keys.rego new file mode 100644 index 00000000..f424cf8d --- /dev/null +++ b/checks/cloud/aws/iam/rotate_access_keys.rego @@ -0,0 +1,47 @@ +# METADATA +# title: Access keys should be rotated at least every 90 days +# description: | +# Regularly rotating your IAM credentials helps prevent a compromised set of IAM access keys from accessing components in your AWS account. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/prescriptive-guidance/latest/patterns/automatically-rotate-iam-user-access-keys-at-scale-with-aws-organizations-and-aws-secrets-manager.html +# custom: +# id: AVD-AWS-0146 +# avd_id: AVD-AWS-0146 +# provider: aws +# service: iam +# severity: LOW +# short_code: rotate-access-keys +# recommended_action: Rotate keys every 90 days or less +# frameworks: +# cis-aws-1.2: +# - "1.4" +# cis-aws-1.4: +# - "1.14" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0146 + +import data.lib.datetime +import rego.v1 + +deny contains res if { + some user in input.aws.iam.users + + some key in user.accesskeys + key.active.value + + ns := time.parse_rfc3339_ns(key.creationdate.value) + diff := time.now_ns() - ns + diff > datetime.days_to_ns(90) + days := ceil((diff - datetime.days_to_ns(90)) / datetime.ns_in_day) + + msg := sprintf("User access key %q should have been rotated %d day(s) ago", [key.accesskeyid.value, days]) + res := result.new(msg, user) +} diff --git a/checks/cloud/aws/iam/rotate_access_keys_test.rego b/checks/cloud/aws/iam/rotate_access_keys_test.rego new file mode 100644 index 00000000..53136a9b --- /dev/null +++ b/checks/cloud/aws/iam/rotate_access_keys_test.rego @@ -0,0 +1,25 @@ +package builtin.aws.iam.aws0146_test + +import rego.v1 + +import data.builtin.aws.iam.aws0146 as check +import data.lib.datetime +import data.lib.test + +test_allow_access_key_created_within_90_days if { + inp := {"aws": {"iam": {"users": [{"accesskeys": [{ + "creationdate": {"value": time.format(time.now_ns() - datetime.days_to_ns(10))}, + "accesskeyid": {"value": "keyid"}, + "active": {"value": true}, + }]}]}}} + test.assert_empty(check.deny) with input as inp +} + +test_disallow_access_key_created_more_than_90_days_ago if { + inp := {"aws": {"iam": {"users": [{"accesskeys": [{ + "creationdate": {"value": 
time.format(time.now_ns() - datetime.days_to_ns(100))}, + "accesskeyid": {"value": "keyid"}, + "active": {"value": true}, + }]}]}}} + test.assert_equal_message(`User access key "keyid" should have been rotated 10 day(s) ago`, check.deny) with input as inp +} diff --git a/checks/cloud/aws/iam/set_max_password_age.rego b/checks/cloud/aws/iam/set_max_password_age.rego new file mode 100644 index 00000000..e6829f2c --- /dev/null +++ b/checks/cloud/aws/iam/set_max_password_age.rego @@ -0,0 +1,41 @@ +# METADATA +# title: IAM Password policy should have expiry less than or equal to 90 days. +# description: | +# IAM account password policies should have a maximum age specified. +# The account password policy should be set to expire passwords after 90 days or less. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_passwords_account-policy.html#password-policy-details +# custom: +# id: AVD-AWS-0062 +# avd_id: AVD-AWS-0062 +# provider: aws +# service: iam +# severity: MEDIUM +# short_code: set-max-password-age +# recommended_action: Limit the password duration with an expiry in the policy +# frameworks: +# cis-aws-1.2: +# - "1.11" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_account_password_policy +# good_examples: checks/cloud/aws/iam/set_max_password_age.tf.go +# bad_examples: checks/cloud/aws/iam/set_max_password_age.tf.go +package builtin.aws.iam.aws0062 + +import rego.v1 + +deny contains res if { + policy := input.aws.iam.passwordpolicy + policy.maxagedays.value > 90 + res := result.new("Password policy allows a maximum password age of greater than 90 days.", policy.maxagedays) +} diff --git a/checks/cloud/aws/iam/set_max_password_age_test.rego b/checks/cloud/aws/iam/set_max_password_age_test.rego new file mode 100644 index 
00000000..96c334e7 --- /dev/null +++ b/checks/cloud/aws/iam/set_max_password_age_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.iam.aws0062_test + +import rego.v1 + +import data.builtin.aws.iam.aws0062 as check +import data.lib.test + +test_allow_password_with_max_age_days_within_90 if { + inp := {"aws": {"iam": {"passwordpolicy": {"maxagedays": {"value": 60}}}}} + test.assert_empty(check.deny) with input as inp +} + +test_disallow_password_with_max_age_days_over_90 if { + inp := {"aws": {"iam": {"passwordpolicy": {"maxagedays": {"value": 91}}}}} + test.assert_equal_message("Password policy allows a maximum password age of greater than 90 days.", check.deny) with input as inp +} diff --git a/checks/cloud/aws/iam/set_minimum_password_length.rego b/checks/cloud/aws/iam/set_minimum_password_length.rego new file mode 100644 index 00000000..b13036d8 --- /dev/null +++ b/checks/cloud/aws/iam/set_minimum_password_length.rego @@ -0,0 +1,45 @@ +# METADATA +# title: IAM Password policy should have minimum password length of 14 or more characters. +# description: | +# IAM account password policies should ensure that passwords have a minimum length. +# The account password policy should be set to enforce minimum password length of at least 14 characters. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_passwords_account-policy.html#password-policy-details +# custom: +# id: AVD-AWS-0063 +# avd_id: AVD-AWS-0063 +# provider: aws +# service: iam +# severity: MEDIUM +# short_code: set-minimum-password-length +# recommended_action: Enforce longer, more complex passwords in the policy +# frameworks: +# cis-aws-1.2: +# - "1.9" +# cis-aws-1.4: +# - "1.8" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_account_password_policy +# good_examples: checks/cloud/aws/iam/set_minimum_password_length.tf.go +# bad_examples: checks/cloud/aws/iam/set_minimum_password_length.tf.go +package builtin.aws.iam.aws0063 + +import rego.v1 + +msg := "Password policy does not require a minimum password length of 14 characters" + +deny contains res if { + policy := input.aws.iam.passwordpolicy + policy.minimumlength.value < 14 + res := result.new("Password policy does not require a minimum password length of 14 characters", policy.minimumlength) +} diff --git a/checks/cloud/aws/iam/set_minimum_password_length_test.rego b/checks/cloud/aws/iam/set_minimum_password_length_test.rego new file mode 100644 index 00000000..053ca4e5 --- /dev/null +++ b/checks/cloud/aws/iam/set_minimum_password_length_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.iam.aws0063_test + +import rego.v1 + +import data.builtin.aws.iam.aws0063 as check +import data.lib.test + +test_allow_password_length_over_14 if { + inp := {"aws": {"iam": {"passwordpolicy": {"minimumlength": {"value": 15}}}}} + test.assert_empty(check.deny) with input as inp +} + +test_disallow_password_length_under_14 if { + inp := {"aws": {"iam": {"passwordpolicy": {"minimumlength": {"value": 13}}}}} + test.assert_equal_message("Password policy does not require a minimum password length of 14 characters", check.deny) with input as inp +} 
diff --git a/checks/kubernetes/cisbenchmarks/apiserver/deny_service_external_ips_plugin_test.rego b/checks/kubernetes/cisbenchmarks/apiserver/deny_service_external_ips_plugin_test.rego index 57034549..5c728fb0 100644 --- a/checks/kubernetes/cisbenchmarks/apiserver/deny_service_external_ips_plugin_test.rego +++ b/checks/kubernetes/cisbenchmarks/apiserver/deny_service_external_ips_plugin_test.rego @@ -1,6 +1,6 @@ package builtin.kubernetes.KCV0003 -test_deny_service_external_ips_is_enabled { +test_disallow_service_external_ips_is_enabled { r := deny with input as { "apiVersion": "v1", "kind": "Pod", @@ -65,7 +65,7 @@ test_enable_admission_plugins_is_not_configured_args { count(r) == 0 } -test_deny_service_external_ips_is_not_enabled { +test_disallow_service_external_ips_is_not_enabled { r := deny with input as { "apiVersion": "v1", "kind": "Pod", @@ -86,7 +86,7 @@ test_deny_service_external_ips_is_not_enabled { count(r) == 0 } -test_deny_service_external_ips_is_enabled_with_others { +test_disallow_service_external_ips_is_enabled_with_others { r := deny with input as { "apiVersion": "v1", "kind": "Pod", diff --git a/cmd/go2rego/main.go b/cmd/go2rego/main.go new file mode 100644 index 00000000..2cb7440b --- /dev/null +++ b/cmd/go2rego/main.go @@ -0,0 +1,473 @@ +package main + +import ( + "bytes" + "errors" + "fmt" + "io/fs" + "log" + "os" + "path/filepath" + "regexp" + "slices" + "strings" + + "golang.org/x/exp/maps" + + "github.com/aquasecurity/trivy/pkg/iac/framework" + "github.com/aquasecurity/trivy/pkg/iac/providers" + "github.com/aquasecurity/trivy/pkg/iac/rules" + "github.com/aquasecurity/trivy/pkg/iac/scan" + "github.com/open-policy-agent/opa/ast" + "github.com/open-policy-agent/opa/format" + "github.com/open-policy-agent/opa/loader" +) + +var fileMappings = buildFileMappings() + +func main() { + + if len(os.Args) == 2 { + checkID := os.Args[1] + + 
rule := findCheckByID(checkID) + if rule == nil { + log.Fatal("Check not found") + } + + goCheckToRego(rule) + } else { + log.Println("Total checks:", len(rules.GetRegistered(framework.ALL))) + for _, r := range rules.GetRegistered(framework.ALL) { + goCheckToRego(&r.Rule) + } + } + +} + +func buildFileMappings() map[string]string { + + m := make(map[string]string) + + walkFn := func(path string, info fs.DirEntry, err error) error { + if err != nil { + return err + } + + if info.IsDir() { + return nil + } + + if !strings.HasSuffix(path, ".go") || slices.ContainsFunc( + []string{"_test.go", ".tf.go", ".cf.go"}, + func(s string) bool { + return strings.HasSuffix(path, s) + }, + ) { + return nil + } + + // read file + + b, err := os.ReadFile(path) + if err != nil { + return err + } + + r := regexp.MustCompile(`AVDID:\s*"([^"]+)"`) + + matches := r.FindStringSubmatch(string(b)) + if len(matches) != 2 { + log.Printf("expected 2 matches, got %d. File path: %s", len(matches), path) + return nil + } + if _, ok := m[matches[1]]; ok { + log.Printf("duplicate check id %s. 
File path: %s", matches[1], path) + } + + m[matches[1]] = removeExtension(path) + + return nil + } + + if err := filepath.WalkDir("checks", walkFn); err != nil { + log.Fatal(err) + } + + return m +} + +func goCheckToRego(rule *scan.Rule) { + outputPath := buildOutputPath(rule) + + goCheckPath := removeExtension(outputPath) + ".go" + if _, err := os.Stat(goCheckPath); errors.Is(err, os.ErrNotExist) { + log.Println("Go check file not found", goCheckPath) + } + + pkg := buildRegoPackage(rule) + pkgPath := ast.MustParseRef(pkg) // TODO: why without builtin prefix + + pkgAnnotation := buildPackageAnnotation(rule) + var comments []*ast.Comment + + for i := 0; i < len(pkgAnnotation); i++ { + comments = append(comments, &ast.Comment{ + Text: []byte(pkgAnnotation[i]), + Location: &ast.Location{ + Row: i + 1, + }, + }) + } + + if err := modifyOrCreateRegoCheck(outputPath, pkgPath, comments); err != nil { + log.Fatal(err) + } + + regoTestPath := removeExtension(outputPath) + "_test.rego" + if err := createTestRegoCheck(regoTestPath, pkgPath); err != nil { + log.Fatal(err) + } +} + +func modifyTestRegoCheck(regoTestPath string, pkgPath ast.Ref) error { + if _, err := os.Stat(regoTestPath); err != nil { + return err + } + + b, err := os.ReadFile(regoTestPath) + if err != nil { + return err + } + + result, err := loader.NewFileLoader(). + WithReader(bytes.NewReader(b)). + WithProcessAnnotation(true). 
+ Filtered([]string{regoTestPath}, nil) + + if err != nil { + return err + } + + if len(result.Modules) != 1 { + return fmt.Errorf("expected 1 module, got %d", len(result.Modules)) + } + + module := maps.Values(result.ParsedModules())[0] + + f, err := os.OpenFile(regoTestPath, os.O_WRONLY|os.O_TRUNC, 0644) + if err != nil { + return err + } + defer f.Close() + + return updateAndWriteTestRegoCheck(f, pkgPath, module) +} + +func updateAndWriteTestRegoCheck(f *os.File, pkgPath ast.Ref, module *ast.Module) error { + + module.Package = &ast.Package{ + Path: ast.MustParseRef(pkgPath.String() + "_test"), + Location: &ast.Location{ + Row: 1, + }, + } + + module.Imports = []*ast.Import{ + { + Path: ast.MustParseTerm("rego.v1"), + Location: &ast.Location{ + Row: 3, + }, + }, + { + Path: ast.MustParseTerm(pkgPath.String()), + Alias: ast.Var("check"), + Location: &ast.Location{ + Row: 5, + }, + }, + { + Path: ast.MustParseTerm("data.lib.test"), + Location: &ast.Location{ + Row: 6, + }, + }, + } + + formatted, err := format.Ast(module) + if err != nil { + return err + } + + if _, err := f.Write(formatted); err != nil { + return err + } + + return nil +} + +func createTestRegoCheck(regoTestPath string, pkgPath ast.Ref) error { + if _, err := os.Stat(regoTestPath); err == nil { + return modifyTestRegoCheck(regoTestPath, pkgPath) + } else if !errors.Is(err, os.ErrNotExist) { + return err + } + + f, err := os.Create(regoTestPath) + if err != nil { + return err + } + defer f.Close() + + module := &ast.Module{} + + return updateAndWriteTestRegoCheck(f, pkgPath, module) +} + +func findCheckByID(id string) *scan.Rule { + for _, r := range rules.GetRegistered(framework.ALL) { + if r.Rule.AVDID == id { + return &r.Rule + } + } + return nil +} + +func modifyOrCreateRegoCheck(filePath string, pkgPath ast.Ref, annotationComments []*ast.Comment) error { + b, err := os.ReadFile(filePath) + if errors.Is(err, os.ErrNotExist) { + return createRegoCheck(filePath, pkgPath, annotationComments) + } 
+ + result, err := loader.NewFileLoader(). + WithReader(bytes.NewReader(b)). + WithProcessAnnotation(true). + Filtered([]string{filePath}, nil) + + if err != nil { + return err + } + + if len(result.Modules) != 1 { + return fmt.Errorf("expected 1 module, got %d", len(result.Modules)) + } + + module := maps.Values(result.ParsedModules())[0] + + module.Annotations = nil + + var moduleComments []*ast.Comment + + for _, c := range module.Comments { + if c.Location.Row > module.Package.Location.Row { + moduleComments = append(moduleComments, c) + } + } + + module.Comments = append(moduleComments, annotationComments...) + module.Package.Path = pkgPath + + formatted, err := format.Ast(module) + if err != nil { + return err + } + + f, err := os.OpenFile(filePath, os.O_WRONLY|os.O_TRUNC, 0644) + if err != nil { + return err + } + defer f.Close() + + f.Write(formatted) + + return nil +} + +func createRegoCheck(filePath string, pkgPath ast.Ref, annotationComments []*ast.Comment) error { + + f, err := os.Create(filePath) + if err != nil { + return err + } + + defer f.Close() + + module := &ast.Module{ + Package: &ast.Package{ + Path: pkgPath, + Location: &ast.Location{ + Row: len(annotationComments) + 1, + }, + }, + Comments: annotationComments, + } + + formatted, err := format.Ast(module) + if err != nil { + return err + } + + if _, err := f.Write(formatted); err != nil { + return err + } + + return nil +} + +func buildOutputPath(rule *scan.Rule) string { + + p, ok := fileMappings[rule.AVDID] + if !ok { + log.Fatal("File mapping not found", rule.AVDID) + } + return p + ".rego" +} + +func cleanExplanation(s string) []string { + lines := strings.Split(s, "\n") + + for i := 0; i < len(lines); i++ { + lines[i] = strings.TrimSpace(lines[i]) + // Trim tabs + lines[i] = strings.ReplaceAll(lines[i], "\t", " ") + + if lines[i] == "" { + lines = append(lines[:i], lines[i+1:]...) 
+ i-- + } + } + + return lines +} + +func buildPackageAnnotation(r *scan.Rule) []string { + + var lines []string + + var addLine = func(line string, ident int) { + lines = append(lines, strings.Repeat(" ", ident)+line) + } + + addLine("METADATA", 1) + + addLine("title: "+strings.ReplaceAll(r.Summary, "\n", " "), 1) // TODO + addLine("description: |", 1) + for _, line := range cleanExplanation(r.Explanation) { + addLine(line, 3) + } + addLine("scope: package", 1) + addLine("schemas:", 1) + + switch r.Provider { + case providers.KubernetesProvider: + addLine("- input: schema[\"kubernetes\"]", 3) + default: + addLine("- input: schema[\"cloud\"]", 3) + } + + if len(r.Links) > 0 { + addLine("related_resources:", 1) + for _, link := range r.Links { + if link == "" { + continue + } + addLine("- "+link, 3) + } + } + + addLine("custom:", 1) + addLine("id: "+r.AVDID, 3) + addLine("avd_id: "+r.AVDID, 3) + addLine("provider: "+string(r.Provider), 3) + addLine("service: "+r.Service, 3) + addLine("severity: "+string(r.Severity), 3) + addLine("short_code: "+r.ShortCode, 3) + addLine("recommended_action: "+r.Resolution, 3) + + generateFramework(r, &lines) + + addLine("input:", 3) + addLine("selector:", 5) + addLine("- type: "+string(r.Provider), 7) + addLine("subtypes:", 9) + addLine("- service: "+r.Service, 11) + addLine("provider: "+string(r.Provider), 13) + + if r.Terraform != nil { + addLine("terraform:", 3) + generateEngineMetadata(r, "tf", r.Terraform, &lines) + } + + if r.CloudFormation != nil { + addLine("cloudformation:", 3) + generateEngineMetadata(r, "cf", r.CloudFormation, &lines) + } + + return lines +} + +func generateFramework(r *scan.Rule, lines *[]string) { + if _, ok := r.Frameworks[framework.Default]; ok && len(r.Frameworks) == 1 { + return + } + + if len(r.Frameworks) > 0 { + + *lines = append(*lines, strings.Repeat(" ", 3)+"frameworks:") + for f, versions := range r.Frameworks { + if f == framework.Default { + continue + } + + *lines = append(*lines, 
strings.Repeat(" ", 5)+string(f)+":") + for _, version := range versions { + *lines = append(*lines, strings.Repeat(" ", 7)+"- \""+version+"\"") + } + } + } +} + +func generateEngineMetadata(r *scan.Rule, typ string, meta *scan.EngineMetadata, lines *[]string) { + if meta == nil { + return + } + + if len(meta.Links) > 0 { + *lines = append(*lines, strings.Repeat(" ", 5)+"links:") + for _, link := range meta.Links { + if link == "" { + continue + } + *lines = append(*lines, strings.Repeat(" ", 7)+"- "+link) + } + } + + outputPath := buildOutputPath(r) + examplePath := removeExtension(outputPath) + "." + typ + ".go" + + if len(meta.GoodExamples) > 0 { + *lines = append(*lines, strings.Repeat(" ", 5)+"good_examples: "+examplePath) + } + + if len(meta.BadExamples) > 0 { + *lines = append(*lines, strings.Repeat(" ", 5)+"bad_examples: "+examplePath) + } + + // TODO: support for remidantion markdown +} + +func removeExtension(s string) string { + return s[0 : len(s)-len(filepath.Ext(s))] +} + +func buildRegoPackage(r *scan.Rule) string { + id := strings.SplitN(r.AVDID, "-", 3) + service := strings.ReplaceAll(r.Service, "-", "") + switch r.Provider { + case providers.KubernetesProvider: + return strings.Join([]string{"data", "builtin", "kubernetes", id[1] + id[2]}, ".") + default: + return strings.Join([]string{"data", "builtin", string(r.Provider), service, string(r.Provider) + id[2]}, ".") + } +} diff --git a/go.mod b/go.mod index c76054e0..556a484d 100644 --- a/go.mod +++ b/go.mod @@ -13,6 +13,7 @@ require ( github.com/owenrumney/squealer v1.2.2 github.com/stretchr/testify v1.9.0 github.com/testcontainers/testcontainers-go v0.30.0 + golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa gopkg.in/yaml.v3 v3.0.1 mvdan.cc/sh/v3 v3.8.0 ) @@ -168,7 +169,6 @@ require ( go.uber.org/automaxprocs v1.5.3 // indirect go.uber.org/multierr v1.11.0 // indirect golang.org/x/crypto v0.22.0 // indirect - golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa // 
indirect golang.org/x/mod v0.16.0 // indirect golang.org/x/net v0.24.0 // indirect golang.org/x/oauth2 v0.18.0 // indirect diff --git a/lib/datetime.rego b/lib/datetime.rego new file mode 100644 index 00000000..b7b17b77 --- /dev/null +++ b/lib/datetime.rego @@ -0,0 +1,15 @@ +package lib.datetime + +import rego.v1 + +ns_in_day := 86400000000000 + +zero_time_string := "0001-01-01T00:00:00Z" + +time_is_never(value) := time.parse_rfc3339_ns(value) == 0 # TODO: rego doesn't parse zero time + +time_diff_gt_days(value, days) := (time.now_ns() - time.parse_rfc3339_ns(value)) > days_to_ns(days) + +time_diff_lt_days(value, days) := (time.now_ns() - time.parse_rfc3339_ns(value)) < days_to_ns(days) + +days_to_ns(days) := days * ns_in_day diff --git a/lib/iam.rego b/lib/iam.rego new file mode 100644 index 00000000..c96caa60 --- /dev/null +++ b/lib/iam.rego @@ -0,0 +1,24 @@ +package lib.iam + +import rego.v1 + +import data.lib.datetime + +is_user_logged_in(user) if { + # user.lastaccess.is_resolvable + not datetime.time_is_never(user.lastaccess.value) +} + +user_has_mfa_devices(user) if count(user.mfadevices) > 0 + +user_is_inactive(user, days) if { + is_user_logged_in(user) + datetime.time_diff_gt_days(user.lastaccess.value, days) +} + +key_is_unused(key, days) if { + key.active.value + datetime.time_diff_gt_days(key.lastaccess.value, days) +} + +is_root_user(user) := user.name.value == "root" diff --git a/lib/s3.rego b/lib/s3.rego new file mode 100644 index 00000000..20497051 --- /dev/null +++ b/lib/s3.rego @@ -0,0 +1,11 @@ +package lib.s3 + +import rego.v1 + +public_acls = {"public-read", "public-read-write", "website", "authenticated-read"} + +bucket_has_public_access(bucket) if { + bucket.acl.value in public_acls + not bucket.publicaccessblock.ignorepublicacls.value + not bucket.publicaccessblock.blockpublicacls.value +} diff --git a/lib/s3_test.rego b/lib/s3_test.rego new file mode 100644 index 00000000..9ad052c5 --- /dev/null +++ b/lib/s3_test.rego @@ -0,0 +1,12 @@ 
+package lib.s3_test + +import rego.v1 + +import data.lib.s3 + +test_has_public_access if { + s3.bucket_has_public_access({"acl": {"value": "public-read"}}) + not s3.bucket_has_public_access({"acl": {"value": "private"}}) + not s3.bucket_has_public_access({"acl": {"value": "public-read-write"}, "publicaccessblock": {"ignorepublicacls": {"value": true}}}) + not s3.bucket_has_public_access({"acl": {"value": "public-read-write"}, "publicaccessblock": {"blockpublicacls": {"value": true}}}) +} diff --git a/lib/test.rego b/lib/test.rego new file mode 100644 index 00000000..66f280eb --- /dev/null +++ b/lib/test.rego @@ -0,0 +1,37 @@ +package lib.test + +import rego.v1 + +assert_empty(v) if { + not assert_not_empty(v) +} + +assert_not_empty(v) if { + count(v) > 0 + trace_and_print(sprintf("assert_not_empty:\n %v", [v])) +} + +assert_equal_message(expected, results) if { + assert_count(results, 1) + not _assert_equal_message(expected, results) +} + +_assert_equal_message(expected, results) if { + msg := [res.msg | some res in results][0] + msg != expected + trace_and_print(sprintf("assert_equal_message:\n Got %q\n Expected %q", [msg, expected])) +} + +assert_count(expected, results) if { + not _assert_count(results, expected) +} + +_assert_count(expected, results) if { + count(results) != expected + trace_and_print(sprintf("assert_count:\n Got %v\n Expected %v", [count(results), expected])) +}