From a893f3f731b8df56a278b20f6493b3e4e88c73cd Mon Sep 17 00:00:00 2001 From: Simar Date: Thu, 25 Jan 2024 13:54:00 -0700 Subject: [PATCH 01/13] refactor(deps): Merge trivy-iac into Trivy --- go.mod | 32 +- internal/adapters/arm/adapt.go | 50 + internal/adapters/arm/appservice/adapt.go | 58 + internal/adapters/arm/authorization/adapt.go | 38 + internal/adapters/arm/compute/adapt.go | 85 + internal/adapters/arm/compute/adapt_test.go | 60 + internal/adapters/arm/container/adapt.go | 17 + internal/adapters/arm/database/adapt.go | 35 + internal/adapters/arm/database/firewall.go | 18 + internal/adapters/arm/database/maria.go | 27 + internal/adapters/arm/database/mssql.go | 61 + internal/adapters/arm/database/postgresql.go | 64 + internal/adapters/arm/datafactory/adapt.go | 27 + internal/adapters/arm/datalake/adapt.go | 28 + internal/adapters/arm/keyvault/adapt.go | 64 + internal/adapters/arm/monitor/adapt.go | 45 + internal/adapters/arm/network/adapt.go | 126 ++ internal/adapters/arm/securitycenter/adapt.go | 43 + internal/adapters/arm/storage/adapt.go | 69 + internal/adapters/arm/storage/adapt_test.go | 59 + internal/adapters/arm/synapse/adapt.go | 34 + internal/adapters/cloudformation/adapt.go | 14 + .../aws/accessanalyzer/accessanalyzer.go | 13 + .../aws/accessanalyzer/analyzer.go | 24 + internal/adapters/cloudformation/aws/adapt.go | 74 + .../aws/apigateway/apigateway.go | 21 + .../cloudformation/aws/apigateway/stage.go | 68 + .../cloudformation/aws/athena/athena.go | 14 + .../cloudformation/aws/athena/workgroup.go | 30 + .../aws/cloudfront/cloudfront.go | 13 + .../aws/cloudfront/distribution.go | 55 + .../aws/cloudtrail/cloudtrail.go | 13 + .../cloudformation/aws/cloudtrail/trails.go | 27 + .../aws/cloudwatch/cloudwatch.go | 14 + .../aws/cloudwatch/log_group.go | 26 + .../cloudformation/aws/codebuild/codebuild.go | 13 + .../cloudformation/aws/codebuild/project.go | 63 + .../cloudformation/aws/config/adapt_test.go | 71 + .../cloudformation/aws/config/aggregator.go | 
41 + .../cloudformation/aws/config/config.go | 13 + .../cloudformation/aws/documentdb/cluster.go | 58 + .../aws/documentdb/documentdb.go | 13 + .../cloudformation/aws/dynamodb/cluster.go | 36 + .../cloudformation/aws/dynamodb/dynamodb.go | 13 + .../cloudformation/aws/ec2/adapt_test.go | 176 +++ .../adapters/cloudformation/aws/ec2/ec2.go | 20 + .../cloudformation/aws/ec2/instance.go | 106 ++ .../aws/ec2/launch_configuration.go | 48 + .../cloudformation/aws/ec2/launch_template.go | 56 + .../adapters/cloudformation/aws/ec2/nacl.go | 71 + .../cloudformation/aws/ec2/security_group.go | 68 + .../adapters/cloudformation/aws/ec2/subnet.go | 21 + .../adapters/cloudformation/aws/ec2/volume.go | 25 + .../adapters/cloudformation/aws/ecr/ecr.go | 13 + .../cloudformation/aws/ecr/repository.go | 93 ++ .../cloudformation/aws/ecs/cluster.go | 57 + .../adapters/cloudformation/aws/ecs/ecs.go | 14 + .../cloudformation/aws/ecs/task_definition.go | 86 ++ .../adapters/cloudformation/aws/efs/efs.go | 13 + .../cloudformation/aws/efs/filesystem.go | 23 + .../cloudformation/aws/eks/cluster.go | 56 + .../adapters/cloudformation/aws/eks/eks.go | 13 + .../cloudformation/aws/elasticache/cluster.go | 24 + .../aws/elasticache/elasticache.go | 15 + .../aws/elasticache/replication_group.go | 23 + .../aws/elasticache/security_group.go | 22 + .../aws/elasticsearch/domain.go | 84 + .../aws/elasticsearch/elasticsearch.go | 13 + .../cloudformation/aws/elb/adapt_test.go | 73 + .../adapters/cloudformation/aws/elb/elb.go | 13 + .../cloudformation/aws/elb/loadbalancer.go | 81 + .../adapters/cloudformation/aws/iam/iam.go | 27 + .../adapters/cloudformation/aws/iam/policy.go | 125 ++ .../cloudformation/aws/kinesis/kinesis.go | 13 + .../cloudformation/aws/kinesis/stream.go | 36 + .../cloudformation/aws/lambda/function.go | 53 + .../cloudformation/aws/lambda/lambda.go | 13 + .../adapters/cloudformation/aws/mq/broker.go | 33 + internal/adapters/cloudformation/aws/mq/mq.go | 13 + 
.../cloudformation/aws/msk/cluster.go | 80 + .../adapters/cloudformation/aws/msk/msk.go | 13 + .../cloudformation/aws/neptune/cluster.go | 34 + .../cloudformation/aws/neptune/neptune.go | 13 + .../cloudformation/aws/rds/adapt_test.go | 158 ++ .../cloudformation/aws/rds/cluster.go | 48 + .../cloudformation/aws/rds/instance.go | 130 ++ .../aws/rds/parameter_groups.go | 42 + .../adapters/cloudformation/aws/rds/rds.go | 18 + .../cloudformation/aws/redshift/cluster.go | 54 + .../cloudformation/aws/redshift/redshift.go | 16 + .../aws/redshift/security_group.go | 17 + .../adapters/cloudformation/aws/s3/bucket.go | 148 ++ internal/adapters/cloudformation/aws/s3/s3.go | 13 + .../adapters/cloudformation/aws/sam/api.go | 96 ++ .../cloudformation/aws/sam/function.go | 58 + .../cloudformation/aws/sam/http_api.go | 64 + .../adapters/cloudformation/aws/sam/sam.go | 17 + .../cloudformation/aws/sam/state_machines.go | 80 + .../adapters/cloudformation/aws/sam/tables.go | 42 + .../adapters/cloudformation/aws/sns/sns.go | 13 + .../adapters/cloudformation/aws/sns/topic.go | 24 + .../adapters/cloudformation/aws/sqs/queue.go | 66 + .../adapters/cloudformation/aws/sqs/sqs.go | 13 + .../adapters/cloudformation/aws/ssm/secret.go | 18 + .../adapters/cloudformation/aws/ssm/ssm.go | 13 + .../aws/workspaces/workspace.go | 31 + .../aws/workspaces/workspaces.go | 13 + internal/adapters/terraform/adapt.go | 31 + .../aws/accessanalyzer/accessanalyzer.go | 40 + internal/adapters/terraform/aws/adapt.go | 79 + .../terraform/aws/apigateway/adapt.go | 21 + .../terraform/aws/apigateway/adapt_test.go | 233 +++ .../terraform/aws/apigateway/apiv1.go | 115 ++ .../terraform/aws/apigateway/apiv1_test.go | 125 ++ .../terraform/aws/apigateway/apiv2.go | 69 + .../terraform/aws/apigateway/apiv2_test.go | 103 ++ .../terraform/aws/apigateway/namesv1.go | 24 + .../terraform/aws/apigateway/namesv1_test.go | 54 + .../terraform/aws/apigateway/namesv2.go | 28 + .../terraform/aws/apigateway/namesv2_test.go | 56 + 
.../adapters/terraform/aws/athena/adapt.go | 80 + .../terraform/aws/athena/adapt_test.go | 211 +++ .../terraform/aws/cloudfront/adapt.go | 79 + .../terraform/aws/cloudfront/adapt_test.go | 163 ++ .../terraform/aws/cloudtrail/adapt.go | 67 + .../terraform/aws/cloudtrail/adapt_test.go | 106 ++ .../terraform/aws/cloudwatch/adapt.go | 47 + .../terraform/aws/cloudwatch/adapt_test.go | 114 ++ .../adapters/terraform/aws/codebuild/adapt.go | 66 + .../terraform/aws/codebuild/adapt_test.go | 116 ++ .../adapters/terraform/aws/config/adapt.go | 33 + .../terraform/aws/config/adapt_test.go | 81 + .../terraform/aws/documentdb/adapt.go | 63 + .../terraform/aws/documentdb/adapt_test.go | 125 ++ .../adapters/terraform/aws/dynamodb/adapt.go | 94 ++ .../terraform/aws/dynamodb/adapt_test.go | 176 +++ internal/adapters/terraform/aws/ec2/adapt.go | 102 ++ .../adapters/terraform/aws/ec2/adapt_test.go | 255 +++ .../adapters/terraform/aws/ec2/autoscaling.go | 118 ++ .../terraform/aws/ec2/autoscaling_test.go | 199 +++ internal/adapters/terraform/aws/ec2/subnet.go | 26 + .../adapters/terraform/aws/ec2/subnet_test.go | 90 ++ internal/adapters/terraform/aws/ec2/volume.go | 40 + .../adapters/terraform/aws/ec2/volume_test.go | 112 ++ internal/adapters/terraform/aws/ec2/vpc.go | 229 +++ .../adapters/terraform/aws/ec2/vpc_test.go | 339 ++++ internal/adapters/terraform/aws/ecr/adapt.go | 113 ++ .../adapters/terraform/aws/ecr/adapt_test.go | 248 +++ internal/adapters/terraform/aws/ecs/adapt.go | 107 ++ .../adapters/terraform/aws/ecs/adapt_test.go | 246 +++ internal/adapters/terraform/aws/efs/adapt.go | 32 + .../adapters/terraform/aws/efs/adapt_test.go | 78 + internal/adapters/terraform/aws/eks/adapt.go | 91 ++ .../adapters/terraform/aws/eks/adapt_test.go | 163 ++ .../terraform/aws/elasticache/adapt.go | 85 + .../terraform/aws/elasticache/adapt_test.go | 233 +++ .../terraform/aws/elasticsearch/adapt.go | 100 ++ .../terraform/aws/elasticsearch/adapt_test.go | 173 +++ 
internal/adapters/terraform/aws/elb/adapt.go | 120 ++ .../adapters/terraform/aws/elb/adapt_test.go | 161 ++ internal/adapters/terraform/aws/emr/adapt.go | 49 + .../adapters/terraform/aws/emr/adapt_test.go | 116 ++ internal/adapters/terraform/aws/iam/adapt.go | 16 + .../adapters/terraform/aws/iam/adapt_test.go | 128 ++ .../adapters/terraform/aws/iam/convert.go | 240 +++ internal/adapters/terraform/aws/iam/groups.go | 32 + .../adapters/terraform/aws/iam/groups_test.go | 115 ++ .../adapters/terraform/aws/iam/passwords.go | 76 + .../terraform/aws/iam/passwords_test.go | 54 + .../adapters/terraform/aws/iam/policies.go | 109 ++ .../terraform/aws/iam/policies_test.go | 182 +++ internal/adapters/terraform/aws/iam/roles.go | 38 + .../adapters/terraform/aws/iam/roles_test.go | 220 +++ internal/adapters/terraform/aws/iam/users.go | 56 + .../adapters/terraform/aws/iam/users_test.go | 173 +++ .../adapters/terraform/aws/kinesis/adapt.go | 41 + .../terraform/aws/kinesis/adapt_test.go | 87 ++ internal/adapters/terraform/aws/kms/adapt.go | 36 + .../adapters/terraform/aws/kms/adapt_test.go | 80 + .../adapters/terraform/aws/lambda/adapt.go | 98 ++ .../terraform/aws/lambda/adapt_test.go | 155 ++ internal/adapters/terraform/aws/mq/adapt.go | 48 + .../adapters/terraform/aws/mq/adapt_test.go | 119 ++ internal/adapters/terraform/aws/msk/adapt.go | 97 ++ .../adapters/terraform/aws/msk/adapt_test.go | 200 +++ .../adapters/terraform/aws/neptune/adapt.go | 50 + .../terraform/aws/neptune/adapt_test.go | 97 ++ .../adapters/terraform/aws/provider/adapt.go | 166 ++ .../terraform/aws/provider/adapt_test.go | 129 ++ internal/adapters/terraform/aws/rds/adapt.go | 256 ++++ .../adapters/terraform/aws/rds/adapt_test.go | 332 ++++ .../adapters/terraform/aws/redshift/adapt.go | 117 ++ .../terraform/aws/redshift/adapt_test.go | 230 +++ internal/adapters/terraform/aws/s3/adapt.go | 18 + .../adapters/terraform/aws/s3/adapt_test.go | 385 +++++ internal/adapters/terraform/aws/s3/bucket.go | 283 ++++ 
.../adapters/terraform/aws/s3/bucket_test.go | 331 ++++ .../adapters/terraform/aws/s3/policies.go | 53 + .../terraform/aws/s3/public_access_block.go | 41 + internal/adapters/terraform/aws/sns/adapt.go | 38 + .../adapters/terraform/aws/sns/adapt_test.go | 82 + internal/adapters/terraform/aws/sqs/adapt.go | 167 ++ .../adapters/terraform/aws/sqs/adapt_test.go | 140 ++ internal/adapters/terraform/aws/ssm/adapt.go | 40 + .../adapters/terraform/aws/ssm/adapt_test.go | 110 ++ .../terraform/aws/workspaces/adapt.go | 57 + .../terraform/aws/workspaces/adapt_test.go | 111 ++ internal/adapters/terraform/azure/adapt.go | 37 + .../terraform/azure/appservice/adapt.go | 94 ++ .../terraform/azure/appservice/adapt_test.go | 188 +++ .../terraform/azure/authorization/adapt.go | 42 + .../azure/authorization/adapt_test.go | 119 ++ .../adapters/terraform/azure/compute/adapt.go | 135 ++ .../terraform/azure/compute/adapt_test.go | 238 +++ .../terraform/azure/container/adapt.go | 106 ++ .../terraform/azure/container/adapt_test.go | 262 ++++ .../terraform/azure/database/adapt.go | 439 ++++++ .../terraform/azure/database/adapt_test.go | 454 ++++++ .../terraform/azure/datafactory/adapt.go | 33 + .../terraform/azure/datafactory/adapt_test.go | 79 + .../terraform/azure/datalake/adapt.go | 38 + .../terraform/azure/datalake/adapt_test.go | 83 + .../terraform/azure/keyvault/adapt.go | 159 ++ .../terraform/azure/keyvault/adapt_test.go | 271 ++++ .../adapters/terraform/azure/monitor/adapt.go | 56 + .../terraform/azure/monitor/adapt_test.go | 128 ++ .../adapters/terraform/azure/network/adapt.go | 220 +++ .../terraform/azure/network/adapt_test.go | 262 ++++ .../terraform/azure/securitycenter/adapt.go | 59 + .../azure/securitycenter/adapt_test.go | 137 ++ .../adapters/terraform/azure/storage/adapt.go | 173 +++ .../terraform/azure/storage/adapt_test.go | 252 +++ .../adapters/terraform/azure/synapse/adapt.go | 32 + .../terraform/azure/synapse/adapt_test.go | 83 + .../adapters/terraform/cloudstack/adapt.go 
| 13 + .../terraform/cloudstack/compute/adapt.go | 49 + .../cloudstack/compute/adapt_test.go | 91 ++ .../adapters/terraform/digitalocean/adapt.go | 15 + .../terraform/digitalocean/compute/adapt.go | 107 ++ .../digitalocean/compute/adapt_test.go | 353 +++++ .../terraform/digitalocean/spaces/adapt.go | 91 ++ .../digitalocean/spaces/adapt_test.go | 144 ++ internal/adapters/terraform/github/adapt.go | 17 + .../github/branch_protections/adapt.go | 30 + .../github/branch_protections/adapt_test.go | 59 + .../terraform/github/repositories/adapt.go | 48 + .../github/repositories/adapt_test.go | 111 ++ .../terraform/github/secrets/adapt.go | 32 + .../terraform/github/secrets/adapt_test.go | 69 + internal/adapters/terraform/google/adapt.go | 27 + .../terraform/google/bigquery/adapt.go | 54 + .../terraform/google/bigquery/adapt_test.go | 155 ++ .../terraform/google/compute/adapt.go | 16 + .../terraform/google/compute/adapt_test.go | 210 +++ .../terraform/google/compute/disks.go | 38 + .../terraform/google/compute/disks_test.go | 93 ++ .../terraform/google/compute/instances.go | 124 ++ .../google/compute/instances_test.go | 169 ++ .../terraform/google/compute/metadata.go | 26 + .../terraform/google/compute/metadata_test.go | 56 + .../terraform/google/compute/networks.go | 198 +++ .../terraform/google/compute/networks_test.go | 126 ++ .../adapters/terraform/google/compute/ssl.go | 19 + .../terraform/google/compute/ssl_test.go | 62 + .../adapters/terraform/google/dns/adapt.go | 64 + .../terraform/google/dns/adapt_test.go | 113 ++ .../adapters/terraform/google/gke/adapt.go | 330 ++++ .../terraform/google/gke/adapt_test.go | 416 +++++ .../adapters/terraform/google/iam/adapt.go | 108 ++ .../terraform/google/iam/adapt_test.go | 266 ++++ .../adapters/terraform/google/iam/convert.go | 26 + .../terraform/google/iam/folder_iam.go | 117 ++ .../adapters/terraform/google/iam/folders.go | 40 + .../adapters/terraform/google/iam/org_iam.go | 113 ++ .../terraform/google/iam/project_iam.go | 287 
++++ .../terraform/google/iam/project_iam_test.go | 59 + .../adapters/terraform/google/iam/projects.go | 58 + .../iam/workload_identity_pool_providers.go | 18 + .../adapters/terraform/google/kms/adapt.go | 60 + .../terraform/google/kms/adapt_test.go | 126 ++ .../adapters/terraform/google/sql/adapt.go | 156 ++ .../terraform/google/sql/adapt_test.go | 278 ++++ .../terraform/google/storage/adapt.go | 129 ++ .../terraform/google/storage/adapt_test.go | 198 +++ .../adapters/terraform/google/storage/iam.go | 96 ++ .../adapters/terraform/kubernetes/adapt.go | 123 ++ .../terraform/kubernetes/adapt_test.go | 60 + .../terraform/nifcloud/computing/adapt.go | 16 + .../nifcloud/computing/adapt_test.go | 61 + .../terraform/nifcloud/computing/instance.go | 35 + .../nifcloud/computing/instance_test.go | 71 + .../nifcloud/computing/security_group.go | 76 + .../nifcloud/computing/security_group_test.go | 86 ++ .../adapters/terraform/nifcloud/dns/adapt.go | 12 + .../terraform/nifcloud/dns/adapt_test.go | 32 + .../adapters/terraform/nifcloud/dns/record.go | 23 + .../terraform/nifcloud/dns/record_test.go | 56 + .../adapters/terraform/nifcloud/nas/adapt.go | 13 + .../terraform/nifcloud/nas/adapt_test.go | 44 + .../terraform/nifcloud/nas/nas_instance.go | 22 + .../nifcloud/nas/nas_instance_test.go | 54 + .../nifcloud/nas/nas_security_group.go | 30 + .../nifcloud/nas/nas_security_group_test.go | 66 + .../terraform/nifcloud/network/adapt.go | 16 + .../terraform/nifcloud/network/adapt_test.go | 83 + .../nifcloud/network/elastic_load_balancer.go | 50 + .../network/elastic_load_balancer_test.go | 90 ++ .../nifcloud/network/load_balancer.go | 67 + .../nifcloud/network/load_balancer_test.go | 75 + .../terraform/nifcloud/network/router.go | 37 + .../terraform/nifcloud/network/router_test.go | 70 + .../terraform/nifcloud/network/vpn_gateway.go | 22 + .../nifcloud/network/vpn_gateway_test.go | 53 + .../adapters/terraform/nifcloud/nifcloud.go | 23 + .../adapters/terraform/nifcloud/rdb/adapt.go | 13 
+ .../terraform/nifcloud/rdb/adapt_test.go | 60 + .../terraform/nifcloud/rdb/db_instance.go | 26 + .../nifcloud/rdb/db_instance_test.go | 66 + .../nifcloud/rdb/db_security_group.go | 30 + .../nifcloud/rdb/db_security_group_test.go | 66 + .../nifcloud/sslcertificate/adapt.go | 12 + .../nifcloud/sslcertificate/adapt_test.go | 28 + .../sslcertificate/server_certificate.go | 41 + .../sslcertificate/server_certificate_test.go | 72 + .../adapters/terraform/openstack/adapt.go | 84 + .../terraform/openstack/adapt_test.go | 133 ++ .../terraform/openstack/networking.go | 77 + .../terraform/openstack/networking_test.go | 72 + internal/adapters/terraform/oracle/adapt.go | 30 + .../adapters/terraform/tftestutil/testutil.go | 26 + pkg/detection/detect.go | 296 ++++ pkg/detection/detect_test.go | 410 +++++ pkg/detection/peek.go | 53 + pkg/detection/testdata/big.file | Bin 0 -> 5120 bytes pkg/detection/testdata/small.file | 3 + pkg/extrafs/extrafs.go | 54 + .../analyzer/config/terraform/terraform.go | 2 +- pkg/fanal/analyzer/const.go | 2 +- pkg/misconf/scanner.go | 20 +- .../azure/arm/parser/armjson/bench_test.go | 71 + .../azure/arm/parser/armjson/decode.go | 66 + .../azure/arm/parser/armjson/decode_array.go | 51 + .../arm/parser/armjson/decode_boolean.go | 18 + .../arm/parser/armjson/decode_meta_test.go | 40 + .../azure/arm/parser/armjson/decode_null.go | 10 + .../azure/arm/parser/armjson/decode_number.go | 46 + .../azure/arm/parser/armjson/decode_object.go | 122 ++ .../azure/arm/parser/armjson/decode_string.go | 19 + pkg/scanners/azure/arm/parser/armjson/kind.go | 14 + pkg/scanners/azure/arm/parser/armjson/node.go | 59 + .../azure/arm/parser/armjson/parse.go | 150 ++ .../azure/arm/parser/armjson/parse_array.go | 54 + .../arm/parser/armjson/parse_array_test.go | 46 + .../azure/arm/parser/armjson/parse_boolean.go | 40 + .../arm/parser/armjson/parse_boolean_test.go | 54 + .../azure/arm/parser/armjson/parse_comment.go | 98 ++ .../arm/parser/armjson/parse_complex_test.go | 131 ++ 
.../azure/arm/parser/armjson/parse_null.go | 23 + .../arm/parser/armjson/parse_null_test.go | 18 + .../azure/arm/parser/armjson/parse_number.go | 163 ++ .../arm/parser/armjson/parse_number_test.go | 178 +++ .../azure/arm/parser/armjson/parse_object.go | 143 ++ .../arm/parser/armjson/parse_object_test.go | 115 ++ .../azure/arm/parser/armjson/parse_string.go | 91 ++ .../arm/parser/armjson/parse_string_test.go | 37 + .../arm/parser/armjson/parse_whitespace.go | 29 + .../azure/arm/parser/armjson/reader.go | 36 + .../azure/arm/parser/armjson/reader_test.go | 62 + .../azure/arm/parser/armjson/unmarshal.go | 40 + pkg/scanners/azure/arm/parser/parser.go | 194 +++ pkg/scanners/azure/arm/parser/parser_test.go | 338 ++++ pkg/scanners/azure/arm/parser/template.go | 78 + .../azure/arm/parser/template_test.go | 60 + .../azure/arm/parser/testdata/example.json | 15 + .../azure/arm/parser/testdata/postgres.json | 73 + pkg/scanners/azure/arm/scanner.go | 187 +++ pkg/scanners/azure/deployment.go | 179 +++ pkg/scanners/azure/expressions/lex.go | 203 +++ pkg/scanners/azure/expressions/node.go | 75 + .../azure/expressions/token_walker.go | 40 + pkg/scanners/azure/functions/add.go | 15 + pkg/scanners/azure/functions/add_test.go | 38 + pkg/scanners/azure/functions/and.go | 27 + pkg/scanners/azure/functions/and_test.go | 39 + pkg/scanners/azure/functions/array.go | 29 + pkg/scanners/azure/functions/array_test.go | 44 + pkg/scanners/azure/functions/base64.go | 52 + pkg/scanners/azure/functions/base64_test.go | 85 + pkg/scanners/azure/functions/bool.go | 20 + pkg/scanners/azure/functions/bool_test.go | 63 + pkg/scanners/azure/functions/casing.go | 29 + pkg/scanners/azure/functions/casing_test.go | 71 + pkg/scanners/azure/functions/coalesce.go | 10 + pkg/scanners/azure/functions/coalesce_test.go | 56 + pkg/scanners/azure/functions/concat.go | 28 + pkg/scanners/azure/functions/concat_test.go | 94 ++ pkg/scanners/azure/functions/contains.go | 40 + pkg/scanners/azure/functions/contains_test.go | 
95 ++ pkg/scanners/azure/functions/copy_index.go | 25 + .../azure/functions/copy_index_test.go | 52 + pkg/scanners/azure/functions/create_array.go | 11 + .../azure/functions/create_array_test.go | 68 + pkg/scanners/azure/functions/create_object.go | 21 + .../azure/functions/create_object_test.go | 60 + pkg/scanners/azure/functions/data_uri.go | 36 + pkg/scanners/azure/functions/data_uri_test.go | 53 + pkg/scanners/azure/functions/date_time_add.go | 115 ++ .../azure/functions/date_time_epoch.go | 38 + .../azure/functions/date_time_epoch_test.go | 51 + .../azure/functions/datetime_add_test.go | 72 + pkg/scanners/azure/functions/deployment.go | 75 + pkg/scanners/azure/functions/div.go | 15 + pkg/scanners/azure/functions/div_test.go | 38 + pkg/scanners/azure/functions/empty.go | 33 + pkg/scanners/azure/functions/empty_test.go | 68 + pkg/scanners/azure/functions/ends_with.go | 22 + .../azure/functions/ends_with_test.go | 41 + pkg/scanners/azure/functions/equals.go | 25 + pkg/scanners/azure/functions/equals_test.go | 111 ++ pkg/scanners/azure/functions/false.go | 5 + pkg/scanners/azure/functions/first.go | 37 + pkg/scanners/azure/functions/first_test.go | 51 + pkg/scanners/azure/functions/float.go | 20 + pkg/scanners/azure/functions/float_test.go | 36 + pkg/scanners/azure/functions/format.go | 31 + pkg/scanners/azure/functions/format_test.go | 42 + pkg/scanners/azure/functions/functions.go | 99 ++ pkg/scanners/azure/functions/greater.go | 47 + pkg/scanners/azure/functions/greater_test.go | 119 ++ pkg/scanners/azure/functions/guid.go | 44 + pkg/scanners/azure/functions/guid_test.go | 35 + pkg/scanners/azure/functions/if.go | 15 + pkg/scanners/azure/functions/if_test.go | 44 + pkg/scanners/azure/functions/index_of.go | 22 + pkg/scanners/azure/functions/index_of_test.go | 48 + pkg/scanners/azure/functions/int.go | 20 + pkg/scanners/azure/functions/int_test.go | 36 + pkg/scanners/azure/functions/intersection.go | 76 + .../azure/functions/intersection_test.go | 106 ++ 
pkg/scanners/azure/functions/items.go | 6 + pkg/scanners/azure/functions/join.go | 22 + pkg/scanners/azure/functions/join_test.go | 39 + pkg/scanners/azure/functions/json.go | 20 + pkg/scanners/azure/functions/json_test.go | 42 + pkg/scanners/azure/functions/last.go | 37 + pkg/scanners/azure/functions/last_index_of.go | 22 + .../azure/functions/last_index_of_test.go | 48 + pkg/scanners/azure/functions/last_test.go | 51 + pkg/scanners/azure/functions/length.go | 29 + pkg/scanners/azure/functions/length_test.go | 53 + pkg/scanners/azure/functions/less.go | 47 + pkg/scanners/azure/functions/less_test.go | 119 ++ pkg/scanners/azure/functions/max.go | 33 + pkg/scanners/azure/functions/max_test.go | 58 + pkg/scanners/azure/functions/min.go | 33 + pkg/scanners/azure/functions/min_test.go | 58 + pkg/scanners/azure/functions/mod.go | 14 + pkg/scanners/azure/functions/mod_test.go | 41 + pkg/scanners/azure/functions/mul.go | 15 + pkg/scanners/azure/functions/mul_test.go | 38 + pkg/scanners/azure/functions/not.go | 13 + pkg/scanners/azure/functions/not_test.go | 33 + pkg/scanners/azure/functions/null.go | 5 + pkg/scanners/azure/functions/null_test.go | 12 + pkg/scanners/azure/functions/or.go | 20 + pkg/scanners/azure/functions/or_test.go | 44 + pkg/scanners/azure/functions/pad.go | 32 + pkg/scanners/azure/functions/pad_test.go | 61 + pkg/scanners/azure/functions/parameters.go | 1 + pkg/scanners/azure/functions/pick_zones.go | 23 + .../azure/functions/pick_zones_test.go | 14 + pkg/scanners/azure/functions/range.go | 30 + pkg/scanners/azure/functions/range_test.go | 47 + pkg/scanners/azure/functions/reference.go | 12 + .../azure/functions/reference_test.go | 12 + pkg/scanners/azure/functions/replace.go | 26 + pkg/scanners/azure/functions/replace_test.go | 41 + pkg/scanners/azure/functions/resource.go | 48 + pkg/scanners/azure/functions/resource_test.go | 12 + pkg/scanners/azure/functions/scope.go | 106 ++ pkg/scanners/azure/functions/scope_test.go | 34 + 
pkg/scanners/azure/functions/skip.go | 34 + pkg/scanners/azure/functions/skip_test.go | 65 + pkg/scanners/azure/functions/split.go | 36 + pkg/scanners/azure/functions/split_test.go | 38 + pkg/scanners/azure/functions/starts_with.go | 22 + .../azure/functions/starts_with_test.go | 41 + pkg/scanners/azure/functions/string.go | 16 + pkg/scanners/azure/functions/string_test.go | 44 + pkg/scanners/azure/functions/sub.go | 15 + pkg/scanners/azure/functions/sub_test.go | 43 + pkg/scanners/azure/functions/substring.go | 36 + .../azure/functions/substring_test.go | 49 + pkg/scanners/azure/functions/take.go | 49 + pkg/scanners/azure/functions/take_test.go | 63 + pkg/scanners/azure/functions/trim.go | 16 + pkg/scanners/azure/functions/trim_test.go | 71 + pkg/scanners/azure/functions/true.go | 5 + pkg/scanners/azure/functions/union.go | 60 + pkg/scanners/azure/functions/union_test.go | 110 ++ pkg/scanners/azure/functions/unique_string.go | 21 + .../azure/functions/unique_string_test.go | 38 + pkg/scanners/azure/functions/uri.go | 29 + pkg/scanners/azure/functions/uri_test.go | 48 + pkg/scanners/azure/functions/utc_now.go | 47 + pkg/scanners/azure/functions/utc_now_test.go | 40 + pkg/scanners/azure/resolver/resolver.go | 51 + pkg/scanners/azure/resolver/resolver_test.go | 101 ++ pkg/scanners/azure/value.go | 358 +++++ pkg/scanners/azure/value_test.go | 13 + pkg/scanners/cloudformation/cftypes/types.go | 12 + pkg/scanners/cloudformation/parser/errors.go | 24 + .../cloudformation/parser/file_context.go | 61 + .../parser/file_context_test.go | 61 + pkg/scanners/cloudformation/parser/fn_and.go | 38 + .../cloudformation/parser/fn_and_test.go | 186 +++ .../cloudformation/parser/fn_base64.go | 19 + .../cloudformation/parser/fn_base64_test.go | 35 + .../cloudformation/parser/fn_builtin.go | 65 + .../cloudformation/parser/fn_builtin_test.go | 63 + .../cloudformation/parser/fn_condition.go | 21 + .../parser/fn_condition_test.go | 98 ++ .../cloudformation/parser/fn_equals.go | 21 + 
.../cloudformation/parser/fn_equals_test.go | 180 +++ .../cloudformation/parser/fn_find_in_map.go | 45 + .../parser/fn_find_in_map_test.go | 100 ++ .../cloudformation/parser/fn_get_attr.go | 46 + .../cloudformation/parser/fn_get_attr_test.go | 50 + pkg/scanners/cloudformation/parser/fn_if.go | 40 + .../cloudformation/parser/fn_if_test.go | 56 + pkg/scanners/cloudformation/parser/fn_join.go | 34 + .../cloudformation/parser/fn_join_test.go | 152 ++ .../cloudformation/parser/fn_length.go | 24 + .../cloudformation/parser/fn_length_test.go | 99 ++ pkg/scanners/cloudformation/parser/fn_not.go | 23 + .../cloudformation/parser/fn_not_test.go | 124 ++ pkg/scanners/cloudformation/parser/fn_or.go | 39 + .../cloudformation/parser/fn_or_test.go | 184 +++ pkg/scanners/cloudformation/parser/fn_ref.go | 54 + .../cloudformation/parser/fn_ref_test.go | 89 ++ .../cloudformation/parser/fn_select.go | 41 + .../cloudformation/parser/fn_select_test.go | 77 + .../cloudformation/parser/fn_split.go | 44 + .../cloudformation/parser/fn_split_test.go | 56 + pkg/scanners/cloudformation/parser/fn_sub.go | 71 + .../cloudformation/parser/fn_sub_test.go | 103 ++ .../cloudformation/parser/intrinsics.go | 101 ++ .../cloudformation/parser/intrinsics_test.go | 45 + .../cloudformation/parser/parameter.go | 129 ++ .../cloudformation/parser/parameters_test.go | 89 ++ pkg/scanners/cloudformation/parser/parser.go | 236 +++ .../cloudformation/parser/parser_test.go | 374 +++++ .../cloudformation/parser/property.go | 428 ++++++ .../parser/property_conversion.go | 129 ++ .../cloudformation/parser/property_helpers.go | 267 ++++ .../parser/property_helpers_test.go | 195 +++ .../parser/pseudo_parameters.go | 46 + .../parser/pseudo_parameters_test.go | 36 + .../cloudformation/parser/reference.go | 58 + .../cloudformation/parser/resource.go | 211 +++ .../cloudformation/parser/resource_test.go | 75 + pkg/scanners/cloudformation/parser/util.go | 139 ++ pkg/scanners/cloudformation/scanner.go | 263 ++++ 
pkg/scanners/cloudformation/scanner_test.go | 103 ++ .../cloudformation/test/cf_scanning_test.go | 48 + .../test/examples/bucket/bucket.yaml | 24 + .../examples/ignores/bucket_with_ignores.yaml | 24 + .../test/examples/roles/roles.yml | 51 + pkg/scanners/dockerfile/parser/parser.go | 151 ++ pkg/scanners/dockerfile/parser/parser_test.go | 56 + pkg/scanners/dockerfile/scanner.go | 182 +++ pkg/scanners/dockerfile/scanner_test.go | 638 ++++++++ pkg/scanners/helm/options.go | 51 + pkg/scanners/helm/parser/option.go | 52 + pkg/scanners/helm/parser/parser.go | 322 ++++ pkg/scanners/helm/parser/parser_tar.go | 110 ++ pkg/scanners/helm/parser/parser_test.go | 24 + .../my-chart-0.1.0.tgz | Bin 0 -> 419 bytes .../my-chart/Chart.yaml | 6 + .../my-chart/templates/pod.yaml | 21 + pkg/scanners/helm/parser/vals.go | 114 ++ pkg/scanners/helm/scanner.go | 221 +++ pkg/scanners/helm/test/mysql/.helmignore | 21 + pkg/scanners/helm/test/mysql/Chart.lock | 6 + pkg/scanners/helm/test/mysql/Chart.yaml | 28 + pkg/scanners/helm/test/mysql/README.md | 491 ++++++ .../helm/test/mysql/charts/common/.helmignore | 22 + .../helm/test/mysql/charts/common/Chart.yaml | 23 + .../helm/test/mysql/charts/common/README.md | 345 +++++ .../charts/common/templates/_affinities.tpl | 102 ++ .../charts/common/templates/_capabilities.tpl | 128 ++ .../mysql/charts/common/templates/_errors.tpl | 23 + .../mysql/charts/common/templates/_images.tpl | 75 + .../charts/common/templates/_ingress.tpl | 68 + .../mysql/charts/common/templates/_labels.tpl | 18 + .../mysql/charts/common/templates/_names.tpl | 52 + .../charts/common/templates/_secrets.tpl | 131 ++ .../charts/common/templates/_storage.tpl | 23 + .../charts/common/templates/_tplvalues.tpl | 13 + .../mysql/charts/common/templates/_utils.tpl | 62 + .../charts/common/templates/_warnings.tpl | 14 + .../templates/validations/_cassandra.tpl | 72 + .../common/templates/validations/_mariadb.tpl | 103 ++ .../common/templates/validations/_mongodb.tpl | 108 ++ 
.../templates/validations/_postgresql.tpl | 129 ++ .../common/templates/validations/_redis.tpl | 76 + .../templates/validations/_validations.tpl | 46 + .../helm/test/mysql/charts/common/values.yaml | 5 + .../mysql/ci/values-production-with-rbac.yaml | 30 + .../helm/test/mysql/templates/NOTES.txt | 102 ++ .../helm/test/mysql/templates/_helpers.tpl | 192 +++ .../helm/test/mysql/templates/extra-list.yaml | 4 + .../test/mysql/templates/metrics-svc.yaml | 29 + .../test/mysql/templates/networkpolicy.yaml | 38 + .../mysql/templates/primary/configmap.yaml | 18 + .../primary/initialization-configmap.yaml | 14 + .../test/mysql/templates/primary/pdb.yaml | 25 + .../mysql/templates/primary/statefulset.yaml | 368 +++++ .../mysql/templates/primary/svc-headless.yaml | 24 + .../test/mysql/templates/primary/svc.yaml | 41 + .../helm/test/mysql/templates/role.yaml | 21 + .../test/mysql/templates/rolebinding.yaml | 21 + .../mysql/templates/secondary/configmap.yaml | 18 + .../test/mysql/templates/secondary/pdb.yaml | 25 + .../templates/secondary/statefulset.yaml | 338 ++++ .../templates/secondary/svc-headless.yaml | 26 + .../test/mysql/templates/secondary/svc.yaml | 43 + .../helm/test/mysql/templates/secrets.yaml | 21 + .../test/mysql/templates/serviceaccount.yaml | 22 + .../test/mysql/templates/servicemonitor.yaml | 42 + .../helm/test/mysql/values.schema.json | 178 +++ pkg/scanners/helm/test/mysql/values.yaml | 1020 ++++++++++++ pkg/scanners/helm/test/option_test.go | 167 ++ pkg/scanners/helm/test/parser_test.go | 199 +++ pkg/scanners/helm/test/scanner_test.go | 306 ++++ .../aws-cluster-autoscaler-bad.tar.gz | Bin 0 -> 4054 bytes .../mysql/templates/primary/configmap.yaml | 42 + .../mysql/templates/primary/statefulset.yaml | 147 ++ .../mysql/templates/primary/svc-headless.yaml | 25 + .../expected/mysql/templates/primary/svc.yaml | 25 + .../expected/mysql/templates/secrets.yaml | 15 + .../mysql/templates/serviceaccount.yaml | 14 + .../testchart/templates/deployment.yaml | 46 + 
.../options/testchart/templates/service.yaml | 21 + .../testchart/templates/serviceaccount.yaml | 11 + .../with-api-version/templates/pdb.yaml | 17 + .../testchart/templates/deployment.yaml | 46 + .../expected/testchart/templates/service.yaml | 21 + .../testchart/templates/serviceaccount.yaml | 11 + .../with-tarred-dep/templates/deployment.yaml | 78 + .../with-tarred-dep/templates/ingress.yaml | 26 + .../with-tarred-dep/templates/service.yaml | 24 + .../helm/test/testdata/mysql-8.8.26.tar | Bin 0 -> 284672 bytes .../helm/test/testdata/mysql-8.8.26.tar.gz | Bin 0 -> 40449 bytes .../helm/test/testdata/mysql-8.8.26.tgz | Bin 0 -> 40449 bytes pkg/scanners/helm/test/testdata/nope.tgz | Bin 0 -> 114 bytes .../helm/test/testdata/numberName/Chart.yaml | 3 + .../testdata/simmilar-templates/Chart.yaml | 6 + .../templates/deployment.yaml | 21 + .../templates/manifest.yaml | 2 + .../test/testdata/templated-name/Chart.yaml | 7 + .../helm/test/testdata/testchart/.helmignore | 23 + .../helm/test/testdata/testchart/Chart.yaml | 24 + .../testdata/testchart/templates/NOTES.txt | 22 + .../testdata/testchart/templates/_helpers.tpl | 62 + .../testchart/templates/deployment.yaml | 61 + .../testdata/testchart/templates/hpa.yaml | 28 + .../testdata/testchart/templates/ingress.yaml | 61 + .../testdata/testchart/templates/service.yaml | 15 + .../testchart/templates/serviceaccount.yaml | 12 + .../templates/tests/test-connection.yaml | 15 + .../helm/test/testdata/testchart/values.yaml | 86 ++ .../testdata/with-api-version/.helmignore | 23 + .../test/testdata/with-api-version/Chart.yaml | 24 + .../with-api-version/templates/_helpers.tpl | 62 + .../with-api-version/templates/pdb.yaml | 11 + .../testdata/with-api-version/values.yaml | 0 .../test/testdata/with-tarred-dep/.helmignore | 22 + .../test/testdata/with-tarred-dep/Chart.yaml | 14 + .../test/testdata/with-tarred-dep/LICENSE | 201 +++ .../with-tarred-dep/charts/common-1.16.1.tgz | Bin 0 -> 14613 bytes 
.../testdata/with-tarred-dep/renovate.json | 5 + .../with-tarred-dep/templates/.gitkeep | 0 .../with-tarred-dep/templates/deployment.yaml | 62 + .../with-tarred-dep/templates/ingress.yaml | 36 + .../templates/secrets-crdb-ca.yaml | 17 + .../templates/secrets-dbconn.yaml | 17 + .../with-tarred-dep/templates/service.yaml | 17 + .../test/testdata/with-tarred-dep/values.yaml | 30 + pkg/scanners/helm/test/values/values.yaml | 3 + pkg/scanners/json/parser/parser.go | 89 ++ pkg/scanners/json/parser/parser_test.go | 51 + pkg/scanners/json/scanner.go | 170 ++ pkg/scanners/json/scanner_test.go | 77 + pkg/scanners/kubernetes/parser/manifest.go | 33 + .../kubernetes/parser/manifest_node.go | 140 ++ pkg/scanners/kubernetes/parser/parser.go | 137 ++ pkg/scanners/kubernetes/scanner.go | 176 +++ pkg/scanners/kubernetes/scanner_test.go | 733 +++++++++ pkg/scanners/scanner.go | 21 + pkg/scanners/terraform/executor/executor.go | 269 ++++ .../terraform/executor/executor_test.go | 125 ++ pkg/scanners/terraform/executor/option.go | 103 ++ pkg/scanners/terraform/executor/pool.go | 299 ++++ pkg/scanners/terraform/executor/statistics.go | 91 ++ pkg/scanners/terraform/options.go | 211 +++ pkg/scanners/terraform/parser/evaluator.go | 511 +++++++ .../terraform/parser/evaluator_test.go | 94 ++ pkg/scanners/terraform/parser/funcs/cidr.go | 212 +++ .../terraform/parser/funcs/collection.go | 711 +++++++++ .../terraform/parser/funcs/conversion.go | 223 +++ pkg/scanners/terraform/parser/funcs/crypto.go | 335 ++++ .../terraform/parser/funcs/datetime.go | 71 + .../terraform/parser/funcs/defaults.go | 288 ++++ .../terraform/parser/funcs/encoding.go | 254 +++ .../terraform/parser/funcs/filesystem.go | 467 ++++++ pkg/scanners/terraform/parser/funcs/marks.go | 44 + pkg/scanners/terraform/parser/funcs/number.go | 170 ++ .../terraform/parser/funcs/sensitive.go | 67 + pkg/scanners/terraform/parser/funcs/string.go | 54 + pkg/scanners/terraform/parser/functions.go | 123 ++ 
pkg/scanners/terraform/parser/load_blocks.go | 130 ++ .../terraform/parser/load_blocks_test.go | 13 + pkg/scanners/terraform/parser/load_module.go | 183 +++ .../terraform/parser/load_module_metadata.go | 33 + pkg/scanners/terraform/parser/load_vars.go | 83 + .../terraform/parser/load_vars_test.go | 46 + .../terraform/parser/module_retrieval.go | 33 + pkg/scanners/terraform/parser/option.go | 67 + pkg/scanners/terraform/parser/parser.go | 349 +++++ .../parser/parser_integration_test.go | 51 + pkg/scanners/terraform/parser/parser_test.go | 1141 ++++++++++++++ .../terraform/parser/resolvers/cache.go | 62 + .../terraform/parser/resolvers/local.go | 26 + .../terraform/parser/resolvers/options.go | 28 + .../terraform/parser/resolvers/registry.go | 165 ++ .../terraform/parser/resolvers/remote.go | 92 ++ .../terraform/parser/resolvers/writable.go | 36 + .../parser/resolvers/writable_windows.go | 24 + pkg/scanners/terraform/parser/sort.go | 58 + .../parser/testdata/tfvars/terraform.tfvars | 1 + .../testdata/tfvars/terraform.tfvars.json | 10 + pkg/scanners/terraform/scanner.go | 379 +++++ .../terraform/scanner_integration_test.go | 132 ++ pkg/scanners/terraform/scanner_test.go | 1361 +++++++++++++++++ pkg/scanners/terraformplan/parser/option.go | 17 + pkg/scanners/terraformplan/parser/parser.go | 219 +++ .../terraformplan/parser/plan_file.go | 67 + pkg/scanners/terraformplan/scanner.go | 162 ++ pkg/scanners/terraformplan/scanner_test.go | 120 ++ .../terraformplan/test/parser_test.go | 21 + .../terraformplan/test/scanner_test.go | 39 + .../terraformplan/test/testdata/plan.json | 1 + pkg/scanners/toml/parser/parser.go | 89 ++ pkg/scanners/toml/parser/parser_test.go | 55 + pkg/scanners/toml/scanner.go | 164 ++ pkg/scanners/toml/scanner_test.go | 82 + pkg/scanners/universal/scanner.go | 63 + pkg/scanners/yaml/parser/parser.go | 109 ++ pkg/scanners/yaml/parser/parser_test.go | 150 ++ pkg/scanners/yaml/scanner.go | 165 ++ pkg/scanners/yaml/scanner_test.go | 85 + 
test/attribute_test.go | 712 +++++++++ test/block_test.go | 138 ++ test/count_test.go | 194 +++ test/deterministic_test.go | 51 + test/docker_test.go | 138 ++ test/fs_test.go | 24 + test/ignore_test.go | 529 +++++++ test/json_test.go | 104 ++ test/kubernetes_test.go | 131 ++ test/module_test.go | 632 ++++++++ test/performance_test.go | 59 + test/rules_test.go | 41 + test/setup_test.go | 59 + .../dockerfile/DS001/Dockerfile.allowed | 3 + .../dockerfile/DS001/Dockerfile.denied | 3 + .../dockerfile/DS002/Dockerfile.allowed | 3 + .../dockerfile/DS002/Dockerfile.denied | 2 + .../dockerfile/DS004/Dockerfile.allowed | 3 + .../dockerfile/DS004/Dockerfile.denied | 3 + .../dockerfile/DS005/Dockerfile.allowed | 3 + .../dockerfile/DS005/Dockerfile.denied | 4 + .../dockerfile/DS006/Dockerfile.allowed | 6 + .../dockerfile/DS006/Dockerfile.denied | 6 + .../dockerfile/DS007/Dockerfile.allowed | 6 + .../dockerfile/DS007/Dockerfile.denied | 8 + .../dockerfile/DS008/Dockerfile.allowed | 3 + .../dockerfile/DS008/Dockerfile.denied | 3 + .../dockerfile/DS009/Dockerfile.allowed | 3 + .../dockerfile/DS009/Dockerfile.denied | 3 + .../dockerfile/DS010/Dockerfile.allowed | 3 + .../dockerfile/DS010/Dockerfile.denied | 3 + .../dockerfile/DS011/Dockerfile.allowed | 3 + .../dockerfile/DS011/Dockerfile.denied | 3 + .../dockerfile/DS012/Dockerfile.allowed | 10 + .../dockerfile/DS012/Dockerfile.denied | 10 + .../dockerfile/DS013/Dockerfile.allowed | 4 + .../dockerfile/DS013/Dockerfile.denied | 4 + .../dockerfile/DS014/Dockerfile.allowed | 7 + .../dockerfile/DS014/Dockerfile.denied | 7 + .../dockerfile/DS015/Dockerfile.allowed | 5 + .../dockerfile/DS015/Dockerfile.denied | 5 + .../dockerfile/DS016/Dockerfile.allowed | 5 + .../dockerfile/DS016/Dockerfile.denied | 6 + .../dockerfile/DS017/Dockerfile.allowed | 4 + .../dockerfile/DS017/Dockerfile.denied | 5 + .../dockerfile/DS019/Dockerfile.allowed | 5 + .../dockerfile/DS019/Dockerfile.denied | 4 + .../dockerfile/DS020/Dockerfile.allowed | 5 + 
.../dockerfile/DS020/Dockerfile.denied | 5 + .../dockerfile/DS021/Dockerfile.allowed | 3 + .../dockerfile/DS021/Dockerfile.denied | 3 + .../dockerfile/DS022/Dockerfile.allowed | 2 + .../dockerfile/DS022/Dockerfile.denied | 3 + .../dockerfile/DS023/Dockerfile.allowed | 7 + .../dockerfile/DS023/Dockerfile.denied | 8 + .../dockerfile/DS024/Dockerfile.allowed | 4 + .../dockerfile/DS024/Dockerfile.denied | 4 + test/testdata/kubernetes/KSV001/allowed.yaml | 11 + test/testdata/kubernetes/KSV001/denied.yaml | 13 + test/testdata/kubernetes/KSV002/allowed.yaml | 15 + test/testdata/kubernetes/KSV002/denied.yaml | 15 + test/testdata/kubernetes/KSV003/allowed.yaml | 13 + test/testdata/kubernetes/KSV003/denied.yaml | 9 + test/testdata/kubernetes/KSV005/allowed.yaml | 13 + test/testdata/kubernetes/KSV005/denied.yaml | 17 + test/testdata/kubernetes/KSV006/allowed.yaml | 15 + test/testdata/kubernetes/KSV006/denied.yaml | 18 + test/testdata/kubernetes/KSV008/allowed.yaml | 14 + test/testdata/kubernetes/KSV008/denied.yaml | 14 + test/testdata/kubernetes/KSV009/allowed.yaml | 14 + test/testdata/kubernetes/KSV009/denied.yaml | 14 + test/testdata/kubernetes/KSV010/allowed.yaml | 14 + test/testdata/kubernetes/KSV010/denied.yaml | 14 + test/testdata/kubernetes/KSV011/allowed.yaml | 16 + test/testdata/kubernetes/KSV011/denied.yaml | 13 + test/testdata/kubernetes/KSV012/allowed.yaml | 11 + test/testdata/kubernetes/KSV012/denied.yaml | 9 + test/testdata/kubernetes/KSV013/allowed.yaml | 13 + test/testdata/kubernetes/KSV013/denied.yaml | 13 + test/testdata/kubernetes/KSV014/allowed.yaml | 15 + test/testdata/kubernetes/KSV014/denied.yaml | 15 + test/testdata/kubernetes/KSV015/allowed.yaml | 16 + test/testdata/kubernetes/KSV015/denied.yaml | 13 + test/testdata/kubernetes/KSV016/allowed.yaml | 16 + test/testdata/kubernetes/KSV016/denied.yaml | 13 + test/testdata/kubernetes/KSV017/allowed.yaml | 13 + test/testdata/kubernetes/KSV017/denied.yaml | 15 + test/testdata/kubernetes/KSV018/allowed.yaml | 
16 + test/testdata/kubernetes/KSV018/denied.yaml | 14 + test/testdata/kubernetes/KSV020/allowed.yaml | 15 + test/testdata/kubernetes/KSV020/denied.yaml | 13 + test/testdata/kubernetes/KSV021/allowed.yaml | 15 + test/testdata/kubernetes/KSV021/denied.yaml | 13 + test/testdata/kubernetes/KSV022/allowed.yaml | 13 + test/testdata/kubernetes/KSV022/denied.yaml | 17 + test/testdata/kubernetes/KSV023/allowed.yaml | 13 + test/testdata/kubernetes/KSV023/denied.yaml | 17 + test/testdata/kubernetes/KSV024/allowed.yaml | 13 + test/testdata/kubernetes/KSV024/denied.yaml | 15 + test/testdata/kubernetes/KSV025/allowed.yaml | 14 + test/testdata/kubernetes/KSV025/denied.yaml | 16 + test/testdata/kubernetes/KSV026/allowed.yaml | 17 + test/testdata/kubernetes/KSV026/denied.yaml | 19 + test/testdata/kubernetes/KSV027/allowed.yaml | 15 + test/testdata/kubernetes/KSV027/denied.yaml | 17 + test/testdata/kubernetes/KSV028/allowed.yaml | 15 + test/testdata/kubernetes/KSV028/denied.yaml | 24 + test/testdata/kubernetes/KSV030/allowed.yaml | 14 + test/testdata/kubernetes/KSV030/denied.yaml | 14 + test/testdata/kubernetes/KSV036/allowed.yaml | 12 + test/testdata/kubernetes/KSV036/denied.yaml | 0 test/testdata/kubernetes/KSV037/allowed.yaml | 13 + test/testdata/kubernetes/KSV037/denied.yaml | 12 + test/testdata/kubernetes/KSV038/allowed.yaml | 8 + test/testdata/kubernetes/KSV038/denied.yaml | 6 + test/testdata/kubernetes/KSV102/allowed.yaml | 24 + test/testdata/kubernetes/KSV102/denied.yaml | 19 + .../kubernetes/optional/KSV004/allowed.yaml | 13 + .../kubernetes/optional/KSV004/denied.yaml | 11 + .../kubernetes/optional/KSV007/allowed.yaml | 5 + .../kubernetes/optional/KSV007/denied.yaml | 10 + .../kubernetes/optional/KSV032/allowed.yaml | 8 + .../kubernetes/optional/KSV032/denied.yaml | 8 + .../kubernetes/optional/KSV033/allowed.yaml | 8 + .../kubernetes/optional/KSV033/denied.yaml | 8 + .../kubernetes/optional/KSV034/allowed.yaml | 8 + .../kubernetes/optional/KSV034/denied.yaml | 8 + 
.../kubernetes/optional/KSV035/allowed.yaml | 8 + .../kubernetes/optional/KSV035/denied.yaml | 8 + .../kubernetes/optional/KSV039/allowed.yaml | 35 + .../kubernetes/optional/KSV039/denied.yaml | 11 + .../kubernetes/optional/KSV040/allowed.yaml | 11 + .../kubernetes/optional/KSV040/denied.yaml | 10 + test/testutil/util.go | 113 ++ test/tf/fail/main.tf | 3 + test/wildcard_test.go | 85 + 898 files changed, 70024 insertions(+), 27 deletions(-) create mode 100644 internal/adapters/arm/adapt.go create mode 100644 internal/adapters/arm/appservice/adapt.go create mode 100644 internal/adapters/arm/authorization/adapt.go create mode 100644 internal/adapters/arm/compute/adapt.go create mode 100644 internal/adapters/arm/compute/adapt_test.go create mode 100644 internal/adapters/arm/container/adapt.go create mode 100644 internal/adapters/arm/database/adapt.go create mode 100644 internal/adapters/arm/database/firewall.go create mode 100644 internal/adapters/arm/database/maria.go create mode 100644 internal/adapters/arm/database/mssql.go create mode 100644 internal/adapters/arm/database/postgresql.go create mode 100644 internal/adapters/arm/datafactory/adapt.go create mode 100644 internal/adapters/arm/datalake/adapt.go create mode 100644 internal/adapters/arm/keyvault/adapt.go create mode 100644 internal/adapters/arm/monitor/adapt.go create mode 100644 internal/adapters/arm/network/adapt.go create mode 100644 internal/adapters/arm/securitycenter/adapt.go create mode 100644 internal/adapters/arm/storage/adapt.go create mode 100644 internal/adapters/arm/storage/adapt_test.go create mode 100644 internal/adapters/arm/synapse/adapt.go create mode 100644 internal/adapters/cloudformation/adapt.go create mode 100644 internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go create mode 100644 internal/adapters/cloudformation/aws/accessanalyzer/analyzer.go create mode 100644 internal/adapters/cloudformation/aws/adapt.go create mode 100644 
internal/adapters/cloudformation/aws/apigateway/apigateway.go create mode 100644 internal/adapters/cloudformation/aws/apigateway/stage.go create mode 100644 internal/adapters/cloudformation/aws/athena/athena.go create mode 100644 internal/adapters/cloudformation/aws/athena/workgroup.go create mode 100644 internal/adapters/cloudformation/aws/cloudfront/cloudfront.go create mode 100644 internal/adapters/cloudformation/aws/cloudfront/distribution.go create mode 100644 internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go create mode 100644 internal/adapters/cloudformation/aws/cloudtrail/trails.go create mode 100644 internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go create mode 100644 internal/adapters/cloudformation/aws/cloudwatch/log_group.go create mode 100644 internal/adapters/cloudformation/aws/codebuild/codebuild.go create mode 100644 internal/adapters/cloudformation/aws/codebuild/project.go create mode 100644 internal/adapters/cloudformation/aws/config/adapt_test.go create mode 100644 internal/adapters/cloudformation/aws/config/aggregator.go create mode 100644 internal/adapters/cloudformation/aws/config/config.go create mode 100644 internal/adapters/cloudformation/aws/documentdb/cluster.go create mode 100644 internal/adapters/cloudformation/aws/documentdb/documentdb.go create mode 100644 internal/adapters/cloudformation/aws/dynamodb/cluster.go create mode 100644 internal/adapters/cloudformation/aws/dynamodb/dynamodb.go create mode 100644 internal/adapters/cloudformation/aws/ec2/adapt_test.go create mode 100644 internal/adapters/cloudformation/aws/ec2/ec2.go create mode 100644 internal/adapters/cloudformation/aws/ec2/instance.go create mode 100644 internal/adapters/cloudformation/aws/ec2/launch_configuration.go create mode 100644 internal/adapters/cloudformation/aws/ec2/launch_template.go create mode 100644 internal/adapters/cloudformation/aws/ec2/nacl.go create mode 100644 internal/adapters/cloudformation/aws/ec2/security_group.go create mode 
100644 internal/adapters/cloudformation/aws/ec2/subnet.go create mode 100644 internal/adapters/cloudformation/aws/ec2/volume.go create mode 100644 internal/adapters/cloudformation/aws/ecr/ecr.go create mode 100644 internal/adapters/cloudformation/aws/ecr/repository.go create mode 100644 internal/adapters/cloudformation/aws/ecs/cluster.go create mode 100644 internal/adapters/cloudformation/aws/ecs/ecs.go create mode 100644 internal/adapters/cloudformation/aws/ecs/task_definition.go create mode 100644 internal/adapters/cloudformation/aws/efs/efs.go create mode 100644 internal/adapters/cloudformation/aws/efs/filesystem.go create mode 100644 internal/adapters/cloudformation/aws/eks/cluster.go create mode 100644 internal/adapters/cloudformation/aws/eks/eks.go create mode 100644 internal/adapters/cloudformation/aws/elasticache/cluster.go create mode 100644 internal/adapters/cloudformation/aws/elasticache/elasticache.go create mode 100644 internal/adapters/cloudformation/aws/elasticache/replication_group.go create mode 100644 internal/adapters/cloudformation/aws/elasticache/security_group.go create mode 100644 internal/adapters/cloudformation/aws/elasticsearch/domain.go create mode 100644 internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go create mode 100644 internal/adapters/cloudformation/aws/elb/adapt_test.go create mode 100644 internal/adapters/cloudformation/aws/elb/elb.go create mode 100644 internal/adapters/cloudformation/aws/elb/loadbalancer.go create mode 100644 internal/adapters/cloudformation/aws/iam/iam.go create mode 100644 internal/adapters/cloudformation/aws/iam/policy.go create mode 100644 internal/adapters/cloudformation/aws/kinesis/kinesis.go create mode 100644 internal/adapters/cloudformation/aws/kinesis/stream.go create mode 100644 internal/adapters/cloudformation/aws/lambda/function.go create mode 100644 internal/adapters/cloudformation/aws/lambda/lambda.go create mode 100644 internal/adapters/cloudformation/aws/mq/broker.go create 
mode 100644 internal/adapters/cloudformation/aws/mq/mq.go create mode 100644 internal/adapters/cloudformation/aws/msk/cluster.go create mode 100644 internal/adapters/cloudformation/aws/msk/msk.go create mode 100644 internal/adapters/cloudformation/aws/neptune/cluster.go create mode 100644 internal/adapters/cloudformation/aws/neptune/neptune.go create mode 100644 internal/adapters/cloudformation/aws/rds/adapt_test.go create mode 100644 internal/adapters/cloudformation/aws/rds/cluster.go create mode 100644 internal/adapters/cloudformation/aws/rds/instance.go create mode 100644 internal/adapters/cloudformation/aws/rds/parameter_groups.go create mode 100644 internal/adapters/cloudformation/aws/rds/rds.go create mode 100644 internal/adapters/cloudformation/aws/redshift/cluster.go create mode 100644 internal/adapters/cloudformation/aws/redshift/redshift.go create mode 100644 internal/adapters/cloudformation/aws/redshift/security_group.go create mode 100644 internal/adapters/cloudformation/aws/s3/bucket.go create mode 100644 internal/adapters/cloudformation/aws/s3/s3.go create mode 100644 internal/adapters/cloudformation/aws/sam/api.go create mode 100644 internal/adapters/cloudformation/aws/sam/function.go create mode 100644 internal/adapters/cloudformation/aws/sam/http_api.go create mode 100644 internal/adapters/cloudformation/aws/sam/sam.go create mode 100644 internal/adapters/cloudformation/aws/sam/state_machines.go create mode 100644 internal/adapters/cloudformation/aws/sam/tables.go create mode 100644 internal/adapters/cloudformation/aws/sns/sns.go create mode 100644 internal/adapters/cloudformation/aws/sns/topic.go create mode 100644 internal/adapters/cloudformation/aws/sqs/queue.go create mode 100644 internal/adapters/cloudformation/aws/sqs/sqs.go create mode 100644 internal/adapters/cloudformation/aws/ssm/secret.go create mode 100644 internal/adapters/cloudformation/aws/ssm/ssm.go create mode 100644 internal/adapters/cloudformation/aws/workspaces/workspace.go 
create mode 100644 internal/adapters/cloudformation/aws/workspaces/workspaces.go create mode 100644 internal/adapters/terraform/adapt.go create mode 100644 internal/adapters/terraform/aws/accessanalyzer/accessanalyzer.go create mode 100644 internal/adapters/terraform/aws/adapt.go create mode 100644 internal/adapters/terraform/aws/apigateway/adapt.go create mode 100644 internal/adapters/terraform/aws/apigateway/adapt_test.go create mode 100644 internal/adapters/terraform/aws/apigateway/apiv1.go create mode 100644 internal/adapters/terraform/aws/apigateway/apiv1_test.go create mode 100644 internal/adapters/terraform/aws/apigateway/apiv2.go create mode 100644 internal/adapters/terraform/aws/apigateway/apiv2_test.go create mode 100644 internal/adapters/terraform/aws/apigateway/namesv1.go create mode 100644 internal/adapters/terraform/aws/apigateway/namesv1_test.go create mode 100644 internal/adapters/terraform/aws/apigateway/namesv2.go create mode 100644 internal/adapters/terraform/aws/apigateway/namesv2_test.go create mode 100644 internal/adapters/terraform/aws/athena/adapt.go create mode 100644 internal/adapters/terraform/aws/athena/adapt_test.go create mode 100644 internal/adapters/terraform/aws/cloudfront/adapt.go create mode 100644 internal/adapters/terraform/aws/cloudfront/adapt_test.go create mode 100644 internal/adapters/terraform/aws/cloudtrail/adapt.go create mode 100644 internal/adapters/terraform/aws/cloudtrail/adapt_test.go create mode 100644 internal/adapters/terraform/aws/cloudwatch/adapt.go create mode 100644 internal/adapters/terraform/aws/cloudwatch/adapt_test.go create mode 100644 internal/adapters/terraform/aws/codebuild/adapt.go create mode 100644 internal/adapters/terraform/aws/codebuild/adapt_test.go create mode 100644 internal/adapters/terraform/aws/config/adapt.go create mode 100644 internal/adapters/terraform/aws/config/adapt_test.go create mode 100644 internal/adapters/terraform/aws/documentdb/adapt.go create mode 100644 
internal/adapters/terraform/aws/documentdb/adapt_test.go create mode 100644 internal/adapters/terraform/aws/dynamodb/adapt.go create mode 100644 internal/adapters/terraform/aws/dynamodb/adapt_test.go create mode 100644 internal/adapters/terraform/aws/ec2/adapt.go create mode 100644 internal/adapters/terraform/aws/ec2/adapt_test.go create mode 100644 internal/adapters/terraform/aws/ec2/autoscaling.go create mode 100644 internal/adapters/terraform/aws/ec2/autoscaling_test.go create mode 100644 internal/adapters/terraform/aws/ec2/subnet.go create mode 100644 internal/adapters/terraform/aws/ec2/subnet_test.go create mode 100644 internal/adapters/terraform/aws/ec2/volume.go create mode 100644 internal/adapters/terraform/aws/ec2/volume_test.go create mode 100644 internal/adapters/terraform/aws/ec2/vpc.go create mode 100644 internal/adapters/terraform/aws/ec2/vpc_test.go create mode 100644 internal/adapters/terraform/aws/ecr/adapt.go create mode 100644 internal/adapters/terraform/aws/ecr/adapt_test.go create mode 100644 internal/adapters/terraform/aws/ecs/adapt.go create mode 100644 internal/adapters/terraform/aws/ecs/adapt_test.go create mode 100644 internal/adapters/terraform/aws/efs/adapt.go create mode 100644 internal/adapters/terraform/aws/efs/adapt_test.go create mode 100644 internal/adapters/terraform/aws/eks/adapt.go create mode 100644 internal/adapters/terraform/aws/eks/adapt_test.go create mode 100644 internal/adapters/terraform/aws/elasticache/adapt.go create mode 100644 internal/adapters/terraform/aws/elasticache/adapt_test.go create mode 100644 internal/adapters/terraform/aws/elasticsearch/adapt.go create mode 100644 internal/adapters/terraform/aws/elasticsearch/adapt_test.go create mode 100644 internal/adapters/terraform/aws/elb/adapt.go create mode 100644 internal/adapters/terraform/aws/elb/adapt_test.go create mode 100644 internal/adapters/terraform/aws/emr/adapt.go create mode 100644 internal/adapters/terraform/aws/emr/adapt_test.go create mode 100644 
internal/adapters/terraform/aws/iam/adapt.go create mode 100644 internal/adapters/terraform/aws/iam/adapt_test.go create mode 100644 internal/adapters/terraform/aws/iam/convert.go create mode 100644 internal/adapters/terraform/aws/iam/groups.go create mode 100644 internal/adapters/terraform/aws/iam/groups_test.go create mode 100644 internal/adapters/terraform/aws/iam/passwords.go create mode 100644 internal/adapters/terraform/aws/iam/passwords_test.go create mode 100644 internal/adapters/terraform/aws/iam/policies.go create mode 100644 internal/adapters/terraform/aws/iam/policies_test.go create mode 100644 internal/adapters/terraform/aws/iam/roles.go create mode 100644 internal/adapters/terraform/aws/iam/roles_test.go create mode 100644 internal/adapters/terraform/aws/iam/users.go create mode 100644 internal/adapters/terraform/aws/iam/users_test.go create mode 100644 internal/adapters/terraform/aws/kinesis/adapt.go create mode 100644 internal/adapters/terraform/aws/kinesis/adapt_test.go create mode 100644 internal/adapters/terraform/aws/kms/adapt.go create mode 100644 internal/adapters/terraform/aws/kms/adapt_test.go create mode 100644 internal/adapters/terraform/aws/lambda/adapt.go create mode 100644 internal/adapters/terraform/aws/lambda/adapt_test.go create mode 100644 internal/adapters/terraform/aws/mq/adapt.go create mode 100644 internal/adapters/terraform/aws/mq/adapt_test.go create mode 100644 internal/adapters/terraform/aws/msk/adapt.go create mode 100644 internal/adapters/terraform/aws/msk/adapt_test.go create mode 100644 internal/adapters/terraform/aws/neptune/adapt.go create mode 100644 internal/adapters/terraform/aws/neptune/adapt_test.go create mode 100644 internal/adapters/terraform/aws/provider/adapt.go create mode 100644 internal/adapters/terraform/aws/provider/adapt_test.go create mode 100644 internal/adapters/terraform/aws/rds/adapt.go create mode 100644 internal/adapters/terraform/aws/rds/adapt_test.go create mode 100644 
internal/adapters/terraform/aws/redshift/adapt.go create mode 100644 internal/adapters/terraform/aws/redshift/adapt_test.go create mode 100644 internal/adapters/terraform/aws/s3/adapt.go create mode 100644 internal/adapters/terraform/aws/s3/adapt_test.go create mode 100644 internal/adapters/terraform/aws/s3/bucket.go create mode 100644 internal/adapters/terraform/aws/s3/bucket_test.go create mode 100644 internal/adapters/terraform/aws/s3/policies.go create mode 100644 internal/adapters/terraform/aws/s3/public_access_block.go create mode 100644 internal/adapters/terraform/aws/sns/adapt.go create mode 100644 internal/adapters/terraform/aws/sns/adapt_test.go create mode 100644 internal/adapters/terraform/aws/sqs/adapt.go create mode 100644 internal/adapters/terraform/aws/sqs/adapt_test.go create mode 100644 internal/adapters/terraform/aws/ssm/adapt.go create mode 100644 internal/adapters/terraform/aws/ssm/adapt_test.go create mode 100644 internal/adapters/terraform/aws/workspaces/adapt.go create mode 100644 internal/adapters/terraform/aws/workspaces/adapt_test.go create mode 100644 internal/adapters/terraform/azure/adapt.go create mode 100644 internal/adapters/terraform/azure/appservice/adapt.go create mode 100644 internal/adapters/terraform/azure/appservice/adapt_test.go create mode 100644 internal/adapters/terraform/azure/authorization/adapt.go create mode 100644 internal/adapters/terraform/azure/authorization/adapt_test.go create mode 100644 internal/adapters/terraform/azure/compute/adapt.go create mode 100644 internal/adapters/terraform/azure/compute/adapt_test.go create mode 100644 internal/adapters/terraform/azure/container/adapt.go create mode 100644 internal/adapters/terraform/azure/container/adapt_test.go create mode 100644 internal/adapters/terraform/azure/database/adapt.go create mode 100644 internal/adapters/terraform/azure/database/adapt_test.go create mode 100644 internal/adapters/terraform/azure/datafactory/adapt.go create mode 100644 
internal/adapters/terraform/azure/datafactory/adapt_test.go create mode 100644 internal/adapters/terraform/azure/datalake/adapt.go create mode 100644 internal/adapters/terraform/azure/datalake/adapt_test.go create mode 100644 internal/adapters/terraform/azure/keyvault/adapt.go create mode 100644 internal/adapters/terraform/azure/keyvault/adapt_test.go create mode 100644 internal/adapters/terraform/azure/monitor/adapt.go create mode 100644 internal/adapters/terraform/azure/monitor/adapt_test.go create mode 100644 internal/adapters/terraform/azure/network/adapt.go create mode 100644 internal/adapters/terraform/azure/network/adapt_test.go create mode 100644 internal/adapters/terraform/azure/securitycenter/adapt.go create mode 100644 internal/adapters/terraform/azure/securitycenter/adapt_test.go create mode 100644 internal/adapters/terraform/azure/storage/adapt.go create mode 100644 internal/adapters/terraform/azure/storage/adapt_test.go create mode 100644 internal/adapters/terraform/azure/synapse/adapt.go create mode 100644 internal/adapters/terraform/azure/synapse/adapt_test.go create mode 100644 internal/adapters/terraform/cloudstack/adapt.go create mode 100644 internal/adapters/terraform/cloudstack/compute/adapt.go create mode 100644 internal/adapters/terraform/cloudstack/compute/adapt_test.go create mode 100644 internal/adapters/terraform/digitalocean/adapt.go create mode 100644 internal/adapters/terraform/digitalocean/compute/adapt.go create mode 100644 internal/adapters/terraform/digitalocean/compute/adapt_test.go create mode 100644 internal/adapters/terraform/digitalocean/spaces/adapt.go create mode 100644 internal/adapters/terraform/digitalocean/spaces/adapt_test.go create mode 100644 internal/adapters/terraform/github/adapt.go create mode 100644 internal/adapters/terraform/github/branch_protections/adapt.go create mode 100644 internal/adapters/terraform/github/branch_protections/adapt_test.go create mode 100644 
internal/adapters/terraform/github/repositories/adapt.go create mode 100644 internal/adapters/terraform/github/repositories/adapt_test.go create mode 100644 internal/adapters/terraform/github/secrets/adapt.go create mode 100644 internal/adapters/terraform/github/secrets/adapt_test.go create mode 100644 internal/adapters/terraform/google/adapt.go create mode 100644 internal/adapters/terraform/google/bigquery/adapt.go create mode 100644 internal/adapters/terraform/google/bigquery/adapt_test.go create mode 100644 internal/adapters/terraform/google/compute/adapt.go create mode 100644 internal/adapters/terraform/google/compute/adapt_test.go create mode 100644 internal/adapters/terraform/google/compute/disks.go create mode 100644 internal/adapters/terraform/google/compute/disks_test.go create mode 100644 internal/adapters/terraform/google/compute/instances.go create mode 100644 internal/adapters/terraform/google/compute/instances_test.go create mode 100644 internal/adapters/terraform/google/compute/metadata.go create mode 100644 internal/adapters/terraform/google/compute/metadata_test.go create mode 100644 internal/adapters/terraform/google/compute/networks.go create mode 100644 internal/adapters/terraform/google/compute/networks_test.go create mode 100644 internal/adapters/terraform/google/compute/ssl.go create mode 100644 internal/adapters/terraform/google/compute/ssl_test.go create mode 100644 internal/adapters/terraform/google/dns/adapt.go create mode 100644 internal/adapters/terraform/google/dns/adapt_test.go create mode 100644 internal/adapters/terraform/google/gke/adapt.go create mode 100644 internal/adapters/terraform/google/gke/adapt_test.go create mode 100644 internal/adapters/terraform/google/iam/adapt.go create mode 100644 internal/adapters/terraform/google/iam/adapt_test.go create mode 100644 internal/adapters/terraform/google/iam/convert.go create mode 100644 internal/adapters/terraform/google/iam/folder_iam.go create mode 100644 
internal/adapters/terraform/google/iam/folders.go create mode 100644 internal/adapters/terraform/google/iam/org_iam.go create mode 100644 internal/adapters/terraform/google/iam/project_iam.go create mode 100644 internal/adapters/terraform/google/iam/project_iam_test.go create mode 100644 internal/adapters/terraform/google/iam/projects.go create mode 100644 internal/adapters/terraform/google/iam/workload_identity_pool_providers.go create mode 100644 internal/adapters/terraform/google/kms/adapt.go create mode 100644 internal/adapters/terraform/google/kms/adapt_test.go create mode 100644 internal/adapters/terraform/google/sql/adapt.go create mode 100644 internal/adapters/terraform/google/sql/adapt_test.go create mode 100644 internal/adapters/terraform/google/storage/adapt.go create mode 100644 internal/adapters/terraform/google/storage/adapt_test.go create mode 100644 internal/adapters/terraform/google/storage/iam.go create mode 100644 internal/adapters/terraform/kubernetes/adapt.go create mode 100644 internal/adapters/terraform/kubernetes/adapt_test.go create mode 100644 internal/adapters/terraform/nifcloud/computing/adapt.go create mode 100644 internal/adapters/terraform/nifcloud/computing/adapt_test.go create mode 100644 internal/adapters/terraform/nifcloud/computing/instance.go create mode 100644 internal/adapters/terraform/nifcloud/computing/instance_test.go create mode 100644 internal/adapters/terraform/nifcloud/computing/security_group.go create mode 100644 internal/adapters/terraform/nifcloud/computing/security_group_test.go create mode 100644 internal/adapters/terraform/nifcloud/dns/adapt.go create mode 100644 internal/adapters/terraform/nifcloud/dns/adapt_test.go create mode 100644 internal/adapters/terraform/nifcloud/dns/record.go create mode 100644 internal/adapters/terraform/nifcloud/dns/record_test.go create mode 100644 internal/adapters/terraform/nifcloud/nas/adapt.go create mode 100644 internal/adapters/terraform/nifcloud/nas/adapt_test.go create mode 
100644 internal/adapters/terraform/nifcloud/nas/nas_instance.go create mode 100644 internal/adapters/terraform/nifcloud/nas/nas_instance_test.go create mode 100644 internal/adapters/terraform/nifcloud/nas/nas_security_group.go create mode 100644 internal/adapters/terraform/nifcloud/nas/nas_security_group_test.go create mode 100644 internal/adapters/terraform/nifcloud/network/adapt.go create mode 100644 internal/adapters/terraform/nifcloud/network/adapt_test.go create mode 100644 internal/adapters/terraform/nifcloud/network/elastic_load_balancer.go create mode 100644 internal/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go create mode 100644 internal/adapters/terraform/nifcloud/network/load_balancer.go create mode 100644 internal/adapters/terraform/nifcloud/network/load_balancer_test.go create mode 100644 internal/adapters/terraform/nifcloud/network/router.go create mode 100644 internal/adapters/terraform/nifcloud/network/router_test.go create mode 100644 internal/adapters/terraform/nifcloud/network/vpn_gateway.go create mode 100644 internal/adapters/terraform/nifcloud/network/vpn_gateway_test.go create mode 100644 internal/adapters/terraform/nifcloud/nifcloud.go create mode 100644 internal/adapters/terraform/nifcloud/rdb/adapt.go create mode 100644 internal/adapters/terraform/nifcloud/rdb/adapt_test.go create mode 100644 internal/adapters/terraform/nifcloud/rdb/db_instance.go create mode 100644 internal/adapters/terraform/nifcloud/rdb/db_instance_test.go create mode 100644 internal/adapters/terraform/nifcloud/rdb/db_security_group.go create mode 100644 internal/adapters/terraform/nifcloud/rdb/db_security_group_test.go create mode 100644 internal/adapters/terraform/nifcloud/sslcertificate/adapt.go create mode 100644 internal/adapters/terraform/nifcloud/sslcertificate/adapt_test.go create mode 100644 internal/adapters/terraform/nifcloud/sslcertificate/server_certificate.go create mode 100644 
internal/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go create mode 100644 internal/adapters/terraform/openstack/adapt.go create mode 100644 internal/adapters/terraform/openstack/adapt_test.go create mode 100644 internal/adapters/terraform/openstack/networking.go create mode 100644 internal/adapters/terraform/openstack/networking_test.go create mode 100644 internal/adapters/terraform/oracle/adapt.go create mode 100644 internal/adapters/terraform/tftestutil/testutil.go create mode 100644 pkg/detection/detect.go create mode 100644 pkg/detection/detect_test.go create mode 100644 pkg/detection/peek.go create mode 100644 pkg/detection/testdata/big.file create mode 100644 pkg/detection/testdata/small.file create mode 100644 pkg/extrafs/extrafs.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/bench_test.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/decode.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/decode_array.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/decode_boolean.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/decode_meta_test.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/decode_null.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/decode_number.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/decode_object.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/decode_string.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/kind.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/node.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/parse.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/parse_array.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/parse_array_test.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/parse_boolean.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/parse_boolean_test.go create mode 100644 
pkg/scanners/azure/arm/parser/armjson/parse_comment.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/parse_complex_test.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/parse_null.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/parse_null_test.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/parse_number.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/parse_number_test.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/parse_object.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/parse_object_test.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/parse_string.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/parse_string_test.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/parse_whitespace.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/reader.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/reader_test.go create mode 100644 pkg/scanners/azure/arm/parser/armjson/unmarshal.go create mode 100644 pkg/scanners/azure/arm/parser/parser.go create mode 100644 pkg/scanners/azure/arm/parser/parser_test.go create mode 100644 pkg/scanners/azure/arm/parser/template.go create mode 100644 pkg/scanners/azure/arm/parser/template_test.go create mode 100644 pkg/scanners/azure/arm/parser/testdata/example.json create mode 100644 pkg/scanners/azure/arm/parser/testdata/postgres.json create mode 100644 pkg/scanners/azure/arm/scanner.go create mode 100644 pkg/scanners/azure/deployment.go create mode 100644 pkg/scanners/azure/expressions/lex.go create mode 100644 pkg/scanners/azure/expressions/node.go create mode 100644 pkg/scanners/azure/expressions/token_walker.go create mode 100644 pkg/scanners/azure/functions/add.go create mode 100644 pkg/scanners/azure/functions/add_test.go create mode 100644 pkg/scanners/azure/functions/and.go create mode 100644 pkg/scanners/azure/functions/and_test.go create mode 100644 pkg/scanners/azure/functions/array.go create mode 
100644 pkg/scanners/azure/functions/array_test.go create mode 100644 pkg/scanners/azure/functions/base64.go create mode 100644 pkg/scanners/azure/functions/base64_test.go create mode 100644 pkg/scanners/azure/functions/bool.go create mode 100644 pkg/scanners/azure/functions/bool_test.go create mode 100644 pkg/scanners/azure/functions/casing.go create mode 100644 pkg/scanners/azure/functions/casing_test.go create mode 100644 pkg/scanners/azure/functions/coalesce.go create mode 100644 pkg/scanners/azure/functions/coalesce_test.go create mode 100644 pkg/scanners/azure/functions/concat.go create mode 100644 pkg/scanners/azure/functions/concat_test.go create mode 100644 pkg/scanners/azure/functions/contains.go create mode 100644 pkg/scanners/azure/functions/contains_test.go create mode 100644 pkg/scanners/azure/functions/copy_index.go create mode 100644 pkg/scanners/azure/functions/copy_index_test.go create mode 100644 pkg/scanners/azure/functions/create_array.go create mode 100644 pkg/scanners/azure/functions/create_array_test.go create mode 100644 pkg/scanners/azure/functions/create_object.go create mode 100644 pkg/scanners/azure/functions/create_object_test.go create mode 100644 pkg/scanners/azure/functions/data_uri.go create mode 100644 pkg/scanners/azure/functions/data_uri_test.go create mode 100644 pkg/scanners/azure/functions/date_time_add.go create mode 100644 pkg/scanners/azure/functions/date_time_epoch.go create mode 100644 pkg/scanners/azure/functions/date_time_epoch_test.go create mode 100644 pkg/scanners/azure/functions/datetime_add_test.go create mode 100644 pkg/scanners/azure/functions/deployment.go create mode 100644 pkg/scanners/azure/functions/div.go create mode 100644 pkg/scanners/azure/functions/div_test.go create mode 100644 pkg/scanners/azure/functions/empty.go create mode 100644 pkg/scanners/azure/functions/empty_test.go create mode 100644 pkg/scanners/azure/functions/ends_with.go create mode 100644 pkg/scanners/azure/functions/ends_with_test.go 
create mode 100644 pkg/scanners/azure/functions/equals.go create mode 100644 pkg/scanners/azure/functions/equals_test.go create mode 100644 pkg/scanners/azure/functions/false.go create mode 100644 pkg/scanners/azure/functions/first.go create mode 100644 pkg/scanners/azure/functions/first_test.go create mode 100644 pkg/scanners/azure/functions/float.go create mode 100644 pkg/scanners/azure/functions/float_test.go create mode 100644 pkg/scanners/azure/functions/format.go create mode 100644 pkg/scanners/azure/functions/format_test.go create mode 100644 pkg/scanners/azure/functions/functions.go create mode 100644 pkg/scanners/azure/functions/greater.go create mode 100644 pkg/scanners/azure/functions/greater_test.go create mode 100644 pkg/scanners/azure/functions/guid.go create mode 100644 pkg/scanners/azure/functions/guid_test.go create mode 100644 pkg/scanners/azure/functions/if.go create mode 100644 pkg/scanners/azure/functions/if_test.go create mode 100644 pkg/scanners/azure/functions/index_of.go create mode 100644 pkg/scanners/azure/functions/index_of_test.go create mode 100644 pkg/scanners/azure/functions/int.go create mode 100644 pkg/scanners/azure/functions/int_test.go create mode 100644 pkg/scanners/azure/functions/intersection.go create mode 100644 pkg/scanners/azure/functions/intersection_test.go create mode 100644 pkg/scanners/azure/functions/items.go create mode 100644 pkg/scanners/azure/functions/join.go create mode 100644 pkg/scanners/azure/functions/join_test.go create mode 100644 pkg/scanners/azure/functions/json.go create mode 100644 pkg/scanners/azure/functions/json_test.go create mode 100644 pkg/scanners/azure/functions/last.go create mode 100644 pkg/scanners/azure/functions/last_index_of.go create mode 100644 pkg/scanners/azure/functions/last_index_of_test.go create mode 100644 pkg/scanners/azure/functions/last_test.go create mode 100644 pkg/scanners/azure/functions/length.go create mode 100644 pkg/scanners/azure/functions/length_test.go create mode 
100644 pkg/scanners/azure/functions/less.go create mode 100644 pkg/scanners/azure/functions/less_test.go create mode 100644 pkg/scanners/azure/functions/max.go create mode 100644 pkg/scanners/azure/functions/max_test.go create mode 100644 pkg/scanners/azure/functions/min.go create mode 100644 pkg/scanners/azure/functions/min_test.go create mode 100644 pkg/scanners/azure/functions/mod.go create mode 100644 pkg/scanners/azure/functions/mod_test.go create mode 100644 pkg/scanners/azure/functions/mul.go create mode 100644 pkg/scanners/azure/functions/mul_test.go create mode 100644 pkg/scanners/azure/functions/not.go create mode 100644 pkg/scanners/azure/functions/not_test.go create mode 100644 pkg/scanners/azure/functions/null.go create mode 100644 pkg/scanners/azure/functions/null_test.go create mode 100644 pkg/scanners/azure/functions/or.go create mode 100644 pkg/scanners/azure/functions/or_test.go create mode 100644 pkg/scanners/azure/functions/pad.go create mode 100644 pkg/scanners/azure/functions/pad_test.go create mode 100644 pkg/scanners/azure/functions/parameters.go create mode 100644 pkg/scanners/azure/functions/pick_zones.go create mode 100644 pkg/scanners/azure/functions/pick_zones_test.go create mode 100644 pkg/scanners/azure/functions/range.go create mode 100644 pkg/scanners/azure/functions/range_test.go create mode 100644 pkg/scanners/azure/functions/reference.go create mode 100644 pkg/scanners/azure/functions/reference_test.go create mode 100644 pkg/scanners/azure/functions/replace.go create mode 100644 pkg/scanners/azure/functions/replace_test.go create mode 100644 pkg/scanners/azure/functions/resource.go create mode 100644 pkg/scanners/azure/functions/resource_test.go create mode 100644 pkg/scanners/azure/functions/scope.go create mode 100644 pkg/scanners/azure/functions/scope_test.go create mode 100644 pkg/scanners/azure/functions/skip.go create mode 100644 pkg/scanners/azure/functions/skip_test.go create mode 100644 
pkg/scanners/azure/functions/split.go create mode 100644 pkg/scanners/azure/functions/split_test.go create mode 100644 pkg/scanners/azure/functions/starts_with.go create mode 100644 pkg/scanners/azure/functions/starts_with_test.go create mode 100644 pkg/scanners/azure/functions/string.go create mode 100644 pkg/scanners/azure/functions/string_test.go create mode 100644 pkg/scanners/azure/functions/sub.go create mode 100644 pkg/scanners/azure/functions/sub_test.go create mode 100644 pkg/scanners/azure/functions/substring.go create mode 100644 pkg/scanners/azure/functions/substring_test.go create mode 100644 pkg/scanners/azure/functions/take.go create mode 100644 pkg/scanners/azure/functions/take_test.go create mode 100644 pkg/scanners/azure/functions/trim.go create mode 100644 pkg/scanners/azure/functions/trim_test.go create mode 100644 pkg/scanners/azure/functions/true.go create mode 100644 pkg/scanners/azure/functions/union.go create mode 100644 pkg/scanners/azure/functions/union_test.go create mode 100644 pkg/scanners/azure/functions/unique_string.go create mode 100644 pkg/scanners/azure/functions/unique_string_test.go create mode 100644 pkg/scanners/azure/functions/uri.go create mode 100644 pkg/scanners/azure/functions/uri_test.go create mode 100644 pkg/scanners/azure/functions/utc_now.go create mode 100644 pkg/scanners/azure/functions/utc_now_test.go create mode 100644 pkg/scanners/azure/resolver/resolver.go create mode 100644 pkg/scanners/azure/resolver/resolver_test.go create mode 100644 pkg/scanners/azure/value.go create mode 100644 pkg/scanners/azure/value_test.go create mode 100644 pkg/scanners/cloudformation/cftypes/types.go create mode 100644 pkg/scanners/cloudformation/parser/errors.go create mode 100644 pkg/scanners/cloudformation/parser/file_context.go create mode 100644 pkg/scanners/cloudformation/parser/file_context_test.go create mode 100644 pkg/scanners/cloudformation/parser/fn_and.go create mode 100644 
pkg/scanners/cloudformation/parser/fn_and_test.go create mode 100644 pkg/scanners/cloudformation/parser/fn_base64.go create mode 100644 pkg/scanners/cloudformation/parser/fn_base64_test.go create mode 100644 pkg/scanners/cloudformation/parser/fn_builtin.go create mode 100644 pkg/scanners/cloudformation/parser/fn_builtin_test.go create mode 100644 pkg/scanners/cloudformation/parser/fn_condition.go create mode 100644 pkg/scanners/cloudformation/parser/fn_condition_test.go create mode 100644 pkg/scanners/cloudformation/parser/fn_equals.go create mode 100644 pkg/scanners/cloudformation/parser/fn_equals_test.go create mode 100644 pkg/scanners/cloudformation/parser/fn_find_in_map.go create mode 100644 pkg/scanners/cloudformation/parser/fn_find_in_map_test.go create mode 100644 pkg/scanners/cloudformation/parser/fn_get_attr.go create mode 100644 pkg/scanners/cloudformation/parser/fn_get_attr_test.go create mode 100644 pkg/scanners/cloudformation/parser/fn_if.go create mode 100644 pkg/scanners/cloudformation/parser/fn_if_test.go create mode 100644 pkg/scanners/cloudformation/parser/fn_join.go create mode 100644 pkg/scanners/cloudformation/parser/fn_join_test.go create mode 100644 pkg/scanners/cloudformation/parser/fn_length.go create mode 100644 pkg/scanners/cloudformation/parser/fn_length_test.go create mode 100644 pkg/scanners/cloudformation/parser/fn_not.go create mode 100644 pkg/scanners/cloudformation/parser/fn_not_test.go create mode 100644 pkg/scanners/cloudformation/parser/fn_or.go create mode 100644 pkg/scanners/cloudformation/parser/fn_or_test.go create mode 100644 pkg/scanners/cloudformation/parser/fn_ref.go create mode 100644 pkg/scanners/cloudformation/parser/fn_ref_test.go create mode 100644 pkg/scanners/cloudformation/parser/fn_select.go create mode 100644 pkg/scanners/cloudformation/parser/fn_select_test.go create mode 100644 pkg/scanners/cloudformation/parser/fn_split.go create mode 100644 pkg/scanners/cloudformation/parser/fn_split_test.go create mode 
100644 pkg/scanners/cloudformation/parser/fn_sub.go create mode 100644 pkg/scanners/cloudformation/parser/fn_sub_test.go create mode 100644 pkg/scanners/cloudformation/parser/intrinsics.go create mode 100644 pkg/scanners/cloudformation/parser/intrinsics_test.go create mode 100644 pkg/scanners/cloudformation/parser/parameter.go create mode 100644 pkg/scanners/cloudformation/parser/parameters_test.go create mode 100644 pkg/scanners/cloudformation/parser/parser.go create mode 100644 pkg/scanners/cloudformation/parser/parser_test.go create mode 100644 pkg/scanners/cloudformation/parser/property.go create mode 100644 pkg/scanners/cloudformation/parser/property_conversion.go create mode 100644 pkg/scanners/cloudformation/parser/property_helpers.go create mode 100644 pkg/scanners/cloudformation/parser/property_helpers_test.go create mode 100644 pkg/scanners/cloudformation/parser/pseudo_parameters.go create mode 100644 pkg/scanners/cloudformation/parser/pseudo_parameters_test.go create mode 100644 pkg/scanners/cloudformation/parser/reference.go create mode 100644 pkg/scanners/cloudformation/parser/resource.go create mode 100644 pkg/scanners/cloudformation/parser/resource_test.go create mode 100644 pkg/scanners/cloudformation/parser/util.go create mode 100644 pkg/scanners/cloudformation/scanner.go create mode 100644 pkg/scanners/cloudformation/scanner_test.go create mode 100644 pkg/scanners/cloudformation/test/cf_scanning_test.go create mode 100644 pkg/scanners/cloudformation/test/examples/bucket/bucket.yaml create mode 100644 pkg/scanners/cloudformation/test/examples/ignores/bucket_with_ignores.yaml create mode 100644 pkg/scanners/cloudformation/test/examples/roles/roles.yml create mode 100644 pkg/scanners/dockerfile/parser/parser.go create mode 100644 pkg/scanners/dockerfile/parser/parser_test.go create mode 100644 pkg/scanners/dockerfile/scanner.go create mode 100644 pkg/scanners/dockerfile/scanner_test.go create mode 100644 pkg/scanners/helm/options.go create mode 
100644 pkg/scanners/helm/parser/option.go create mode 100644 pkg/scanners/helm/parser/parser.go create mode 100644 pkg/scanners/helm/parser/parser_tar.go create mode 100644 pkg/scanners/helm/parser/parser_test.go create mode 100644 pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart-0.1.0.tgz create mode 100644 pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/Chart.yaml create mode 100644 pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/templates/pod.yaml create mode 100644 pkg/scanners/helm/parser/vals.go create mode 100644 pkg/scanners/helm/scanner.go create mode 100644 pkg/scanners/helm/test/mysql/.helmignore create mode 100644 pkg/scanners/helm/test/mysql/Chart.lock create mode 100644 pkg/scanners/helm/test/mysql/Chart.yaml create mode 100644 pkg/scanners/helm/test/mysql/README.md create mode 100644 pkg/scanners/helm/test/mysql/charts/common/.helmignore create mode 100644 pkg/scanners/helm/test/mysql/charts/common/Chart.yaml create mode 100644 pkg/scanners/helm/test/mysql/charts/common/README.md create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/_affinities.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/_capabilities.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/_errors.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/_images.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/_ingress.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/_labels.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/_names.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/_secrets.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/_storage.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/_tplvalues.tpl create mode 100644 
pkg/scanners/helm/test/mysql/charts/common/templates/_utils.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/_warnings.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/validations/_cassandra.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/validations/_mariadb.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/validations/_mongodb.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/validations/_postgresql.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/validations/_redis.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/templates/validations/_validations.tpl create mode 100644 pkg/scanners/helm/test/mysql/charts/common/values.yaml create mode 100644 pkg/scanners/helm/test/mysql/ci/values-production-with-rbac.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/NOTES.txt create mode 100644 pkg/scanners/helm/test/mysql/templates/_helpers.tpl create mode 100644 pkg/scanners/helm/test/mysql/templates/extra-list.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/metrics-svc.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/networkpolicy.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/primary/configmap.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/primary/initialization-configmap.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/primary/pdb.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/primary/statefulset.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/primary/svc-headless.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/primary/svc.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/role.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/rolebinding.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/secondary/configmap.yaml create mode 
100644 pkg/scanners/helm/test/mysql/templates/secondary/pdb.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/secondary/statefulset.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/secondary/svc-headless.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/secondary/svc.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/secrets.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/serviceaccount.yaml create mode 100644 pkg/scanners/helm/test/mysql/templates/servicemonitor.yaml create mode 100644 pkg/scanners/helm/test/mysql/values.schema.json create mode 100644 pkg/scanners/helm/test/mysql/values.yaml create mode 100644 pkg/scanners/helm/test/option_test.go create mode 100644 pkg/scanners/helm/test/parser_test.go create mode 100644 pkg/scanners/helm/test/scanner_test.go create mode 100644 pkg/scanners/helm/test/testdata/aws-cluster-autoscaler-bad.tar.gz create mode 100644 pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/configmap.yaml create mode 100644 pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/statefulset.yaml create mode 100644 pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/svc-headless.yaml create mode 100644 pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/svc.yaml create mode 100644 pkg/scanners/helm/test/testdata/expected/mysql/templates/secrets.yaml create mode 100644 pkg/scanners/helm/test/testdata/expected/mysql/templates/serviceaccount.yaml create mode 100644 pkg/scanners/helm/test/testdata/expected/options/testchart/templates/deployment.yaml create mode 100644 pkg/scanners/helm/test/testdata/expected/options/testchart/templates/service.yaml create mode 100644 pkg/scanners/helm/test/testdata/expected/options/testchart/templates/serviceaccount.yaml create mode 100644 pkg/scanners/helm/test/testdata/expected/options/with-api-version/templates/pdb.yaml create mode 100644 
pkg/scanners/helm/test/testdata/expected/testchart/templates/deployment.yaml create mode 100644 pkg/scanners/helm/test/testdata/expected/testchart/templates/service.yaml create mode 100644 pkg/scanners/helm/test/testdata/expected/testchart/templates/serviceaccount.yaml create mode 100644 pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/deployment.yaml create mode 100644 pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/ingress.yaml create mode 100644 pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/service.yaml create mode 100644 pkg/scanners/helm/test/testdata/mysql-8.8.26.tar create mode 100644 pkg/scanners/helm/test/testdata/mysql-8.8.26.tar.gz create mode 100644 pkg/scanners/helm/test/testdata/mysql-8.8.26.tgz create mode 100644 pkg/scanners/helm/test/testdata/nope.tgz create mode 100644 pkg/scanners/helm/test/testdata/numberName/Chart.yaml create mode 100644 pkg/scanners/helm/test/testdata/simmilar-templates/Chart.yaml create mode 100644 pkg/scanners/helm/test/testdata/simmilar-templates/templates/deployment.yaml create mode 100644 pkg/scanners/helm/test/testdata/simmilar-templates/templates/manifest.yaml create mode 100644 pkg/scanners/helm/test/testdata/templated-name/Chart.yaml create mode 100644 pkg/scanners/helm/test/testdata/testchart/.helmignore create mode 100644 pkg/scanners/helm/test/testdata/testchart/Chart.yaml create mode 100644 pkg/scanners/helm/test/testdata/testchart/templates/NOTES.txt create mode 100644 pkg/scanners/helm/test/testdata/testchart/templates/_helpers.tpl create mode 100644 pkg/scanners/helm/test/testdata/testchart/templates/deployment.yaml create mode 100644 pkg/scanners/helm/test/testdata/testchart/templates/hpa.yaml create mode 100644 pkg/scanners/helm/test/testdata/testchart/templates/ingress.yaml create mode 100644 pkg/scanners/helm/test/testdata/testchart/templates/service.yaml create mode 100644 pkg/scanners/helm/test/testdata/testchart/templates/serviceaccount.yaml 
create mode 100644 pkg/scanners/helm/test/testdata/testchart/templates/tests/test-connection.yaml create mode 100644 pkg/scanners/helm/test/testdata/testchart/values.yaml create mode 100644 pkg/scanners/helm/test/testdata/with-api-version/.helmignore create mode 100644 pkg/scanners/helm/test/testdata/with-api-version/Chart.yaml create mode 100644 pkg/scanners/helm/test/testdata/with-api-version/templates/_helpers.tpl create mode 100644 pkg/scanners/helm/test/testdata/with-api-version/templates/pdb.yaml create mode 100644 pkg/scanners/helm/test/testdata/with-api-version/values.yaml create mode 100644 pkg/scanners/helm/test/testdata/with-tarred-dep/.helmignore create mode 100644 pkg/scanners/helm/test/testdata/with-tarred-dep/Chart.yaml create mode 100644 pkg/scanners/helm/test/testdata/with-tarred-dep/LICENSE create mode 100644 pkg/scanners/helm/test/testdata/with-tarred-dep/charts/common-1.16.1.tgz create mode 100644 pkg/scanners/helm/test/testdata/with-tarred-dep/renovate.json create mode 100644 pkg/scanners/helm/test/testdata/with-tarred-dep/templates/.gitkeep create mode 100644 pkg/scanners/helm/test/testdata/with-tarred-dep/templates/deployment.yaml create mode 100644 pkg/scanners/helm/test/testdata/with-tarred-dep/templates/ingress.yaml create mode 100644 pkg/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-crdb-ca.yaml create mode 100644 pkg/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-dbconn.yaml create mode 100644 pkg/scanners/helm/test/testdata/with-tarred-dep/templates/service.yaml create mode 100644 pkg/scanners/helm/test/testdata/with-tarred-dep/values.yaml create mode 100644 pkg/scanners/helm/test/values/values.yaml create mode 100644 pkg/scanners/json/parser/parser.go create mode 100644 pkg/scanners/json/parser/parser_test.go create mode 100644 pkg/scanners/json/scanner.go create mode 100644 pkg/scanners/json/scanner_test.go create mode 100644 pkg/scanners/kubernetes/parser/manifest.go create mode 100644 
pkg/scanners/kubernetes/parser/manifest_node.go create mode 100644 pkg/scanners/kubernetes/parser/parser.go create mode 100644 pkg/scanners/kubernetes/scanner.go create mode 100644 pkg/scanners/kubernetes/scanner_test.go create mode 100644 pkg/scanners/scanner.go create mode 100644 pkg/scanners/terraform/executor/executor.go create mode 100644 pkg/scanners/terraform/executor/executor_test.go create mode 100644 pkg/scanners/terraform/executor/option.go create mode 100644 pkg/scanners/terraform/executor/pool.go create mode 100644 pkg/scanners/terraform/executor/statistics.go create mode 100644 pkg/scanners/terraform/options.go create mode 100644 pkg/scanners/terraform/parser/evaluator.go create mode 100644 pkg/scanners/terraform/parser/evaluator_test.go create mode 100644 pkg/scanners/terraform/parser/funcs/cidr.go create mode 100644 pkg/scanners/terraform/parser/funcs/collection.go create mode 100644 pkg/scanners/terraform/parser/funcs/conversion.go create mode 100644 pkg/scanners/terraform/parser/funcs/crypto.go create mode 100644 pkg/scanners/terraform/parser/funcs/datetime.go create mode 100644 pkg/scanners/terraform/parser/funcs/defaults.go create mode 100644 pkg/scanners/terraform/parser/funcs/encoding.go create mode 100644 pkg/scanners/terraform/parser/funcs/filesystem.go create mode 100644 pkg/scanners/terraform/parser/funcs/marks.go create mode 100644 pkg/scanners/terraform/parser/funcs/number.go create mode 100644 pkg/scanners/terraform/parser/funcs/sensitive.go create mode 100644 pkg/scanners/terraform/parser/funcs/string.go create mode 100644 pkg/scanners/terraform/parser/functions.go create mode 100644 pkg/scanners/terraform/parser/load_blocks.go create mode 100644 pkg/scanners/terraform/parser/load_blocks_test.go create mode 100644 pkg/scanners/terraform/parser/load_module.go create mode 100644 pkg/scanners/terraform/parser/load_module_metadata.go create mode 100644 pkg/scanners/terraform/parser/load_vars.go create mode 100644 
pkg/scanners/terraform/parser/load_vars_test.go create mode 100644 pkg/scanners/terraform/parser/module_retrieval.go create mode 100644 pkg/scanners/terraform/parser/option.go create mode 100644 pkg/scanners/terraform/parser/parser.go create mode 100644 pkg/scanners/terraform/parser/parser_integration_test.go create mode 100644 pkg/scanners/terraform/parser/parser_test.go create mode 100644 pkg/scanners/terraform/parser/resolvers/cache.go create mode 100644 pkg/scanners/terraform/parser/resolvers/local.go create mode 100644 pkg/scanners/terraform/parser/resolvers/options.go create mode 100644 pkg/scanners/terraform/parser/resolvers/registry.go create mode 100644 pkg/scanners/terraform/parser/resolvers/remote.go create mode 100644 pkg/scanners/terraform/parser/resolvers/writable.go create mode 100644 pkg/scanners/terraform/parser/resolvers/writable_windows.go create mode 100644 pkg/scanners/terraform/parser/sort.go create mode 100644 pkg/scanners/terraform/parser/testdata/tfvars/terraform.tfvars create mode 100644 pkg/scanners/terraform/parser/testdata/tfvars/terraform.tfvars.json create mode 100644 pkg/scanners/terraform/scanner.go create mode 100644 pkg/scanners/terraform/scanner_integration_test.go create mode 100644 pkg/scanners/terraform/scanner_test.go create mode 100644 pkg/scanners/terraformplan/parser/option.go create mode 100644 pkg/scanners/terraformplan/parser/parser.go create mode 100644 pkg/scanners/terraformplan/parser/plan_file.go create mode 100644 pkg/scanners/terraformplan/scanner.go create mode 100644 pkg/scanners/terraformplan/scanner_test.go create mode 100644 pkg/scanners/terraformplan/test/parser_test.go create mode 100644 pkg/scanners/terraformplan/test/scanner_test.go create mode 100644 pkg/scanners/terraformplan/test/testdata/plan.json create mode 100644 pkg/scanners/toml/parser/parser.go create mode 100644 pkg/scanners/toml/parser/parser_test.go create mode 100644 pkg/scanners/toml/scanner.go create mode 100644 
pkg/scanners/toml/scanner_test.go create mode 100644 pkg/scanners/universal/scanner.go create mode 100644 pkg/scanners/yaml/parser/parser.go create mode 100644 pkg/scanners/yaml/parser/parser_test.go create mode 100644 pkg/scanners/yaml/scanner.go create mode 100644 pkg/scanners/yaml/scanner_test.go create mode 100644 test/attribute_test.go create mode 100644 test/block_test.go create mode 100644 test/count_test.go create mode 100644 test/deterministic_test.go create mode 100644 test/docker_test.go create mode 100644 test/fs_test.go create mode 100644 test/ignore_test.go create mode 100644 test/json_test.go create mode 100644 test/kubernetes_test.go create mode 100644 test/module_test.go create mode 100644 test/performance_test.go create mode 100644 test/rules_test.go create mode 100644 test/setup_test.go create mode 100644 test/testdata/dockerfile/DS001/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS001/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS002/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS002/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS004/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS004/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS005/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS005/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS006/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS006/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS007/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS007/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS008/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS008/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS009/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS009/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS010/Dockerfile.allowed create mode 
100644 test/testdata/dockerfile/DS010/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS011/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS011/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS012/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS012/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS013/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS013/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS014/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS014/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS015/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS015/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS016/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS016/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS017/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS017/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS019/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS019/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS020/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS020/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS021/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS021/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS022/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS022/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS023/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS023/Dockerfile.denied create mode 100644 test/testdata/dockerfile/DS024/Dockerfile.allowed create mode 100644 test/testdata/dockerfile/DS024/Dockerfile.denied create mode 100644 test/testdata/kubernetes/KSV001/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV001/denied.yaml create mode 100644 
test/testdata/kubernetes/KSV002/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV002/denied.yaml create mode 100644 test/testdata/kubernetes/KSV003/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV003/denied.yaml create mode 100644 test/testdata/kubernetes/KSV005/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV005/denied.yaml create mode 100644 test/testdata/kubernetes/KSV006/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV006/denied.yaml create mode 100644 test/testdata/kubernetes/KSV008/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV008/denied.yaml create mode 100644 test/testdata/kubernetes/KSV009/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV009/denied.yaml create mode 100644 test/testdata/kubernetes/KSV010/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV010/denied.yaml create mode 100644 test/testdata/kubernetes/KSV011/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV011/denied.yaml create mode 100644 test/testdata/kubernetes/KSV012/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV012/denied.yaml create mode 100644 test/testdata/kubernetes/KSV013/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV013/denied.yaml create mode 100644 test/testdata/kubernetes/KSV014/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV014/denied.yaml create mode 100644 test/testdata/kubernetes/KSV015/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV015/denied.yaml create mode 100644 test/testdata/kubernetes/KSV016/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV016/denied.yaml create mode 100644 test/testdata/kubernetes/KSV017/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV017/denied.yaml create mode 100644 test/testdata/kubernetes/KSV018/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV018/denied.yaml create mode 100644 test/testdata/kubernetes/KSV020/allowed.yaml create mode 100644 
test/testdata/kubernetes/KSV020/denied.yaml create mode 100644 test/testdata/kubernetes/KSV021/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV021/denied.yaml create mode 100644 test/testdata/kubernetes/KSV022/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV022/denied.yaml create mode 100644 test/testdata/kubernetes/KSV023/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV023/denied.yaml create mode 100644 test/testdata/kubernetes/KSV024/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV024/denied.yaml create mode 100644 test/testdata/kubernetes/KSV025/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV025/denied.yaml create mode 100644 test/testdata/kubernetes/KSV026/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV026/denied.yaml create mode 100644 test/testdata/kubernetes/KSV027/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV027/denied.yaml create mode 100644 test/testdata/kubernetes/KSV028/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV028/denied.yaml create mode 100644 test/testdata/kubernetes/KSV030/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV030/denied.yaml create mode 100644 test/testdata/kubernetes/KSV036/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV036/denied.yaml create mode 100644 test/testdata/kubernetes/KSV037/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV037/denied.yaml create mode 100644 test/testdata/kubernetes/KSV038/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV038/denied.yaml create mode 100644 test/testdata/kubernetes/KSV102/allowed.yaml create mode 100644 test/testdata/kubernetes/KSV102/denied.yaml create mode 100644 test/testdata/kubernetes/optional/KSV004/allowed.yaml create mode 100644 test/testdata/kubernetes/optional/KSV004/denied.yaml create mode 100644 test/testdata/kubernetes/optional/KSV007/allowed.yaml create mode 100644 test/testdata/kubernetes/optional/KSV007/denied.yaml create mode 
100644 test/testdata/kubernetes/optional/KSV032/allowed.yaml create mode 100644 test/testdata/kubernetes/optional/KSV032/denied.yaml create mode 100644 test/testdata/kubernetes/optional/KSV033/allowed.yaml create mode 100644 test/testdata/kubernetes/optional/KSV033/denied.yaml create mode 100644 test/testdata/kubernetes/optional/KSV034/allowed.yaml create mode 100644 test/testdata/kubernetes/optional/KSV034/denied.yaml create mode 100644 test/testdata/kubernetes/optional/KSV035/allowed.yaml create mode 100644 test/testdata/kubernetes/optional/KSV035/denied.yaml create mode 100644 test/testdata/kubernetes/optional/KSV039/allowed.yaml create mode 100644 test/testdata/kubernetes/optional/KSV039/denied.yaml create mode 100644 test/testdata/kubernetes/optional/KSV040/allowed.yaml create mode 100644 test/testdata/kubernetes/optional/KSV040/denied.yaml create mode 100644 test/testutil/util.go create mode 100644 test/tf/fail/main.tf create mode 100644 test/wildcard_test.go diff --git a/go.mod b/go.mod index e36c08e265dc..b6ac31504872 100644 --- a/go.mod +++ b/go.mod @@ -117,7 +117,23 @@ require ( modernc.org/sqlite v1.28.0 ) -require github.com/bitnami/go-version v0.0.0-20231130084017-bb00604d650c +require ( + github.com/Masterminds/semver v1.5.0 + github.com/apparentlymart/go-cidr v1.1.0 + github.com/aws/smithy-go v1.19.0 + github.com/bitnami/go-version v0.0.0-20231130084017-bb00604d650c + github.com/hashicorp/go-uuid v1.0.3 + github.com/hashicorp/hcl/v2 v2.19.1 + github.com/liamg/iamgo v0.0.9 + github.com/liamg/jfather v0.0.7 + github.com/liamg/memoryfs v1.6.0 + github.com/mitchellh/go-homedir v1.1.0 + github.com/olekukonko/tablewriter v0.0.5 + github.com/zclconf/go-cty v1.13.0 + github.com/zclconf/go-cty-yaml v1.0.3 + golang.org/x/crypto v0.18.0 + helm.sh/helm/v3 v3.14.0 +) require ( cloud.google.com/go v0.110.8 // indirect @@ -141,7 +157,6 @@ require ( 
github.com/Intevation/jsonpath v0.2.1 // indirect github.com/MakeNowJust/heredoc v1.0.0 // indirect github.com/Masterminds/goutils v1.1.1 // indirect - github.com/Masterminds/semver v1.5.0 // indirect github.com/Masterminds/semver/v3 v3.2.1 // indirect github.com/Masterminds/squirrel v1.5.4 // indirect github.com/Microsoft/go-winio v0.6.1 // indirect @@ -154,7 +169,6 @@ require ( github.com/alecthomas/chroma v0.10.0 // indirect github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092 // indirect - github.com/apparentlymart/go-cidr v1.1.0 // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect @@ -204,7 +218,6 @@ require ( github.com/aws/aws-sdk-go-v2/service/sso v1.18.6 // indirect github.com/aws/aws-sdk-go-v2/service/ssooidc v1.21.6 // indirect github.com/aws/aws-sdk-go-v2/service/workspaces v1.35.6 // indirect - github.com/aws/smithy-go v1.19.0 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect github.com/briandowns/spinner v1.23.0 // indirect @@ -281,11 +294,9 @@ require ( github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-safetemp v1.0.0 // indirect - github.com/hashicorp/go-uuid v1.0.3 // indirect github.com/hashicorp/go-version v1.6.0 // indirect github.com/hashicorp/golang-lru v0.6.0 // indirect github.com/hashicorp/hcl v1.0.0 // indirect - github.com/hashicorp/hcl/v2 v2.19.1 // indirect github.com/huandu/xstrings v1.4.0 
// indirect github.com/imdario/mergo v0.3.15 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect @@ -299,9 +310,6 @@ require ( github.com/klauspost/compress v1.16.6 // indirect github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect - github.com/liamg/iamgo v0.0.9 // indirect - github.com/liamg/jfather v0.0.7 // indirect - github.com/liamg/memoryfs v1.6.0 // indirect github.com/lib/pq v1.10.9 // indirect github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect github.com/lunixbochs/struc v0.0.0-20200707160740-784aaebc1d40 // indirect @@ -313,7 +321,6 @@ require ( github.com/microsoft/go-rustaudit v0.0.0-20220808201409-204dfee52032 // indirect github.com/miekg/dns v1.1.53 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect - github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/go-testing-interface v1.14.1 // indirect github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect @@ -331,7 +338,6 @@ require ( github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f // indirect github.com/oklog/ulid v1.3.1 // indirect - github.com/olekukonko/tablewriter v0.0.5 // indirect github.com/opencontainers/runc v1.1.5 // indirect github.com/opencontainers/runtime-spec v1.1.0-rc.1 // indirect github.com/opencontainers/selinux v1.11.0 // indirect @@ -369,8 +375,6 @@ require ( github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/yashtewari/glob-intersection v0.2.0 // indirect github.com/yuin/gopher-lua v1.1.0 // indirect - 
github.com/zclconf/go-cty v1.13.0 // indirect - github.com/zclconf/go-cty-yaml v1.0.3 // indirect go.mongodb.org/mongo-driver v1.11.3 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1 // indirect @@ -381,7 +385,6 @@ require ( go.starlark.net v0.0.0-20230525235612-a134d8f9ddca // indirect go.uber.org/goleak v1.3.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/crypto v0.18.0 // indirect golang.org/x/net v0.20.0 // indirect golang.org/x/oauth2 v0.13.0 // indirect golang.org/x/sys v0.16.0 // indirect @@ -398,7 +401,6 @@ require ( gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect - helm.sh/helm/v3 v3.14.0 // indirect k8s.io/apiextensions-apiserver v0.29.0 // indirect k8s.io/apimachinery v0.29.0 // indirect k8s.io/apiserver v0.29.0 // indirect diff --git a/internal/adapters/arm/adapt.go b/internal/adapters/arm/adapt.go new file mode 100644 index 000000000000..e160d6219012 --- /dev/null +++ b/internal/adapters/arm/adapt.go @@ -0,0 +1,50 @@ +package arm + +import ( + "context" + + "github.com/aquasecurity/trivy/internal/adapters/arm/appservice" + "github.com/aquasecurity/trivy/internal/adapters/arm/authorization" + "github.com/aquasecurity/trivy/internal/adapters/arm/compute" + "github.com/aquasecurity/trivy/internal/adapters/arm/container" + "github.com/aquasecurity/trivy/internal/adapters/arm/database" + "github.com/aquasecurity/trivy/internal/adapters/arm/datafactory" + "github.com/aquasecurity/trivy/internal/adapters/arm/datalake" + "github.com/aquasecurity/trivy/internal/adapters/arm/keyvault" + "github.com/aquasecurity/trivy/internal/adapters/arm/monitor" + "github.com/aquasecurity/trivy/internal/adapters/arm/network" + "github.com/aquasecurity/trivy/internal/adapters/arm/securitycenter" + 
"github.com/aquasecurity/trivy/internal/adapters/arm/storage" + "github.com/aquasecurity/trivy/internal/adapters/arm/synapse" + + "github.com/aquasecurity/defsec/pkg/providers/azure" + "github.com/aquasecurity/defsec/pkg/state" + scanner "github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +// Adapt ... +func Adapt(ctx context.Context, deployment scanner.Deployment) *state.State { + return &state.State{ + Azure: adaptAzure(deployment), + } +} + +func adaptAzure(deployment scanner.Deployment) azure.Azure { + + return azure.Azure{ + AppService: appservice.Adapt(deployment), + Authorization: authorization.Adapt(deployment), + Compute: compute.Adapt(deployment), + Container: container.Adapt(deployment), + Database: database.Adapt(deployment), + DataFactory: datafactory.Adapt(deployment), + DataLake: datalake.Adapt(deployment), + KeyVault: keyvault.Adapt(deployment), + Monitor: monitor.Adapt(deployment), + Network: network.Adapt(deployment), + SecurityCenter: securitycenter.Adapt(deployment), + Storage: storage.Adapt(deployment), + Synapse: synapse.Adapt(deployment), + } + +} diff --git a/internal/adapters/arm/appservice/adapt.go b/internal/adapters/arm/appservice/adapt.go new file mode 100644 index 000000000000..dbcf431f7b6e --- /dev/null +++ b/internal/adapters/arm/appservice/adapt.go @@ -0,0 +1,58 @@ +package appservice + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/appservice" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +func Adapt(deployment azure.Deployment) appservice.AppService { + return appservice.AppService{ + Services: adaptServices(deployment), + FunctionApps: adaptFunctionApps(deployment), + } +} + +func adaptFunctionApps(deployment azure.Deployment) []appservice.FunctionApp { + var functionApps []appservice.FunctionApp + + for _, resource := range deployment.GetResourcesByType("Microsoft.Web/sites") { 
+ functionApps = append(functionApps, adaptFunctionApp(resource)) + } + return functionApps +} + +func adaptServices(deployment azure.Deployment) []appservice.Service { + var services []appservice.Service + for _, resource := range deployment.GetResourcesByType("Microsoft.Web/sites") { + services = append(services, adaptService(resource)) + } + return services +} + +func adaptFunctionApp(resource azure.Resource) appservice.FunctionApp { + return appservice.FunctionApp{ + Metadata: resource.Metadata, + HTTPSOnly: resource.Properties.GetMapValue("httpsOnly").AsBoolValue(false, resource.Properties.GetMetadata()), + } +} + +func adaptService(resource azure.Resource) appservice.Service { + return appservice.Service{ + Metadata: resource.Metadata, + EnableClientCert: resource.Properties.GetMapValue("clientCertEnabled").AsBoolValue(false, resource.Properties.GetMetadata()), + Identity: struct{ Type defsecTypes.StringValue }{ + Type: resource.Properties.GetMapValue("identity").GetMapValue("type").AsStringValue("", resource.Properties.GetMetadata()), + }, + Authentication: struct{ Enabled defsecTypes.BoolValue }{ + Enabled: resource.Properties.GetMapValue("siteAuthSettings").GetMapValue("enabled").AsBoolValue(false, resource.Properties.GetMetadata()), + }, + Site: struct { + EnableHTTP2 defsecTypes.BoolValue + MinimumTLSVersion defsecTypes.StringValue + }{ + EnableHTTP2: resource.Properties.GetMapValue("httpsOnly").AsBoolValue(false, resource.Properties.GetMetadata()), + MinimumTLSVersion: resource.Properties.GetMapValue("minTlsVersion").AsStringValue("", resource.Properties.GetMetadata()), + }, + } +} diff --git a/internal/adapters/arm/authorization/adapt.go b/internal/adapters/arm/authorization/adapt.go new file mode 100644 index 000000000000..12b08e45ce43 --- /dev/null +++ b/internal/adapters/arm/authorization/adapt.go @@ -0,0 +1,38 @@ +package authorization + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/authorization" + 
"github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +func Adapt(deployment azure.Deployment) authorization.Authorization { + return authorization.Authorization{ + RoleDefinitions: adaptRoleDefinitions(deployment), + } +} + +func adaptRoleDefinitions(deployment azure.Deployment) (roleDefinitions []authorization.RoleDefinition) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Authorization/roleDefinitions") { + roleDefinitions = append(roleDefinitions, adaptRoleDefinition(resource)) + } + return roleDefinitions +} + +func adaptRoleDefinition(resource azure.Resource) authorization.RoleDefinition { + + return authorization.RoleDefinition{ + Metadata: resource.Metadata, + Permissions: adaptPermissions(resource), + AssignableScopes: resource.Properties.GetMapValue("assignableScopes").AsStringValuesList(""), + } +} + +func adaptPermissions(resource azure.Resource) (permissions []authorization.Permission) { + for _, permission := range resource.Properties.GetMapValue("permissions").AsList() { + permissions = append(permissions, authorization.Permission{ + Metadata: resource.Metadata, + Actions: permission.GetMapValue("actions").AsStringValuesList(""), + }) + } + return permissions +} diff --git a/internal/adapters/arm/compute/adapt.go b/internal/adapters/arm/compute/adapt.go new file mode 100644 index 000000000000..97940d367670 --- /dev/null +++ b/internal/adapters/arm/compute/adapt.go @@ -0,0 +1,85 @@ +package compute + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/compute" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +func Adapt(deployment azure.Deployment) compute.Compute { + return compute.Compute{ + LinuxVirtualMachines: adaptLinuxVirtualMachines(deployment), + WindowsVirtualMachines: adaptWindowsVirtualMachines(deployment), + ManagedDisks: adaptManagedDisks(deployment), + } +} + +func adaptManagedDisks(deployment 
azure.Deployment) (managedDisks []compute.ManagedDisk) { + + for _, resource := range deployment.GetResourcesByType("Microsoft.Compute/disks") { + managedDisks = append(managedDisks, adaptManagedDisk(resource)) + } + + return managedDisks +} + +func adaptManagedDisk(resource azure.Resource) compute.ManagedDisk { + hasEncryption := resource.Properties.HasKey("encryption") + + return compute.ManagedDisk{ + Metadata: resource.Metadata, + Encryption: compute.Encryption{ + Metadata: resource.Metadata, + Enabled: defsecTypes.Bool(hasEncryption, resource.Metadata), + }, + } +} + +func adaptWindowsVirtualMachines(deployment azure.Deployment) (windowsVirtualMachines []compute.WindowsVirtualMachine) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Compute/virtualMachines") { + if resource.Properties.GetMapValue("osProfile").GetMapValue("windowsConfiguration").AsMap() != nil { + windowsVirtualMachines = append(windowsVirtualMachines, adaptWindowsVirtualMachine(resource)) + } + } + + return windowsVirtualMachines +} + +func adaptWindowsVirtualMachine(resource azure.Resource) compute.WindowsVirtualMachine { + return compute.WindowsVirtualMachine{ + Metadata: resource.Metadata, + VirtualMachine: compute.VirtualMachine{ + Metadata: resource.Metadata, + CustomData: resource.Properties.GetMapValue("osProfile"). 
+ GetMapValue("customData").AsStringValue("", resource.Metadata), + }, + } +} + +func adaptLinuxVirtualMachines(deployment azure.Deployment) (linuxVirtualMachines []compute.LinuxVirtualMachine) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Compute/virtualMachines") { + if resource.Properties.GetMapValue("osProfile").GetMapValue("linuxConfiguration").AsMap() != nil { + linuxVirtualMachines = append(linuxVirtualMachines, adaptLinuxVirtualMachine(resource)) + } + } + + return linuxVirtualMachines +} + +func adaptLinuxVirtualMachine(resource azure.Resource) compute.LinuxVirtualMachine { + return compute.LinuxVirtualMachine{ + Metadata: resource.Metadata, + VirtualMachine: compute.VirtualMachine{ + Metadata: resource.Metadata, + CustomData: resource.Properties.GetMapValue("osProfile"). + GetMapValue("customData").AsStringValue("", resource.Metadata), + }, + OSProfileLinuxConfig: compute.OSProfileLinuxConfig{ + Metadata: resource.Metadata, + DisablePasswordAuthentication: resource.Properties.GetMapValue("osProfile"). + GetMapValue("linuxConfiguration"). 
+ GetMapValue("disablePasswordAuthentication").AsBoolValue(false, resource.Metadata), + }, + } + +} diff --git a/internal/adapters/arm/compute/adapt_test.go b/internal/adapters/arm/compute/adapt_test.go new file mode 100644 index 000000000000..18b2f8528c83 --- /dev/null +++ b/internal/adapters/arm/compute/adapt_test.go @@ -0,0 +1,60 @@ +package compute + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/azure" + + "github.com/stretchr/testify/assert" + + "github.com/stretchr/testify/require" +) + +func Test_AdaptLinuxVM(t *testing.T) { + + input := azure.Deployment{ + Resources: []azure.Resource{ + { + Type: azure.NewValue("Microsoft.Compute/virtualMachines", types.NewTestMetadata()), + Properties: azure.NewValue(map[string]azure.Value{ + "osProfile": azure.NewValue(map[string]azure.Value{ + "linuxConfiguration": azure.NewValue(map[string]azure.Value{ + "disablePasswordAuthentication": azure.NewValue(true, types.NewTestMetadata()), + }, types.NewTestMetadata()), + }, types.NewTestMetadata()), + }, types.NewTestMetadata()), + }, + }, + } + + output := Adapt(input) + + require.Len(t, output.LinuxVirtualMachines, 1) + require.Len(t, output.WindowsVirtualMachines, 0) + + linuxVM := output.LinuxVirtualMachines[0] + assert.True(t, linuxVM.OSProfileLinuxConfig.DisablePasswordAuthentication.IsTrue()) + +} + +func Test_AdaptWindowsVM(t *testing.T) { + + input := azure.Deployment{ + Resources: []azure.Resource{ + { + Type: azure.NewValue("Microsoft.Compute/virtualMachines", types.NewTestMetadata()), + Properties: azure.NewValue(map[string]azure.Value{ + "osProfile": azure.NewValue(map[string]azure.Value{ + "windowsConfiguration": azure.NewValue(map[string]azure.Value{}, types.NewTestMetadata()), + }, types.NewTestMetadata()), + }, types.NewTestMetadata()), + }, + }, + } + + output := Adapt(input) + + require.Len(t, output.LinuxVirtualMachines, 0) + require.Len(t, 
output.WindowsVirtualMachines, 1) +} diff --git a/internal/adapters/arm/container/adapt.go b/internal/adapters/arm/container/adapt.go new file mode 100644 index 000000000000..90acaf2c5539 --- /dev/null +++ b/internal/adapters/arm/container/adapt.go @@ -0,0 +1,17 @@ +package container + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/container" + "github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +func Adapt(deployment azure.Deployment) container.Container { + return container.Container{ + KubernetesClusters: adaptKubernetesClusters(deployment), + } +} + +func adaptKubernetesClusters(deployment azure.Deployment) []container.KubernetesCluster { + + return nil +} diff --git a/internal/adapters/arm/database/adapt.go b/internal/adapters/arm/database/adapt.go new file mode 100644 index 000000000000..84606f53359f --- /dev/null +++ b/internal/adapters/arm/database/adapt.go @@ -0,0 +1,35 @@ +package database + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/database" + "github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +func Adapt(deployment azure.Deployment) database.Database { + return database.Database{ + MSSQLServers: adaptMSSQLServers(deployment), + MariaDBServers: adaptMariaDBServers(deployment), + MySQLServers: adaptMySQLServers(deployment), + PostgreSQLServers: adaptPostgreSQLServers(deployment), + } +} + +func adaptMySQLServers(deployment azure.Deployment) (mysqlDbServers []database.MySQLServer) { + for _, resource := range deployment.GetResourcesByType("Microsoft.DBforMySQL/servers") { + mysqlDbServers = append(mysqlDbServers, adaptMySQLServer(resource, deployment)) + } + return mysqlDbServers +} + +func adaptMySQLServer(resource azure.Resource, deployment azure.Deployment) database.MySQLServer { + return database.MySQLServer{ + Metadata: resource.Metadata, + Server: database.Server{ + Metadata: resource.Metadata, + EnableSSLEnforcement: 
resource.Properties.GetMapValue("sslEnforcement").AsBoolValue(false, resource.Metadata), + MinimumTLSVersion: resource.Properties.GetMapValue("minimalTlsVersion").AsStringValue("TLSEnforcementDisabled", resource.Metadata), + EnablePublicNetworkAccess: resource.Properties.GetMapValue("publicNetworkAccess").AsBoolValue(false, resource.Metadata), + FirewallRules: addFirewallRule(resource), + }, + } +} diff --git a/internal/adapters/arm/database/firewall.go b/internal/adapters/arm/database/firewall.go new file mode 100644 index 000000000000..3f4ca50f272e --- /dev/null +++ b/internal/adapters/arm/database/firewall.go @@ -0,0 +1,18 @@ +package database + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/database" + "github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +func addFirewallRule(resource azure.Resource) []database.FirewallRule { + var rules []database.FirewallRule + for _, rule := range resource.Properties.GetMapValue("firewallRules").AsMap() { + rules = append(rules, database.FirewallRule{ + Metadata: rule.Metadata, + StartIP: rule.GetMapValue("startIpAddress").AsStringValue("", rule.Metadata), + EndIP: rule.GetMapValue("endIpAddress").AsStringValue("", rule.Metadata), + }) + } + return rules +} diff --git a/internal/adapters/arm/database/maria.go b/internal/adapters/arm/database/maria.go new file mode 100644 index 000000000000..e645c3fe2230 --- /dev/null +++ b/internal/adapters/arm/database/maria.go @@ -0,0 +1,27 @@ +package database + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/database" + "github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +func adaptMariaDBServers(deployment azure.Deployment) (mariaDbServers []database.MariaDBServer) { + for _, resource := range deployment.GetResourcesByType("Microsoft.DBforMariaDB/servers") { + mariaDbServers = append(mariaDbServers, adaptMariaDBServer(resource, deployment)) + } + return mariaDbServers + +} + +func adaptMariaDBServer(resource 
azure.Resource, deployment azure.Deployment) database.MariaDBServer { + return database.MariaDBServer{ + Metadata: resource.Metadata, + Server: database.Server{ + Metadata: resource.Metadata, + EnableSSLEnforcement: resource.Properties.GetMapValue("sslEnforcement").AsBoolValue(false, resource.Metadata), + MinimumTLSVersion: resource.Properties.GetMapValue("minimalTlsVersion").AsStringValue("TLSEnforcementDisabled", resource.Metadata), + EnablePublicNetworkAccess: resource.Properties.GetMapValue("publicNetworkAccess").AsBoolValue(false, resource.Metadata), + FirewallRules: addFirewallRule(resource), + }, + } +} diff --git a/internal/adapters/arm/database/mssql.go b/internal/adapters/arm/database/mssql.go new file mode 100644 index 000000000000..6e598b5439b9 --- /dev/null +++ b/internal/adapters/arm/database/mssql.go @@ -0,0 +1,61 @@ +package database + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/database" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +func adaptMSSQLServers(deployment azure.Deployment) (msSQlServers []database.MSSQLServer) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Sql/servers") { + msSQlServers = append(msSQlServers, adaptMSSQLServer(resource, deployment)) + } + return msSQlServers +} + +func adaptMSSQLServer(resource azure.Resource, deployment azure.Deployment) database.MSSQLServer { + return database.MSSQLServer{ + Metadata: resource.Metadata, + Server: database.Server{ + Metadata: resource.Metadata, + EnableSSLEnforcement: resource.Properties.GetMapValue("sslEnforcement").AsBoolValue(false, resource.Metadata), + MinimumTLSVersion: resource.Properties.GetMapValue("minimalTlsVersion").AsStringValue("TLSEnforcementDisabled", resource.Metadata), + EnablePublicNetworkAccess: resource.Properties.GetMapValue("publicNetworkAccess").AsBoolValue(false, resource.Metadata), + FirewallRules: addFirewallRule(resource), + 
}, + ExtendedAuditingPolicies: adaptExtendedAuditingPolicies(resource, deployment), + SecurityAlertPolicies: adaptSecurityAlertPolicies(resource, deployment), + } +} + +func adaptExtendedAuditingPolicies(resource azure.Resource, deployment azure.Deployment) (policies []database.ExtendedAuditingPolicy) { + + for _, policy := range deployment.GetResourcesByType("Microsoft.Sql/servers/extendedAuditingSettings") { + policies = append(policies, database.ExtendedAuditingPolicy{ + Metadata: policy.Metadata, + RetentionInDays: policy.Properties.GetMapValue("retentionDays").AsIntValue(0, policy.Metadata), + }) + } + + return policies +} + +func adaptSecurityAlertPolicies(resource azure.Resource, deployment azure.Deployment) (policies []database.SecurityAlertPolicy) { + for _, policy := range deployment.GetResourcesByType("Microsoft.Sql/servers/securityAlertPolicies") { + policies = append(policies, database.SecurityAlertPolicy{ + Metadata: policy.Metadata, + EmailAddresses: adaptStringList(policy.Properties.GetMapValue("emailAddresses")), + DisabledAlerts: adaptStringList(policy.Properties.GetMapValue("disabledAlerts")), + EmailAccountAdmins: policy.Properties.GetMapValue("emailAccountAdmins").AsBoolValue(false, policy.Metadata), + }) + } + return policies +} + +func adaptStringList(value azure.Value) []defsecTypes.StringValue { + var list []defsecTypes.StringValue + for _, v := range value.AsList() { + list = append(list, v.AsStringValue("", value.Metadata)) + } + return list +} diff --git a/internal/adapters/arm/database/postgresql.go b/internal/adapters/arm/database/postgresql.go new file mode 100644 index 000000000000..d7847eae334a --- /dev/null +++ b/internal/adapters/arm/database/postgresql.go @@ -0,0 +1,64 @@ +package database + +import ( + "fmt" + "strings" + + "github.com/aquasecurity/defsec/pkg/providers/azure/database" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +func 
adaptPostgreSQLServers(deployment azure.Deployment) (databases []database.PostgreSQLServer) { + for _, resource := range deployment.GetResourcesByType("Microsoft.DBforPostgreSQL/servers") { + databases = append(databases, adaptPostgreSQLServer(resource, deployment)) + } + + return databases +} + +func adaptPostgreSQLServer(resource azure.Resource, deployment azure.Deployment) database.PostgreSQLServer { + return database.PostgreSQLServer{ + Metadata: resource.Metadata, + Server: database.Server{ + Metadata: resource.Metadata, + EnableSSLEnforcement: resource.Properties.GetMapValue("sslEnforcement").AsBoolValue(false, resource.Metadata), + MinimumTLSVersion: resource.Properties.GetMapValue("minimalTlsVersion").AsStringValue("TLSEnforcementDisabled", resource.Metadata), + EnablePublicNetworkAccess: resource.Properties.GetMapValue("publicNetworkAccess").AsBoolValue(false, resource.Metadata), + FirewallRules: addFirewallRule(resource), + }, + Config: adaptPostgreSQLConfiguration(resource, deployment), + } +} + +func adaptPostgreSQLConfiguration(resource azure.Resource, deployment azure.Deployment) database.PostgresSQLConfig { + + parent := fmt.Sprintf("%s/", resource.Name.AsString()) + + config := database.PostgresSQLConfig{ + Metadata: resource.Metadata, + LogCheckpoints: defsecTypes.BoolDefault(false, resource.Metadata), + ConnectionThrottling: defsecTypes.BoolDefault(false, resource.Metadata), + LogConnections: defsecTypes.BoolDefault(false, resource.Metadata), + } + + for _, configuration := range deployment.GetResourcesByType("Microsoft.DBforPostgreSQL/servers/configurations") { + if strings.HasPrefix(configuration.Name.AsString(), parent) { + val := configuration.Properties.GetMapValue("value") + if strings.HasSuffix(configuration.Name.AsString(), "log_checkpoints") { + config.LogCheckpoints = val.AsBoolValue(false, configuration.Metadata) + continue + } + if strings.HasSuffix(configuration.Name.AsString(), "log_connections") { + config.LogConnections = 
val.AsBoolValue(false, configuration.Metadata) + continue + } + if strings.HasSuffix(configuration.Name.AsString(), "connection_throttling") { + config.ConnectionThrottling = val.AsBoolValue(false, configuration.Metadata) + continue + } + } + } + + return config +} diff --git a/internal/adapters/arm/datafactory/adapt.go b/internal/adapters/arm/datafactory/adapt.go new file mode 100644 index 000000000000..f39bdf39a433 --- /dev/null +++ b/internal/adapters/arm/datafactory/adapt.go @@ -0,0 +1,27 @@ +package datafactory + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/datafactory" + "github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +func Adapt(deployment azure.Deployment) datafactory.DataFactory { + + return datafactory.DataFactory{ + DataFactories: adaptDataFactories(deployment), + } +} + +func adaptDataFactories(deployment azure.Deployment) (factories []datafactory.Factory) { + for _, resource := range deployment.GetResourcesByType("Microsoft.DataFactory/factories") { + factories = append(factories, adaptDataFactory(resource)) + } + return factories +} + +func adaptDataFactory(resource azure.Resource) datafactory.Factory { + return datafactory.Factory{ + Metadata: resource.Metadata, + EnablePublicNetwork: resource.Properties.GetMapValue("publicNetworkAccess").AsBoolValue(true, resource.Metadata), + } +} diff --git a/internal/adapters/arm/datalake/adapt.go b/internal/adapters/arm/datalake/adapt.go new file mode 100644 index 000000000000..facb5d5d0be3 --- /dev/null +++ b/internal/adapters/arm/datalake/adapt.go @@ -0,0 +1,28 @@ +package datalake + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/datalake" + "github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +func Adapt(deployment azure.Deployment) datalake.DataLake { + + return datalake.DataLake{ + Stores: adaptStores(deployment), + } +} + +func adaptStores(deployment azure.Deployment) (stores []datalake.Store) { + for _, resource := range 
deployment.GetResourcesByType("Microsoft.DataLakeStore/accounts") { + stores = append(stores, adaptStore(resource)) + } + + return stores +} + +func adaptStore(resource azure.Resource) datalake.Store { + return datalake.Store{ + Metadata: resource.Metadata, + EnableEncryption: resource.Properties.GetMapValue("encryptionState").AsBoolValue(false, resource.Metadata), + } +} diff --git a/internal/adapters/arm/keyvault/adapt.go b/internal/adapters/arm/keyvault/adapt.go new file mode 100644 index 000000000000..a64da026af3e --- /dev/null +++ b/internal/adapters/arm/keyvault/adapt.go @@ -0,0 +1,64 @@ +package keyvault + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/keyvault" + "github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +func Adapt(deployment azure.Deployment) keyvault.KeyVault { + return keyvault.KeyVault{ + Vaults: adaptVaults(deployment), + } +} + +func adaptVaults(deployment azure.Deployment) (vaults []keyvault.Vault) { + for _, resource := range deployment.GetResourcesByType("Microsoft.KeyVault/vaults") { + vaults = append(vaults, adaptVault(resource, deployment)) + } + + return vaults +} + +func adaptVault(resource azure.Resource, deployment azure.Deployment) keyvault.Vault { + return keyvault.Vault{ + Metadata: resource.Metadata, + Secrets: adaptSecrets(resource, deployment), + Keys: adaptKeys(resource, deployment), + EnablePurgeProtection: resource.Properties.GetMapValue("enablePurgeProtection").AsBoolValue(false, resource.Metadata), + SoftDeleteRetentionDays: resource.Properties.GetMapValue("softDeleteRetentionInDays").AsIntValue(7, resource.Metadata), + NetworkACLs: keyvault.NetworkACLs{ + Metadata: resource.Metadata, + DefaultAction: resource.Properties.GetMapValue("properties").GetMapValue("networkAcls").GetMapValue("defaultAction").AsStringValue("", resource.Metadata), + }, + } +} + +func adaptKeys(resource azure.Resource, deployment azure.Deployment) (keys []keyvault.Key) { + for _, resource := range 
deployment.GetResourcesByType("Microsoft.KeyVault/vaults/keys") { + keys = append(keys, adaptKey(resource)) + } + + return keys +} + +func adaptKey(resource azure.Resource) keyvault.Key { + return keyvault.Key{ + Metadata: resource.Metadata, + ExpiryDate: resource.Properties.GetMapValue("attributes").GetMapValue("exp").AsTimeValue(resource.Metadata), + } +} + +func adaptSecrets(resource azure.Resource, deployment azure.Deployment) (secrets []keyvault.Secret) { + for _, resource := range deployment.GetResourcesByType("Microsoft.KeyVault/vaults/secrets") { + secrets = append(secrets, adaptSecret(resource)) + } + return secrets +} + +func adaptSecret(resource azure.Resource) keyvault.Secret { + return keyvault.Secret{ + Metadata: resource.Metadata, + ContentType: resource.Properties.GetMapValue("contentType").AsStringValue("", resource.Metadata), + ExpiryDate: resource.Properties.GetMapValue("attributes").GetMapValue("exp").AsTimeValue(resource.Metadata), + } +} diff --git a/internal/adapters/arm/monitor/adapt.go b/internal/adapters/arm/monitor/adapt.go new file mode 100644 index 000000000000..e6a2afb341b3 --- /dev/null +++ b/internal/adapters/arm/monitor/adapt.go @@ -0,0 +1,45 @@ +package monitor + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/monitor" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +func Adapt(deployment azure.Deployment) monitor.Monitor { + return monitor.Monitor{ + LogProfiles: adaptLogProfiles(deployment), + } +} + +func adaptLogProfiles(deployment azure.Deployment) (logProfiles []monitor.LogProfile) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Insights/logProfiles") { + logProfiles = append(logProfiles, adaptLogProfile(resource)) + } + return logProfiles +} + +func adaptLogProfile(resource azure.Resource) monitor.LogProfile { + categories := resource.Properties.GetMapValue("categories").AsList() + var categoriesList 
[]types.StringValue + for _, category := range categories { + categoriesList = append(categoriesList, category.AsStringValue("", category.Metadata)) + } + + locations := resource.Properties.GetMapValue("locations").AsList() + var locationsList []types.StringValue + for _, location := range locations { + locationsList = append(locationsList, location.AsStringValue("", location.Metadata)) + } + + return monitor.LogProfile{ + Metadata: resource.Metadata, + RetentionPolicy: monitor.RetentionPolicy{ + Metadata: resource.Metadata, + Enabled: resource.Properties.GetMapValue("retentionPolicy").GetMapValue("enabled").AsBoolValue(false, resource.Metadata), + Days: resource.Properties.GetMapValue("retentionPolicy").GetMapValue("days").AsIntValue(0, resource.Metadata), + }, + Categories: categoriesList, + Locations: locationsList, + } +} diff --git a/internal/adapters/arm/network/adapt.go b/internal/adapters/arm/network/adapt.go new file mode 100644 index 000000000000..fe92618143b9 --- /dev/null +++ b/internal/adapters/arm/network/adapt.go @@ -0,0 +1,126 @@ +package network + +import ( + "strconv" + "strings" + + "github.com/aquasecurity/defsec/pkg/providers/azure/network" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +func Adapt(deployment azure.Deployment) network.Network { + return network.Network{ + SecurityGroups: adaptSecurityGroups(deployment), + NetworkWatcherFlowLogs: adaptNetworkWatcherFlowLogs(deployment), + } +} + +func adaptSecurityGroups(deployment azure.Deployment) (sgs []network.SecurityGroup) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Network/networkSecurityGroups") { + sgs = append(sgs, adaptSecurityGroup(resource, deployment)) + } + return sgs + +} + +func adaptSecurityGroup(resource azure.Resource, deployment azure.Deployment) network.SecurityGroup { + return network.SecurityGroup{ + Metadata: resource.Metadata, + Rules: 
adaptSecurityGroupRules(resource, deployment), + } +} + +func adaptSecurityGroupRules(resource azure.Resource, deployment azure.Deployment) (rules []network.SecurityGroupRule) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Network/networkSecurityGroups/securityRules") { + rules = append(rules, adaptSecurityGroupRule(resource)) + } + return rules +} + +func adaptSecurityGroupRule(resource azure.Resource) network.SecurityGroupRule { + sourceAddressPrefixes := resource.Properties.GetMapValue("sourceAddressPrefixes").AsStringValuesList("") + sourceAddressPrefixes = append(sourceAddressPrefixes, resource.Properties.GetMapValue("sourceAddressPrefix").AsStringValue("", resource.Metadata)) + + var sourcePortRanges []network.PortRange + for _, portRange := range resource.Properties.GetMapValue("sourcePortRanges").AsList() { + sourcePortRanges = append(sourcePortRanges, expandRange(portRange.AsString(), resource.Metadata)) + } + sourcePortRanges = append(sourcePortRanges, expandRange(resource.Properties.GetMapValue("sourcePortRange").AsString(), resource.Metadata)) + + destinationAddressPrefixes := resource.Properties.GetMapValue("destinationAddressPrefixes").AsStringValuesList("") + destinationAddressPrefixes = append(destinationAddressPrefixes, resource.Properties.GetMapValue("destinationAddressPrefix").AsStringValue("", resource.Metadata)) + + var destinationPortRanges []network.PortRange + for _, portRange := range resource.Properties.GetMapValue("destinationPortRanges").AsList() { + destinationPortRanges = append(destinationPortRanges, expandRange(portRange.AsString(), resource.Metadata)) + } + destinationPortRanges = append(destinationPortRanges, expandRange(resource.Properties.GetMapValue("destinationPortRange").AsString(), resource.Metadata)) + + allow := defsecTypes.BoolDefault(false, resource.Metadata) + if resource.Properties.GetMapValue("access").AsString() == "Allow" { + allow = defsecTypes.Bool(true, resource.Metadata) + } + + outbound := 
defsecTypes.BoolDefault(false, resource.Metadata) + if resource.Properties.GetMapValue("direction").AsString() == "Outbound" { + outbound = defsecTypes.Bool(true, resource.Metadata) + } + + return network.SecurityGroupRule{ + Metadata: resource.Metadata, + Outbound: outbound, + Allow: allow, + SourceAddresses: sourceAddressPrefixes, + SourcePorts: sourcePortRanges, + DestinationAddresses: destinationAddressPrefixes, + DestinationPorts: destinationPortRanges, + Protocol: resource.Properties.GetMapValue("protocol").AsStringValue("", resource.Metadata), + } +} + +func adaptNetworkWatcherFlowLogs(deployment azure.Deployment) (flowLogs []network.NetworkWatcherFlowLog) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Network/networkWatchers/flowLogs") { + flowLogs = append(flowLogs, adaptNetworkWatcherFlowLog(resource)) + } + return flowLogs +} + +func adaptNetworkWatcherFlowLog(resource azure.Resource) network.NetworkWatcherFlowLog { + return network.NetworkWatcherFlowLog{ + Metadata: resource.Metadata, + RetentionPolicy: network.RetentionPolicy{ + Metadata: resource.Metadata, + Enabled: resource.Properties.GetMapValue("retentionPolicy").GetMapValue("enabled").AsBoolValue(false, resource.Metadata), + Days: resource.Properties.GetMapValue("retentionPolicy").GetMapValue("days").AsIntValue(0, resource.Metadata), + }, + } +} + +func expandRange(r string, m defsecTypes.Metadata) network.PortRange { + start := 0 + end := 65535 + switch { + case r == "*": + case strings.Contains(r, "-"): + if parts := strings.Split(r, "-"); len(parts) == 2 { + if p1, err := strconv.ParseInt(parts[0], 10, 32); err == nil { + start = int(p1) + } + if p2, err := strconv.ParseInt(parts[1], 10, 32); err == nil { + end = int(p2) + } + } + default: + if val, err := strconv.ParseInt(r, 10, 32); err == nil { + start = int(val) + end = int(val) + } + } + + return network.PortRange{ + Metadata: m, + Start: start, + End: end, + } +} diff --git 
a/internal/adapters/arm/securitycenter/adapt.go b/internal/adapters/arm/securitycenter/adapt.go new file mode 100644 index 000000000000..ee3f73cad064 --- /dev/null +++ b/internal/adapters/arm/securitycenter/adapt.go @@ -0,0 +1,43 @@ +package securitycenter + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/securitycenter" + "github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +func Adapt(deployment azure.Deployment) securitycenter.SecurityCenter { + return securitycenter.SecurityCenter{ + Contacts: adaptContacts(deployment), + Subscriptions: adaptSubscriptions(deployment), + } +} + +func adaptContacts(deployment azure.Deployment) (contacts []securitycenter.Contact) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Security/securityContacts") { + contacts = append(contacts, adaptContact(resource)) + } + + return contacts +} + +func adaptContact(resource azure.Resource) securitycenter.Contact { + return securitycenter.Contact{ + Metadata: resource.Metadata, + EnableAlertNotifications: resource.Properties.GetMapValue("email").AsBoolValue(false, resource.Metadata), + Phone: resource.Properties.GetMapValue("phone").AsStringValue("", resource.Metadata), + } +} + +func adaptSubscriptions(deployment azure.Deployment) (subscriptions []securitycenter.SubscriptionPricing) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Security/pricings") { + subscriptions = append(subscriptions, adaptSubscription(resource)) + } + return subscriptions +} + +func adaptSubscription(resource azure.Resource) securitycenter.SubscriptionPricing { + return securitycenter.SubscriptionPricing{ + Metadata: resource.Metadata, + Tier: resource.Properties.GetMapValue("pricingTier").AsStringValue("Free", resource.Metadata), + } +} diff --git a/internal/adapters/arm/storage/adapt.go b/internal/adapters/arm/storage/adapt.go new file mode 100644 index 000000000000..e81e85cc1c26 --- /dev/null +++ 
b/internal/adapters/arm/storage/adapt.go @@ -0,0 +1,69 @@ +package storage + +import ( + "strings" + + "github.com/aquasecurity/defsec/pkg/providers/azure/storage" + "github.com/aquasecurity/trivy/pkg/scanners/azure" + + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(deployment azure.Deployment) storage.Storage { + return storage.Storage{ + Accounts: adaptAccounts(deployment), + } +} + +func adaptAccounts(deployment azure.Deployment) []storage.Account { + var accounts []storage.Account + for _, resource := range deployment.GetResourcesByType("Microsoft.Storage/storageAccounts") { + + var networkRules []storage.NetworkRule + for _, acl := range resource.Properties.GetMapValue("networkAcls").AsList() { + + var bypasses []types.StringValue + bypassProp := acl.GetMapValue("bypass") + for _, bypass := range strings.Split(bypassProp.AsString(), ",") { + bypasses = append(bypasses, types.String(bypass, bypassProp.GetMetadata())) + } + + networkRules = append(networkRules, storage.NetworkRule{ + Metadata: acl.GetMetadata(), + Bypass: bypasses, + AllowByDefault: types.Bool(acl.GetMapValue("defaultAction").EqualTo("Allow"), acl.GetMetadata()), + }) + } + + var queues []storage.Queue + for _, queueResource := range resource.GetResourcesByType("queueServices/queues") { + queues = append(queues, storage.Queue{ + Metadata: queueResource.Metadata, + Name: queueResource.Name.AsStringValue("", queueResource.Metadata), + }) + } + + var containers []storage.Container + for _, containerResource := range resource.GetResourcesByType("containerServices/containers") { + containers = append(containers, storage.Container{ + Metadata: containerResource.Metadata, + PublicAccess: containerResource.Properties.GetMapValue("publicAccess").AsStringValue("None", containerResource.Metadata), + }) + } + + account := storage.Account{ + Metadata: resource.Metadata, + NetworkRules: networkRules, + EnforceHTTPS: 
resource.Properties.GetMapValue("supportsHttpsTrafficOnly").AsBoolValue(false, resource.Properties.GetMetadata()), + Containers: containers, + QueueProperties: storage.QueueProperties{ + Metadata: resource.Properties.GetMetadata(), + EnableLogging: types.BoolDefault(false, resource.Properties.GetMetadata()), + }, + MinimumTLSVersion: resource.Properties.GetMapValue("minimumTlsVersion").AsStringValue("TLS1_0", resource.Properties.GetMetadata()), + Queues: queues, + } + accounts = append(accounts, account) + } + return accounts +} diff --git a/internal/adapters/arm/storage/adapt_test.go b/internal/adapters/arm/storage/adapt_test.go new file mode 100644 index 000000000000..494863b607a9 --- /dev/null +++ b/internal/adapters/arm/storage/adapt_test.go @@ -0,0 +1,59 @@ +package storage + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/scanners/azure" + + "github.com/stretchr/testify/assert" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/require" +) + +func Test_AdaptStorageDefaults(t *testing.T) { + + input := azure.Deployment{ + Resources: []azure.Resource{ + { + Type: azure.NewValue("Microsoft.Storage/storageAccounts", types.NewTestMetadata()), + Properties: azure.NewValue(map[string]azure.Value{}, types.NewTestMetadata()), + }, + }, + } + + output := Adapt(input) + + require.Len(t, output.Accounts, 1) + + account := output.Accounts[0] + assert.Equal(t, "TLS1_0", account.MinimumTLSVersion.Value()) + assert.Equal(t, false, account.EnforceHTTPS.Value()) + +} + +func Test_AdaptStorage(t *testing.T) { + + input := azure.Deployment{ + Resources: []azure.Resource{ + { + Type: azure.NewValue("Microsoft.Storage/storageAccounts", types.NewTestMetadata()), + Name: azure.Value{}, + Properties: azure.NewValue(map[string]azure.Value{ + "minimumTlsVersion": azure.NewValue("TLS1_2", types.NewTestMetadata()), + "supportsHttpsTrafficOnly": azure.NewValue(true, types.NewTestMetadata()), + }, 
types.NewTestMetadata()), + }, + }, + } + + output := Adapt(input) + + require.Len(t, output.Accounts, 1) + + account := output.Accounts[0] + assert.Equal(t, "TLS1_2", account.MinimumTLSVersion.Value()) + assert.Equal(t, true, account.EnforceHTTPS.Value()) + +} diff --git a/internal/adapters/arm/synapse/adapt.go b/internal/adapters/arm/synapse/adapt.go new file mode 100644 index 000000000000..f67dbaf5bfef --- /dev/null +++ b/internal/adapters/arm/synapse/adapt.go @@ -0,0 +1,34 @@ +package synapse + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/synapse" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/azure" +) + +func Adapt(deployment azure.Deployment) synapse.Synapse { + return synapse.Synapse{ + Workspaces: adaptWorkspaces(deployment), + } +} + +func adaptWorkspaces(deployment azure.Deployment) (workspaces []synapse.Workspace) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Synapse/workspaces") { + workspaces = append(workspaces, adaptWorkspace(resource)) + } + return workspaces +} + +func adaptWorkspace(resource azure.Resource) synapse.Workspace { + + managedVirtualNetwork := resource.Properties.GetMapValue("managedVirtualNetwork").AsString() + enableManagedVirtualNetwork := types.BoolDefault(false, resource.Metadata) + if managedVirtualNetwork == "default" { + enableManagedVirtualNetwork = types.Bool(true, resource.Metadata) + } + + return synapse.Workspace{ + Metadata: resource.Metadata, + EnableManagedVirtualNetwork: enableManagedVirtualNetwork, + } +} diff --git a/internal/adapters/cloudformation/adapt.go b/internal/adapters/cloudformation/adapt.go new file mode 100644 index 000000000000..bc2fb65c5bb3 --- /dev/null +++ b/internal/adapters/cloudformation/adapt.go @@ -0,0 +1,14 @@ +package cloudformation + +import ( + "github.com/aquasecurity/defsec/pkg/state" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws" + 
"github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) *state.State { + return &state.State{ + AWS: aws.Adapt(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go b/internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go new file mode 100644 index 000000000000..db59784ee91e --- /dev/null +++ b/internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go @@ -0,0 +1,13 @@ +package accessanalyzer + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/accessanalyzer" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) accessanalyzer.AccessAnalyzer { + return accessanalyzer.AccessAnalyzer{ + Analyzers: getAccessAnalyzer(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/accessanalyzer/analyzer.go b/internal/adapters/cloudformation/aws/accessanalyzer/analyzer.go new file mode 100644 index 000000000000..50cdd9c920b0 --- /dev/null +++ b/internal/adapters/cloudformation/aws/accessanalyzer/analyzer.go @@ -0,0 +1,24 @@ +package accessanalyzer + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/accessanalyzer" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getAccessAnalyzer(ctx parser.FileContext) (analyzers []accessanalyzer.Analyzer) { + + analyzersList := ctx.GetResourcesByType("AWS::AccessAnalyzer::Analyzer") + + for _, r := range analyzersList { + aa := accessanalyzer.Analyzer{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("AnalyzerName"), + ARN: r.StringDefault(""), + Active: types.BoolDefault(false, r.Metadata()), + } + + analyzers = append(analyzers, aa) + } + return analyzers +} diff --git a/internal/adapters/cloudformation/aws/adapt.go b/internal/adapters/cloudformation/aws/adapt.go new file 
mode 100644 index 000000000000..4e4446ad857e --- /dev/null +++ b/internal/adapters/cloudformation/aws/adapt.go @@ -0,0 +1,74 @@ +package aws + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/apigateway" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/athena" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/cloudfront" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/cloudtrail" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/cloudwatch" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/codebuild" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/config" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/documentdb" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/dynamodb" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/ec2" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/ecr" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/ecs" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/efs" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/eks" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/elasticache" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/elasticsearch" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/elb" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/iam" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/kinesis" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/lambda" + 
"github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/mq" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/msk" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/neptune" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/rds" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/redshift" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/s3" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/sam" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/sns" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/sqs" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/ssm" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/workspaces" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) aws.AWS { + return aws.AWS{ + APIGateway: apigateway.Adapt(cfFile), + Athena: athena.Adapt(cfFile), + Cloudfront: cloudfront.Adapt(cfFile), + CloudTrail: cloudtrail.Adapt(cfFile), + CloudWatch: cloudwatch.Adapt(cfFile), + CodeBuild: codebuild.Adapt(cfFile), + Config: config.Adapt(cfFile), + DocumentDB: documentdb.Adapt(cfFile), + DynamoDB: dynamodb.Adapt(cfFile), + EC2: ec2.Adapt(cfFile), + ECR: ecr.Adapt(cfFile), + ECS: ecs.Adapt(cfFile), + EFS: efs.Adapt(cfFile), + IAM: iam.Adapt(cfFile), + EKS: eks.Adapt(cfFile), + ElastiCache: elasticache.Adapt(cfFile), + Elasticsearch: elasticsearch.Adapt(cfFile), + ELB: elb.Adapt(cfFile), + MSK: msk.Adapt(cfFile), + MQ: mq.Adapt(cfFile), + Kinesis: kinesis.Adapt(cfFile), + Lambda: lambda.Adapt(cfFile), + Neptune: neptune.Adapt(cfFile), + RDS: rds.Adapt(cfFile), + Redshift: redshift.Adapt(cfFile), + S3: s3.Adapt(cfFile), + SAM: sam.Adapt(cfFile), + SNS: sns.Adapt(cfFile), + SQS: sqs.Adapt(cfFile), + SSM: ssm.Adapt(cfFile), + WorkSpaces: workspaces.Adapt(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/apigateway/apigateway.go b/internal/adapters/cloudformation/aws/apigateway/apigateway.go new file mode 100644 index 000000000000..56b82fc069f7 --- /dev/null +++ b/internal/adapters/cloudformation/aws/apigateway/apigateway.go @@ -0,0 +1,21 @@ +package apigateway + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway" + v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" + v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) apigateway.APIGateway { + return apigateway.APIGateway{ + V1: v1.APIGateway{ + APIs: nil, + DomainNames: nil, + }, + V2: v2.APIGateway{ + APIs: getApis(cfFile), + }, + } +} diff --git a/internal/adapters/cloudformation/aws/apigateway/stage.go b/internal/adapters/cloudformation/aws/apigateway/stage.go new file mode 100644 index 000000000000..550df03de608 --- /dev/null +++ b/internal/adapters/cloudformation/aws/apigateway/stage.go @@ -0,0 +1,68 @@ +package apigateway + +import ( + v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getApis(cfFile parser.FileContext) (apis []v2.API) { + + apiResources := cfFile.GetResourcesByType("AWS::ApiGatewayV2::Api") + for _, apiRes := range apiResources { + api := v2.API{ + Metadata: apiRes.Metadata(), + Name: types.StringDefault("", apiRes.Metadata()), + ProtocolType: types.StringDefault("", apiRes.Metadata()), + Stages: getStages(apiRes.ID(), cfFile), + } + apis = append(apis, api) + } + + return apis +} + +func getStages(apiId string, cfFile parser.FileContext) []v2.Stage { + var apiStages []v2.Stage + + stageResources := cfFile.GetResourcesByType("AWS::ApiGatewayV2::Stage") + for _, r := range stageResources { + stageApiId := r.GetStringProperty("ApiId") + if stageApiId.Value() != apiId { + continue + } + + s := v2.Stage{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("StageName"), + AccessLogging: getAccessLogging(r), + } + apiStages = append(apiStages, s) + } + + return apiStages +} + +func getAccessLogging(r *parser.Resource) v2.AccessLogging { + + loggingProp := r.GetProperty("AccessLogSettings") + if loggingProp.IsNil() { + return v2.AccessLogging{ + Metadata: r.Metadata(), + CloudwatchLogGroupARN: types.StringDefault("", r.Metadata()), + } + } + + destinationProp := 
r.GetProperty("AccessLogSettings.DestinationArn") + + if destinationProp.IsNil() { + return v2.AccessLogging{ + Metadata: loggingProp.Metadata(), + CloudwatchLogGroupARN: types.StringDefault("", r.Metadata()), + } + } + return v2.AccessLogging{ + Metadata: destinationProp.Metadata(), + CloudwatchLogGroupARN: destinationProp.AsStringValue(), + } +} diff --git a/internal/adapters/cloudformation/aws/athena/athena.go b/internal/adapters/cloudformation/aws/athena/athena.go new file mode 100644 index 000000000000..7207fd65bac9 --- /dev/null +++ b/internal/adapters/cloudformation/aws/athena/athena.go @@ -0,0 +1,14 @@ +package athena + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/athena" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) athena.Athena { + return athena.Athena{ + Databases: nil, + Workgroups: getWorkGroups(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/athena/workgroup.go b/internal/adapters/cloudformation/aws/athena/workgroup.go new file mode 100644 index 000000000000..fa4f2219b82f --- /dev/null +++ b/internal/adapters/cloudformation/aws/athena/workgroup.go @@ -0,0 +1,30 @@ +package athena + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/athena" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getWorkGroups(cfFile parser.FileContext) []athena.Workgroup { + + var workgroups []athena.Workgroup + + workgroupResources := cfFile.GetResourcesByType("AWS::Athena::WorkGroup") + + for _, r := range workgroupResources { + + wg := athena.Workgroup{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("Name"), + Encryption: athena.EncryptionConfiguration{ + Metadata: r.Metadata(), + Type: r.GetStringProperty("WorkGroupConfiguration.ResultConfiguration.EncryptionConfiguration.EncryptionOption"), + }, + EnforceConfiguration: 
r.GetBoolProperty("WorkGroupConfiguration.EnforceWorkGroupConfiguration"), + } + + workgroups = append(workgroups, wg) + } + + return workgroups +} diff --git a/internal/adapters/cloudformation/aws/cloudfront/cloudfront.go b/internal/adapters/cloudformation/aws/cloudfront/cloudfront.go new file mode 100644 index 000000000000..c7b8cbc5c049 --- /dev/null +++ b/internal/adapters/cloudformation/aws/cloudfront/cloudfront.go @@ -0,0 +1,13 @@ +package cloudfront + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudfront" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) cloudfront.Cloudfront { + return cloudfront.Cloudfront{ + Distributions: getDistributions(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/cloudfront/distribution.go b/internal/adapters/cloudformation/aws/cloudfront/distribution.go new file mode 100644 index 000000000000..e1b23c80794f --- /dev/null +++ b/internal/adapters/cloudformation/aws/cloudfront/distribution.go @@ -0,0 +1,55 @@ +package cloudfront + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudfront" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getDistributions(ctx parser.FileContext) (distributions []cloudfront.Distribution) { + + distributionResources := ctx.GetResourcesByType("AWS::CloudFront::Distribution") + + for _, r := range distributionResources { + distribution := cloudfront.Distribution{ + Metadata: r.Metadata(), + WAFID: r.GetStringProperty("DistributionConfig.WebACLId"), + Logging: cloudfront.Logging{ + Metadata: r.Metadata(), + Bucket: r.GetStringProperty("DistributionConfig.Logging.Bucket"), + }, + DefaultCacheBehaviour: getDefaultCacheBehaviour(r), + OrdererCacheBehaviours: nil, + ViewerCertificate: cloudfront.ViewerCertificate{ + Metadata: r.Metadata(), + MinimumProtocolVersion: 
r.GetStringProperty("DistributionConfig.ViewerCertificate.MinimumProtocolVersion"), + }, + } + + distributions = append(distributions, distribution) + } + + return distributions +} + +func getDefaultCacheBehaviour(r *parser.Resource) cloudfront.CacheBehaviour { + defaultCache := r.GetProperty("DistributionConfig.DefaultCacheBehavior") + if defaultCache.IsNil() { + return cloudfront.CacheBehaviour{ + Metadata: r.Metadata(), + ViewerProtocolPolicy: types.StringDefault("allow-all", r.Metadata()), + } + } + protoProp := r.GetProperty("DistributionConfig.DefaultCacheBehavior.ViewerProtocolPolicy") + if protoProp.IsNotString() { + return cloudfront.CacheBehaviour{ + Metadata: r.Metadata(), + ViewerProtocolPolicy: types.StringDefault("allow-all", r.Metadata()), + } + } + + return cloudfront.CacheBehaviour{ + Metadata: r.Metadata(), + ViewerProtocolPolicy: protoProp.AsStringValue(), + } +} diff --git a/internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go b/internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go new file mode 100644 index 000000000000..30d6892c3dcd --- /dev/null +++ b/internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go @@ -0,0 +1,13 @@ +package cloudtrail + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudtrail" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) cloudtrail.CloudTrail { + return cloudtrail.CloudTrail{ + Trails: getCloudTrails(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/cloudtrail/trails.go b/internal/adapters/cloudformation/aws/cloudtrail/trails.go new file mode 100644 index 000000000000..ebaf6250cda4 --- /dev/null +++ b/internal/adapters/cloudformation/aws/cloudtrail/trails.go @@ -0,0 +1,27 @@ +package cloudtrail + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudtrail" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getCloudTrails(ctx parser.FileContext) (trails []cloudtrail.Trail) { + + cloudtrailResources := ctx.GetResourcesByType("AWS::CloudTrail::Trail") + + for _, r := range cloudtrailResources { + ct := cloudtrail.Trail{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("TrailName"), + EnableLogFileValidation: r.GetBoolProperty("EnableLogFileValidation"), + IsMultiRegion: r.GetBoolProperty("IsMultiRegionTrail"), + KMSKeyID: r.GetStringProperty("KmsKeyId"), + CloudWatchLogsLogGroupArn: r.GetStringProperty("CloudWatchLogsLogGroupArn"), + IsLogging: r.GetBoolProperty("IsLogging"), + BucketName: r.GetStringProperty("S3BucketName"), + } + + trails = append(trails, ct) + } + return trails +} diff --git a/internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go b/internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go new file mode 100644 index 000000000000..1ef5ab08b686 --- /dev/null +++ b/internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go @@ -0,0 +1,14 @@ +package cloudwatch + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudwatch" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) cloudwatch.CloudWatch { + return cloudwatch.CloudWatch{ + LogGroups: getLogGroups(cfFile), + Alarms: nil, + } +} diff --git a/internal/adapters/cloudformation/aws/cloudwatch/log_group.go b/internal/adapters/cloudformation/aws/cloudwatch/log_group.go new file mode 100644 index 000000000000..bffb0fa361ef --- /dev/null +++ b/internal/adapters/cloudformation/aws/cloudwatch/log_group.go @@ -0,0 +1,26 @@ +package cloudwatch + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudwatch" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getLogGroups(ctx parser.FileContext) (logGroups []cloudwatch.LogGroup) { + + logGroupResources := ctx.GetResourcesByType("AWS::Logs::LogGroup") + + for _, r := range logGroupResources { + group := cloudwatch.LogGroup{ + Metadata: r.Metadata(), + Arn: types.StringDefault("", r.Metadata()), + Name: r.GetStringProperty("LogGroupName"), + KMSKeyID: r.GetStringProperty("KmsKeyId"), + RetentionInDays: r.GetIntProperty("RetentionInDays", 0), + MetricFilters: nil, + } + logGroups = append(logGroups, group) + } + + return logGroups +} diff --git a/internal/adapters/cloudformation/aws/codebuild/codebuild.go b/internal/adapters/cloudformation/aws/codebuild/codebuild.go new file mode 100644 index 000000000000..b6a6a6753c00 --- /dev/null +++ b/internal/adapters/cloudformation/aws/codebuild/codebuild.go @@ -0,0 +1,13 @@ +package codebuild + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/codebuild" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) codebuild.CodeBuild { + return codebuild.CodeBuild{ + Projects: getProjects(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/codebuild/project.go b/internal/adapters/cloudformation/aws/codebuild/project.go new file mode 100644 index 000000000000..7d72c4760c4e --- /dev/null +++ b/internal/adapters/cloudformation/aws/codebuild/project.go @@ -0,0 +1,63 @@ +package codebuild + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/codebuild" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getProjects(ctx parser.FileContext) (projects []codebuild.Project) { + + projectResources := ctx.GetResourcesByType("AWS::CodeBuild::Project") + + for _, r := range projectResources { + project := codebuild.Project{ + Metadata: r.Metadata(), + ArtifactSettings: getArtifactSettings(r), + SecondaryArtifactSettings: getSecondaryArtifactSettings(r), + } + + projects = append(projects, project) + } + + return projects +} + +func getSecondaryArtifactSettings(r *parser.Resource) (secondaryArtifacts []codebuild.ArtifactSettings) { + secondaryArtifactsList := r.GetProperty("SecondaryArtifacts") + if secondaryArtifactsList.IsNil() || !secondaryArtifactsList.IsList() { + return + } + + for _, a := range secondaryArtifactsList.AsList() { + settings := codebuild.ArtifactSettings{ + Metadata: secondaryArtifactsList.Metadata(), + EncryptionEnabled: types.BoolDefault(true, secondaryArtifactsList.Metadata()), + } + encryptionDisabled := a.GetProperty("EncryptionDisabled") + if encryptionDisabled.IsBool() { + settings.EncryptionEnabled = types.Bool(!encryptionDisabled.AsBool(), encryptionDisabled.Metadata()) + } + secondaryArtifacts = append(secondaryArtifacts, settings) + } + + return secondaryArtifacts +} + +func getArtifactSettings(r *parser.Resource) codebuild.ArtifactSettings { + + settings := codebuild.ArtifactSettings{ + Metadata: 
r.Metadata(), + EncryptionEnabled: types.BoolDefault(true, r.Metadata()), + } + + artifactsProperty := r.GetProperty("Artifacts") + if artifactsProperty.IsNotNil() { + encryptionDisabled := artifactsProperty.GetProperty("EncryptionDisabled") + if encryptionDisabled.IsBool() { + settings.EncryptionEnabled = types.Bool(!encryptionDisabled.AsBool(), encryptionDisabled.Metadata()) + } + } + + return settings +} diff --git a/internal/adapters/cloudformation/aws/config/adapt_test.go b/internal/adapters/cloudformation/aws/config/adapt_test.go new file mode 100644 index 000000000000..a7e21abe61d9 --- /dev/null +++ b/internal/adapters/cloudformation/aws/config/adapt_test.go @@ -0,0 +1,71 @@ +package config + +import ( + "context" + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/aws/config" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected config.Config + }{ + { + name: "Config aggregator with AccountAggregationSources", + source: `AWSTemplateFormatVersion: "2010-09-09" +Resources: + ConfigurationAggregator: + Type: AWS::Config::ConfigurationAggregator + Properties: + AccountAggregationSources: + - AllAwsRegions: "true" +`, + expected: config.Config{ + ConfigurationAggregrator: config.ConfigurationAggregrator{ + Metadata: types.NewTestMetadata(), + SourceAllRegions: types.Bool(true, types.NewTestMetadata()), + }, + }, + }, + { + name: "Config aggregator with OrganizationAggregationSource", + source: `AWSTemplateFormatVersion: "2010-09-09" +Resources: + ConfigurationAggregator: + Type: AWS::Config::ConfigurationAggregator + Properties: + OrganizationAggregationSource: + AllAwsRegions: "true" +`, + expected: config.Config{ + ConfigurationAggregrator: 
config.ConfigurationAggregrator{ + Metadata: types.NewTestMetadata(), + SourceAllRegions: types.Bool(true, types.NewTestMetadata()), + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "template.yaml": tt.source, + }) + + p := parser.New() + fctx, err := p.ParseFile(context.TODO(), fs, "template.yaml") + require.NoError(t, err) + + testutil.AssertDefsecEqual(t, tt.expected, Adapt(*fctx)) + }) + } + +} diff --git a/internal/adapters/cloudformation/aws/config/aggregator.go b/internal/adapters/cloudformation/aws/config/aggregator.go new file mode 100644 index 000000000000..cecea4af59f1 --- /dev/null +++ b/internal/adapters/cloudformation/aws/config/aggregator.go @@ -0,0 +1,41 @@ +package config + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/config" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getConfigurationAggregator(ctx parser.FileContext) config.ConfigurationAggregrator { + + aggregator := config.ConfigurationAggregrator{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + SourceAllRegions: defsecTypes.BoolDefault(false, ctx.Metadata()), + } + + aggregatorResources := ctx.GetResourcesByType("AWS::Config::ConfigurationAggregator") + + if len(aggregatorResources) == 0 { + return aggregator + } + + return config.ConfigurationAggregrator{ + Metadata: aggregatorResources[0].Metadata(), + SourceAllRegions: isSourcingAllRegions(aggregatorResources[0]), + } +} + +func isSourcingAllRegions(r *parser.Resource) defsecTypes.BoolValue { + accountProp := r.GetProperty("AccountAggregationSources") + + if accountProp.IsNotNil() && accountProp.IsList() { + for _, a := range accountProp.AsList() { + regionsProp := a.GetProperty("AllAwsRegions") + if regionsProp.IsNotNil() { + return a.GetBoolProperty("AllAwsRegions") + } + } + } + + return 
r.GetBoolProperty("OrganizationAggregationSource.AllAwsRegions") +} diff --git a/internal/adapters/cloudformation/aws/config/config.go b/internal/adapters/cloudformation/aws/config/config.go new file mode 100644 index 000000000000..1e19585b2c88 --- /dev/null +++ b/internal/adapters/cloudformation/aws/config/config.go @@ -0,0 +1,13 @@ +package config + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/config" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) config.Config { + return config.Config{ + ConfigurationAggregrator: getConfigurationAggregator(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/documentdb/cluster.go b/internal/adapters/cloudformation/aws/documentdb/cluster.go new file mode 100644 index 000000000000..8f19241e68da --- /dev/null +++ b/internal/adapters/cloudformation/aws/documentdb/cluster.go @@ -0,0 +1,58 @@ +package documentdb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/documentdb" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getClusters(ctx parser.FileContext) (clusters []documentdb.Cluster) { + + clusterResources := ctx.GetResourcesByType("AWS::DocDB::DBCluster") + + for _, r := range clusterResources { + cluster := documentdb.Cluster{ + Metadata: r.Metadata(), + Identifier: r.GetStringProperty("DBClusterIdentifier"), + EnabledLogExports: getLogExports(r), + Instances: nil, + BackupRetentionPeriod: r.GetIntProperty("BackupRetentionPeriod", 1), + StorageEncrypted: r.GetBoolProperty("StorageEncrypted"), + KMSKeyID: r.GetStringProperty("KmsKeyId"), + } + + updateInstancesOnCluster(&cluster, ctx) + + clusters = append(clusters, cluster) + } + return clusters +} + +func updateInstancesOnCluster(cluster *documentdb.Cluster, ctx parser.FileContext) { + + instanceResources := 
ctx.GetResourcesByType("AWS::DocDB::DBInstance") + + for _, r := range instanceResources { + clusterIdentifier := r.GetStringProperty("DBClusterIdentifier") + if clusterIdentifier == cluster.Identifier { + cluster.Instances = append(cluster.Instances, documentdb.Instance{ + Metadata: r.Metadata(), + KMSKeyID: cluster.KMSKeyID, + }) + } + } +} + +func getLogExports(r *parser.Resource) (logExports []types.StringValue) { + + exportsList := r.GetProperty("EnableCloudwatchLogsExports") + + if exportsList.IsNil() || exportsList.IsNotList() { + return logExports + } + + for _, export := range exportsList.AsList() { + logExports = append(logExports, export.AsStringValue()) + } + return logExports +} diff --git a/internal/adapters/cloudformation/aws/documentdb/documentdb.go b/internal/adapters/cloudformation/aws/documentdb/documentdb.go new file mode 100644 index 000000000000..e7a6ac47d85a --- /dev/null +++ b/internal/adapters/cloudformation/aws/documentdb/documentdb.go @@ -0,0 +1,13 @@ +package documentdb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/documentdb" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) documentdb.DocumentDB { + return documentdb.DocumentDB{ + Clusters: getClusters(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/dynamodb/cluster.go b/internal/adapters/cloudformation/aws/dynamodb/cluster.go new file mode 100644 index 000000000000..d2a38b59c39b --- /dev/null +++ b/internal/adapters/cloudformation/aws/dynamodb/cluster.go @@ -0,0 +1,36 @@ +package dynamodb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/dynamodb" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getClusters(file parser.FileContext) (clusters []dynamodb.DAXCluster) { + + clusterResources := file.GetResourcesByType("AWS::DAX::Cluster") + + for _, r := range clusterResources { + cluster := dynamodb.DAXCluster{ + Metadata: r.Metadata(), + ServerSideEncryption: dynamodb.ServerSideEncryption{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + KMSKeyID: defsecTypes.StringDefault("", r.Metadata()), + }, + PointInTimeRecovery: defsecTypes.BoolUnresolvable(r.Metadata()), + } + + if sseProp := r.GetProperty("SSESpecification"); sseProp.IsNotNil() { + cluster.ServerSideEncryption = dynamodb.ServerSideEncryption{ + Metadata: sseProp.Metadata(), + Enabled: r.GetBoolProperty("SSESpecification.SSEEnabled"), + KMSKeyID: defsecTypes.StringUnresolvable(sseProp.Metadata()), + } + } + + clusters = append(clusters, cluster) + } + + return clusters +} diff --git a/internal/adapters/cloudformation/aws/dynamodb/dynamodb.go b/internal/adapters/cloudformation/aws/dynamodb/dynamodb.go new file mode 100644 index 000000000000..67cb9b9b264d --- /dev/null +++ b/internal/adapters/cloudformation/aws/dynamodb/dynamodb.go @@ -0,0 +1,13 @@ +package dynamodb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/dynamodb" + 
"github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) dynamodb.DynamoDB { + return dynamodb.DynamoDB{ + DAXClusters: getClusters(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/ec2/adapt_test.go b/internal/adapters/cloudformation/aws/ec2/adapt_test.go new file mode 100644 index 000000000000..735b2fbca80f --- /dev/null +++ b/internal/adapters/cloudformation/aws/ec2/adapt_test.go @@ -0,0 +1,176 @@ +package ec2 + +import ( + "context" + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected ec2.EC2 + }{ + { + name: "ec2 instance", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyEC2Instance: + Type: AWS::EC2::Instance + Properties: + ImageId: "ami-79fd7eee" + KeyName: "testkey" + BlockDeviceMappings: + - DeviceName: "/dev/sdm" + Ebs: + VolumeType: "io1" + Iops: "200" + DeleteOnTermination: "false" + VolumeSize: "20" + Encrypted: true + - DeviceName: "/dev/sdk" + NoDevice: {} +`, + expected: ec2.EC2{ + Instances: []ec2.Instance{ + { + Metadata: types.NewTestMetadata(), + MetadataOptions: ec2.MetadataOptions{ + HttpEndpoint: types.StringDefault("enabled", types.NewTestMetadata()), + HttpTokens: types.StringDefault("optional", types.NewTestMetadata()), + }, + RootBlockDevice: &ec2.BlockDevice{ + Metadata: types.NewTestMetadata(), + Encrypted: types.BoolDefault(true, types.NewTestMetadata()), + }, + EBSBlockDevices: []*ec2.BlockDevice{ + { + Metadata: types.NewTestMetadata(), + Encrypted: types.BoolDefault(false, types.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + { + name: "ec2 instance with launch 
template, ref to name", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyLaunchTemplate: + Type: AWS::EC2::LaunchTemplate + Properties: + LaunchTemplateName: MyTemplate + LaunchTemplateData: + MetadataOptions: + HttpEndpoint: enabled + HttpTokens: required + MyEC2Instance: + Type: AWS::EC2::Instance + Properties: + ImageId: "ami-79fd7eee" + LaunchTemplate: + LaunchTemplateName: MyTemplate +`, + expected: ec2.EC2{ + LaunchTemplates: []ec2.LaunchTemplate{ + { + Metadata: types.NewTestMetadata(), + Name: types.String("MyTemplate", types.NewTestMetadata()), + Instance: ec2.Instance{ + Metadata: types.NewTestMetadata(), + MetadataOptions: ec2.MetadataOptions{ + HttpEndpoint: types.String("enabled", types.NewTestMetadata()), + HttpTokens: types.String("required", types.NewTestMetadata()), + }, + }, + }, + }, + Instances: []ec2.Instance{ + { + Metadata: types.NewTestMetadata(), + MetadataOptions: ec2.MetadataOptions{ + HttpEndpoint: types.String("enabled", types.NewTestMetadata()), + HttpTokens: types.String("required", types.NewTestMetadata()), + }, + RootBlockDevice: &ec2.BlockDevice{ + Metadata: types.NewTestMetadata(), + Encrypted: types.Bool(false, types.NewTestMetadata()), + }, + }, + }, + }, + }, + { + name: "ec2 instance with launch template, ref to id", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyLaunchTemplate: + Type: AWS::EC2::LaunchTemplate + Properties: + LaunchTemplateName: MyTemplate + LaunchTemplateData: + MetadataOptions: + HttpEndpoint: enabled + HttpTokens: required + MyEC2Instance: + Type: AWS::EC2::Instance + Properties: + ImageId: "ami-79fd7eee" + LaunchTemplate: + LaunchTemplateId: !Ref MyLaunchTemplate +`, + expected: ec2.EC2{ + LaunchTemplates: []ec2.LaunchTemplate{ + { + Metadata: types.NewTestMetadata(), + Name: types.String("MyTemplate", types.NewTestMetadata()), + Instance: ec2.Instance{ + Metadata: types.NewTestMetadata(), + MetadataOptions: ec2.MetadataOptions{ + HttpEndpoint: types.String("enabled", 
types.NewTestMetadata()), + HttpTokens: types.String("required", types.NewTestMetadata()), + }, + }, + }, + }, + Instances: []ec2.Instance{ + { + Metadata: types.NewTestMetadata(), + MetadataOptions: ec2.MetadataOptions{ + HttpEndpoint: types.String("enabled", types.NewTestMetadata()), + HttpTokens: types.String("required", types.NewTestMetadata()), + }, + RootBlockDevice: &ec2.BlockDevice{ + Metadata: types.NewTestMetadata(), + Encrypted: types.Bool(false, types.NewTestMetadata()), + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + + fsys := testutil.CreateFS(t, map[string]string{ + "main.yaml": tt.source, + }) + + fctx, err := parser.New().ParseFile(context.TODO(), fsys, "main.yaml") + require.NoError(t, err) + + adapted := Adapt(*fctx) + testutil.AssertDefsecEqual(t, tt.expected, adapted) + }) + } + +} diff --git a/internal/adapters/cloudformation/aws/ec2/ec2.go b/internal/adapters/cloudformation/aws/ec2/ec2.go new file mode 100644 index 000000000000..40173b39fded --- /dev/null +++ b/internal/adapters/cloudformation/aws/ec2/ec2.go @@ -0,0 +1,20 @@ +package ec2 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) ec2.EC2 { + return ec2.EC2{ + LaunchConfigurations: getLaunchConfigurations(cfFile), + LaunchTemplates: getLaunchTemplates(cfFile), + Instances: getInstances(cfFile), + VPCs: nil, + NetworkACLs: getNetworkACLs(cfFile), + SecurityGroups: getSecurityGroups(cfFile), + Subnets: getSubnets(cfFile), + Volumes: getVolumes(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/ec2/instance.go b/internal/adapters/cloudformation/aws/ec2/instance.go new file mode 100644 index 000000000000..7bb3637e9bfe --- /dev/null +++ b/internal/adapters/cloudformation/aws/ec2/instance.go @@ -0,0 +1,106 @@ +package ec2 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getInstances(ctx parser.FileContext) (instances []ec2.Instance) { + instanceResources := ctx.GetResourcesByType("AWS::EC2::Instance") + + for _, r := range instanceResources { + instance := ec2.Instance{ + Metadata: r.Metadata(), + // metadata not supported by CloudFormation at the moment - + // https://github.com/aws-cloudformation/cloudformation-coverage-roadmap/issues/655 + MetadataOptions: ec2.MetadataOptions{ + Metadata: r.Metadata(), + HttpTokens: defsecTypes.StringDefault("optional", r.Metadata()), + HttpEndpoint: defsecTypes.StringDefault("enabled", r.Metadata()), + }, + UserData: r.GetStringProperty("UserData"), + } + + if launchTemplate, ok := findRelatedLaunchTemplate(ctx, r); ok { + instance = launchTemplate.Instance + } + + if instance.RootBlockDevice == nil { + instance.RootBlockDevice = &ec2.BlockDevice{ + Metadata: r.Metadata(), + Encrypted: defsecTypes.BoolDefault(false, r.Metadata()), + } + } + + blockDevices := getBlockDevices(r) + for i, device := range blockDevices { + copyDevice := device + if i == 0 { + instance.RootBlockDevice = copyDevice + continue + } + 
instance.EBSBlockDevices = append(instance.EBSBlockDevices, device) + } + instances = append(instances, instance) + } + + return instances +} + +func findRelatedLaunchTemplate(fctx parser.FileContext, r *parser.Resource) (ec2.LaunchTemplate, bool) { + launchTemplateRef := r.GetProperty("LaunchTemplate.LaunchTemplateName") + if launchTemplateRef.IsString() { + res := findLaunchTemplateByName(fctx, launchTemplateRef) + if res != nil { + return adaptLaunchTemplate(res), true + } + } + + launchTemplateRef = r.GetProperty("LaunchTemplate.LaunchTemplateId") + if !launchTemplateRef.IsString() { + return ec2.LaunchTemplate{}, false + } + + resource := fctx.GetResourceByLogicalID(launchTemplateRef.AsString()) + if resource == nil { + return ec2.LaunchTemplate{}, false + } + return adaptLaunchTemplate(resource), true +} + +func findLaunchTemplateByName(fctx parser.FileContext, prop *parser.Property) *parser.Resource { + for _, res := range fctx.GetResourcesByType("AWS::EC2::LaunchTemplate") { + templateName := res.GetProperty("LaunchTemplateName") + if templateName.IsNotString() { + continue + } + + if prop.EqualTo(templateName.AsString()) { + return res + } + } + + return nil +} + +func getBlockDevices(r *parser.Resource) []*ec2.BlockDevice { + var blockDevices []*ec2.BlockDevice + + devicesProp := r.GetProperty("BlockDeviceMappings") + + if devicesProp.IsNil() { + return blockDevices + } + + for _, d := range devicesProp.AsList() { + device := &ec2.BlockDevice{ + Metadata: d.Metadata(), + Encrypted: d.GetBoolProperty("Ebs.Encrypted"), + } + + blockDevices = append(blockDevices, device) + } + + return blockDevices +} diff --git a/internal/adapters/cloudformation/aws/ec2/launch_configuration.go b/internal/adapters/cloudformation/aws/ec2/launch_configuration.go new file mode 100644 index 000000000000..d9a9137d2deb --- /dev/null +++ b/internal/adapters/cloudformation/aws/ec2/launch_configuration.go @@ -0,0 +1,48 @@ +package ec2 + +import ( + 
"github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getLaunchConfigurations(file parser.FileContext) (launchConfigurations []ec2.LaunchConfiguration) { + launchConfigResources := file.GetResourcesByType("AWS::AutoScaling::LaunchConfiguration") + + for _, r := range launchConfigResources { + + launchConfig := ec2.LaunchConfiguration{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("Name"), + AssociatePublicIP: r.GetBoolProperty("AssociatePublicIpAddress"), + MetadataOptions: ec2.MetadataOptions{ + Metadata: r.Metadata(), + HttpTokens: types.StringDefault("optional", r.Metadata()), + HttpEndpoint: types.StringDefault("enabled", r.Metadata()), + }, + UserData: r.GetStringProperty("UserData", ""), + } + + if opts := r.GetProperty("MetadataOptions"); opts.IsNotNil() { + launchConfig.MetadataOptions = ec2.MetadataOptions{ + Metadata: opts.Metadata(), + HttpTokens: opts.GetStringProperty("HttpTokens", "optional"), + HttpEndpoint: opts.GetStringProperty("HttpEndpoint", "enabled"), + } + } + + blockDevices := getBlockDevices(r) + for i, device := range blockDevices { + copyDevice := device + if i == 0 { + launchConfig.RootBlockDevice = copyDevice + continue + } + launchConfig.EBSBlockDevices = append(launchConfig.EBSBlockDevices, device) + } + + launchConfigurations = append(launchConfigurations, launchConfig) + + } + return launchConfigurations +} diff --git a/internal/adapters/cloudformation/aws/ec2/launch_template.go b/internal/adapters/cloudformation/aws/ec2/launch_template.go new file mode 100644 index 000000000000..08c899576f68 --- /dev/null +++ b/internal/adapters/cloudformation/aws/ec2/launch_template.go @@ -0,0 +1,56 @@ +package ec2 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + "github.com/aquasecurity/defsec/pkg/types" + 
"github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getLaunchTemplates(file parser.FileContext) (templates []ec2.LaunchTemplate) { + launchConfigResources := file.GetResourcesByType("AWS::EC2::LaunchTemplate") + + for _, r := range launchConfigResources { + templates = append(templates, adaptLaunchTemplate(r)) + } + return templates +} + +func adaptLaunchTemplate(r *parser.Resource) ec2.LaunchTemplate { + launchTemplate := ec2.LaunchTemplate{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("LaunchTemplateName", ""), + Instance: ec2.Instance{ + Metadata: r.Metadata(), + MetadataOptions: ec2.MetadataOptions{ + Metadata: r.Metadata(), + HttpTokens: types.StringDefault("optional", r.Metadata()), + HttpEndpoint: types.StringDefault("enabled", r.Metadata()), + }, + UserData: types.StringDefault("", r.Metadata()), + }, + } + + if data := r.GetProperty("LaunchTemplateData"); data.IsNotNil() { + if opts := data.GetProperty("MetadataOptions"); opts.IsNotNil() { + launchTemplate.MetadataOptions = ec2.MetadataOptions{ + Metadata: opts.Metadata(), + HttpTokens: opts.GetStringProperty("HttpTokens", "optional"), + HttpEndpoint: opts.GetStringProperty("HttpEndpoint", "enabled"), + } + } + + launchTemplate.Instance.UserData = data.GetStringProperty("UserData", "") + + blockDevices := getBlockDevices(r) + for i, device := range blockDevices { + copyDevice := device + if i == 0 { + launchTemplate.RootBlockDevice = copyDevice + } else { + launchTemplate.EBSBlockDevices = append(launchTemplate.EBSBlockDevices, device) + } + } + } + + return launchTemplate +} diff --git a/internal/adapters/cloudformation/aws/ec2/nacl.go b/internal/adapters/cloudformation/aws/ec2/nacl.go new file mode 100644 index 000000000000..2bd6190f6c5e --- /dev/null +++ b/internal/adapters/cloudformation/aws/ec2/nacl.go @@ -0,0 +1,71 @@ +package ec2 + +import ( + "strconv" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + 
"github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getNetworkACLs(ctx parser.FileContext) (acls []ec2.NetworkACL) { + for _, aclResource := range ctx.GetResourcesByType("AWS::EC2::NetworkAcl") { + acl := ec2.NetworkACL{ + Metadata: aclResource.Metadata(), + Rules: getRules(aclResource.ID(), ctx), + IsDefaultRule: defsecTypes.BoolDefault(false, aclResource.Metadata()), + } + acls = append(acls, acl) + } + return acls +} + +func getRules(id string, ctx parser.FileContext) (rules []ec2.NetworkACLRule) { + for _, ruleResource := range ctx.GetResourcesByType("AWS::EC2::NetworkAclEntry") { + aclID := ruleResource.GetProperty("NetworkAclId") + if aclID.IsString() && aclID.AsString() == id { + + rule := ec2.NetworkACLRule{ + Metadata: ruleResource.Metadata(), + Type: defsecTypes.StringDefault(ec2.TypeIngress, ruleResource.Metadata()), + Action: defsecTypes.StringDefault(ec2.ActionAllow, ruleResource.Metadata()), + Protocol: defsecTypes.String("-1", ruleResource.Metadata()), + CIDRs: nil, + } + + if egressProperty := ruleResource.GetProperty("Egress"); egressProperty.IsBool() { + if egressProperty.AsBool() { + rule.Type = defsecTypes.String(ec2.TypeEgress, egressProperty.Metadata()) + } else { + rule.Type = defsecTypes.String(ec2.TypeIngress, egressProperty.Metadata()) + } + } + + if actionProperty := ruleResource.GetProperty("RuleAction"); actionProperty.IsString() { + if actionProperty.AsString() == ec2.ActionAllow { + rule.Action = defsecTypes.String(ec2.ActionAllow, actionProperty.Metadata()) + } else { + rule.Action = defsecTypes.String(ec2.ActionDeny, actionProperty.Metadata()) + } + } + + if protocolProperty := ruleResource.GetProperty("Protocol"); protocolProperty.IsInt() { + protocol := protocolProperty.AsIntValue().Value() + rule.Protocol = defsecTypes.String(strconv.Itoa(protocol), protocolProperty.Metadata()) + } + + if ipv4Cidr := 
ruleResource.GetProperty("CidrBlock"); ipv4Cidr.IsString() { + rule.CIDRs = append(rule.CIDRs, ipv4Cidr.AsStringValue()) + } + + if ipv6Cidr := ruleResource.GetProperty("Ipv6CidrBlock"); ipv6Cidr.IsString() { + rule.CIDRs = append(rule.CIDRs, ipv6Cidr.AsStringValue()) + } + + rules = append(rules, rule) + } + } + return rules +} diff --git a/internal/adapters/cloudformation/aws/ec2/security_group.go b/internal/adapters/cloudformation/aws/ec2/security_group.go new file mode 100644 index 000000000000..7989c62fab2d --- /dev/null +++ b/internal/adapters/cloudformation/aws/ec2/security_group.go @@ -0,0 +1,68 @@ +package ec2 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getSecurityGroups(ctx parser.FileContext) (groups []ec2.SecurityGroup) { + for _, r := range ctx.GetResourcesByType("AWS::EC2::SecurityGroup") { + group := ec2.SecurityGroup{ + Metadata: r.Metadata(), + Description: r.GetStringProperty("GroupDescription"), + IngressRules: getIngressRules(r), + EgressRules: getEgressRules(r), + IsDefault: types.Bool(r.GetStringProperty("GroupName").EqualTo("default"), r.Metadata()), + VPCID: r.GetStringProperty("VpcId"), + } + + groups = append(groups, group) + } + return groups +} + +func getIngressRules(r *parser.Resource) (sgRules []ec2.SecurityGroupRule) { + if ingressProp := r.GetProperty("SecurityGroupIngress"); ingressProp.IsList() { + for _, ingress := range ingressProp.AsList() { + rule := ec2.SecurityGroupRule{ + Metadata: ingress.Metadata(), + Description: ingress.GetStringProperty("Description"), + CIDRs: nil, + } + v4Cidr := ingress.GetProperty("CidrIp") + if v4Cidr.IsString() && v4Cidr.AsStringValue().IsNotEmpty() { + rule.CIDRs = append(rule.CIDRs, types.StringExplicit(v4Cidr.AsString(), v4Cidr.Metadata())) + } + v6Cidr := ingress.GetProperty("CidrIpv6") + if v6Cidr.IsString() && 
v6Cidr.AsStringValue().IsNotEmpty() { + rule.CIDRs = append(rule.CIDRs, types.StringExplicit(v6Cidr.AsString(), v6Cidr.Metadata())) + } + + sgRules = append(sgRules, rule) + } + } + return sgRules +} + +func getEgressRules(r *parser.Resource) (sgRules []ec2.SecurityGroupRule) { + if egressProp := r.GetProperty("SecurityGroupEgress"); egressProp.IsList() { + for _, egress := range egressProp.AsList() { + rule := ec2.SecurityGroupRule{ + Metadata: egress.Metadata(), + Description: egress.GetStringProperty("Description"), + } + v4Cidr := egress.GetProperty("CidrIp") + if v4Cidr.IsString() && v4Cidr.AsStringValue().IsNotEmpty() { + rule.CIDRs = append(rule.CIDRs, types.StringExplicit(v4Cidr.AsString(), v4Cidr.Metadata())) + } + v6Cidr := egress.GetProperty("CidrIpv6") + if v6Cidr.IsString() && v6Cidr.AsStringValue().IsNotEmpty() { + rule.CIDRs = append(rule.CIDRs, types.StringExplicit(v6Cidr.AsString(), v6Cidr.Metadata())) + } + + sgRules = append(sgRules, rule) + } + } + return sgRules +} diff --git a/internal/adapters/cloudformation/aws/ec2/subnet.go b/internal/adapters/cloudformation/aws/ec2/subnet.go new file mode 100644 index 000000000000..364a904a3d24 --- /dev/null +++ b/internal/adapters/cloudformation/aws/ec2/subnet.go @@ -0,0 +1,21 @@ +package ec2 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getSubnets(ctx parser.FileContext) (subnets []ec2.Subnet) { + + subnetResources := ctx.GetResourcesByType("AWS::EC2::Subnet") + for _, r := range subnetResources { + + subnet := ec2.Subnet{ + Metadata: r.Metadata(), + MapPublicIpOnLaunch: r.GetBoolProperty("MapPublicIpOnLaunch"), + } + + subnets = append(subnets, subnet) + } + return subnets +} diff --git a/internal/adapters/cloudformation/aws/ec2/volume.go b/internal/adapters/cloudformation/aws/ec2/volume.go new file mode 100644 index 000000000000..d45913f01e22 --- /dev/null +++ 
b/internal/adapters/cloudformation/aws/ec2/volume.go @@ -0,0 +1,25 @@ +package ec2 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getVolumes(ctx parser.FileContext) (volumes []ec2.Volume) { + + volumeResources := ctx.GetResourcesByType("AWS::EC2::Volume") + for _, r := range volumeResources { + + volume := ec2.Volume{ + Metadata: r.Metadata(), + Encryption: ec2.Encryption{ + Metadata: r.Metadata(), + Enabled: r.GetBoolProperty("Encrypted"), + KMSKeyID: r.GetStringProperty("KmsKeyId"), + }, + } + + volumes = append(volumes, volume) + } + return volumes +} diff --git a/internal/adapters/cloudformation/aws/ecr/ecr.go b/internal/adapters/cloudformation/aws/ecr/ecr.go new file mode 100644 index 000000000000..38ae46872cd1 --- /dev/null +++ b/internal/adapters/cloudformation/aws/ecr/ecr.go @@ -0,0 +1,13 @@ +package ecr + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) ecr.ECR { + return ecr.ECR{ + Repositories: getRepositories(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/ecr/repository.go b/internal/adapters/cloudformation/aws/ecr/repository.go new file mode 100644 index 000000000000..3f51a443700b --- /dev/null +++ b/internal/adapters/cloudformation/aws/ecr/repository.go @@ -0,0 +1,93 @@ +package ecr + +import ( + "fmt" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + + "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + + "github.com/liamg/iamgo" +) + +func getRepositories(ctx parser.FileContext) (repositories []ecr.Repository) { + + repositoryResources := ctx.GetResourcesByType("AWS::ECR::Repository") + + for _, r := range repositoryResources { + + repository := ecr.Repository{ + Metadata: r.Metadata(), + ImageScanning: ecr.ImageScanning{ + Metadata: r.Metadata(), + ScanOnPush: defsecTypes.BoolDefault(false, r.Metadata()), + }, + ImageTagsImmutable: hasImmutableImageTags(r), + Policies: nil, + Encryption: ecr.Encryption{ + Metadata: r.Metadata(), + Type: defsecTypes.StringDefault(ecr.EncryptionTypeAES256, r.Metadata()), + KMSKeyID: defsecTypes.StringDefault("", r.Metadata()), + }, + } + + if imageScanningProp := r.GetProperty("ImageScanningConfiguration"); imageScanningProp.IsNotNil() { + repository.ImageScanning = ecr.ImageScanning{ + Metadata: imageScanningProp.Metadata(), + ScanOnPush: imageScanningProp.GetBoolProperty("ScanOnPush", false), + } + } + + if encProp := r.GetProperty("EncryptionConfiguration"); encProp.IsNotNil() { + repository.Encryption = ecr.Encryption{ + Metadata: encProp.Metadata(), + Type: encProp.GetStringProperty("EncryptionType", ecr.EncryptionTypeAES256), + KMSKeyID: encProp.GetStringProperty("KmsKey", ""), + } + } + + if policy, err := getPolicy(r); err == nil 
{ + repository.Policies = append(repository.Policies, *policy) + } + + repositories = append(repositories, repository) + } + + return repositories +} + +func getPolicy(r *parser.Resource) (*iam.Policy, error) { + policyProp := r.GetProperty("RepositoryPolicyText") + if policyProp.IsNil() { + return nil, fmt.Errorf("missing policy") + } + + parsed, err := iamgo.Parse(policyProp.GetJsonBytes()) + if err != nil { + return nil, err + } + + return &iam.Policy{ + Metadata: policyProp.Metadata(), + Name: defsecTypes.StringDefault("", policyProp.Metadata()), + Document: iam.Document{ + Metadata: policyProp.Metadata(), + Parsed: *parsed, + }, + Builtin: defsecTypes.Bool(false, policyProp.Metadata()), + }, nil +} + +func hasImmutableImageTags(r *parser.Resource) defsecTypes.BoolValue { + mutabilityProp := r.GetProperty("ImageTagMutability") + if mutabilityProp.IsNil() { + return defsecTypes.BoolDefault(false, r.Metadata()) + } + if !mutabilityProp.EqualTo("IMMUTABLE") { + return defsecTypes.Bool(false, mutabilityProp.Metadata()) + } + return defsecTypes.Bool(true, mutabilityProp.Metadata()) +} diff --git a/internal/adapters/cloudformation/aws/ecs/cluster.go b/internal/adapters/cloudformation/aws/ecs/cluster.go new file mode 100644 index 000000000000..1a512eee4226 --- /dev/null +++ b/internal/adapters/cloudformation/aws/ecs/cluster.go @@ -0,0 +1,57 @@ +package ecs + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ecs" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getClusters(ctx parser.FileContext) (clusters []ecs.Cluster) { + + clusterResources := ctx.GetResourcesByType("AWS::ECS::Cluster") + + for _, r := range clusterResources { + + cluster := ecs.Cluster{ + Metadata: r.Metadata(), + Settings: getClusterSettings(r), + } + + clusters = append(clusters, cluster) + + } + + return clusters +} + +func getClusterSettings(r *parser.Resource) ecs.ClusterSettings { + 
+ clusterSettings := ecs.ClusterSettings{ + Metadata: r.Metadata(), + ContainerInsightsEnabled: types.BoolDefault(false, r.Metadata()), + } + + clusterSettingMap := r.GetProperty("ClusterSettings") + if clusterSettingMap.IsNil() || clusterSettingMap.IsNotList() { + return clusterSettings + } + + clusterSettings.Metadata = clusterSettingMap.Metadata() + + for _, setting := range clusterSettingMap.AsList() { + checkProperty(setting, &clusterSettings) + } + + return clusterSettings +} + +func checkProperty(setting *parser.Property, clusterSettings *ecs.ClusterSettings) { + settingMap := setting.AsMap() + name := settingMap["Name"] + if name.IsNotNil() && name.EqualTo("containerInsights") { + value := settingMap["Value"] + if value.IsNotNil() && value.EqualTo("enabled") { + clusterSettings.ContainerInsightsEnabled = types.Bool(true, value.Metadata()) + } + } +} diff --git a/internal/adapters/cloudformation/aws/ecs/ecs.go b/internal/adapters/cloudformation/aws/ecs/ecs.go new file mode 100644 index 000000000000..0697a15648df --- /dev/null +++ b/internal/adapters/cloudformation/aws/ecs/ecs.go @@ -0,0 +1,14 @@ +package ecs + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ecs" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) ecs.ECS { + return ecs.ECS{ + Clusters: getClusters(cfFile), + TaskDefinitions: getTaskDefinitions(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/ecs/task_definition.go b/internal/adapters/cloudformation/aws/ecs/task_definition.go new file mode 100644 index 000000000000..e1855353a228 --- /dev/null +++ b/internal/adapters/cloudformation/aws/ecs/task_definition.go @@ -0,0 +1,86 @@ +package ecs + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ecs" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getTaskDefinitions(ctx parser.FileContext) (taskDefinitions []ecs.TaskDefinition) { + + taskDefResources := ctx.GetResourcesByType("AWS::ECS::TaskDefinition") + + for _, r := range taskDefResources { + definitions, _ := getContainerDefinitions(r) + taskDef := ecs.TaskDefinition{ + Metadata: r.Metadata(), + Volumes: getVolumes(r), + ContainerDefinitions: definitions, + } + taskDefinitions = append(taskDefinitions, taskDef) + } + + return taskDefinitions +} + +func getContainerDefinitions(r *parser.Resource) ([]ecs.ContainerDefinition, error) { + var definitions []ecs.ContainerDefinition + containerDefs := r.GetProperty("ContainerDefinitions") + if containerDefs.IsNil() || containerDefs.IsNotList() { + return definitions, nil + } + for _, containerDef := range containerDefs.AsList() { + + var envVars []ecs.EnvVar + envVarsList := containerDef.GetProperty("Environment") + if envVarsList.IsNotNil() && envVarsList.IsList() { + for _, envVar := range envVarsList.AsList() { + envVars = append(envVars, ecs.EnvVar{ + Name: envVar.GetStringProperty("Name", "").Value(), + Value: envVar.GetStringProperty("Value", "").Value(), + }) + } + } + definition := ecs.ContainerDefinition{ + Metadata: containerDef.Metadata(), + Name: containerDef.GetStringProperty("Name", ""), + Image: containerDef.GetStringProperty("Image", ""), 
+ CPU: containerDef.GetIntProperty("CPU", 1), + Memory: containerDef.GetIntProperty("Memory", 128), + Essential: containerDef.GetBoolProperty("Essential", false), + Privileged: containerDef.GetBoolProperty("Privileged", false), + Environment: envVars, + PortMappings: nil, + } + definitions = append(definitions, definition) + } + if containerDefs.IsNotNil() && containerDefs.IsString() { + return ecs.CreateDefinitionsFromString(r.Metadata(), containerDefs.AsString()) + } + return definitions, nil +} + +func getVolumes(r *parser.Resource) (volumes []ecs.Volume) { + + volumesList := r.GetProperty("Volumes") + if volumesList.IsNil() || volumesList.IsNotList() { + return volumes + } + + for _, v := range volumesList.AsList() { + volume := ecs.Volume{ + Metadata: r.Metadata(), + EFSVolumeConfiguration: ecs.EFSVolumeConfiguration{ + Metadata: r.Metadata(), + TransitEncryptionEnabled: types.BoolDefault(false, r.Metadata()), + }, + } + transitProp := v.GetProperty("EFSVolumeConfiguration.TransitEncryption") + if transitProp.IsNotNil() && transitProp.EqualTo("enabled", parser.IgnoreCase) { + volume.EFSVolumeConfiguration.TransitEncryptionEnabled = types.Bool(true, transitProp.Metadata()) + } + + volumes = append(volumes, volume) + } + return volumes +} diff --git a/internal/adapters/cloudformation/aws/efs/efs.go b/internal/adapters/cloudformation/aws/efs/efs.go new file mode 100644 index 000000000000..1a751374f3ba --- /dev/null +++ b/internal/adapters/cloudformation/aws/efs/efs.go @@ -0,0 +1,13 @@ +package efs + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/efs" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) efs.EFS { + return efs.EFS{ + FileSystems: getFileSystems(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/efs/filesystem.go b/internal/adapters/cloudformation/aws/efs/filesystem.go new file mode 100644 index 000000000000..372a6fd9e03f --- /dev/null +++ b/internal/adapters/cloudformation/aws/efs/filesystem.go @@ -0,0 +1,23 @@ +package efs + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/efs" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getFileSystems(ctx parser.FileContext) (filesystems []efs.FileSystem) { + + filesystemResources := ctx.GetResourcesByType("AWS::EFS::FileSystem") + + for _, r := range filesystemResources { + + filesystem := efs.FileSystem{ + Metadata: r.Metadata(), + Encrypted: r.GetBoolProperty("Encrypted"), + } + + filesystems = append(filesystems, filesystem) + } + + return filesystems +} diff --git a/internal/adapters/cloudformation/aws/eks/cluster.go b/internal/adapters/cloudformation/aws/eks/cluster.go new file mode 100644 index 000000000000..ae814c7529e0 --- /dev/null +++ b/internal/adapters/cloudformation/aws/eks/cluster.go @@ -0,0 +1,56 @@ +package eks + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/eks" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getClusters(ctx parser.FileContext) (clusters []eks.Cluster) { + + clusterResources := ctx.GetResourcesByType("AWS::EKS::Cluster") + + for _, r := range clusterResources { + cluster := eks.Cluster{ + Metadata: r.Metadata(), + // Logging not supported for cloudformation https://github.com/aws/containers-roadmap/issues/242 + Logging: eks.Logging{ + Metadata: r.Metadata(), + API: defsecTypes.BoolUnresolvable(r.Metadata()), + Audit: defsecTypes.BoolUnresolvable(r.Metadata()), + Authenticator: defsecTypes.BoolUnresolvable(r.Metadata()), + 
ControllerManager: defsecTypes.BoolUnresolvable(r.Metadata()), + Scheduler: defsecTypes.BoolUnresolvable(r.Metadata()), + }, + Encryption: getEncryptionConfig(r), + // endpoint protection not supported - https://github.com/aws/containers-roadmap/issues/242 + PublicAccessEnabled: defsecTypes.BoolUnresolvable(r.Metadata()), + PublicAccessCIDRs: nil, + } + + clusters = append(clusters, cluster) + } + return clusters +} + +func getEncryptionConfig(r *parser.Resource) eks.Encryption { + + encryption := eks.Encryption{ + Metadata: r.Metadata(), + Secrets: defsecTypes.BoolDefault(false, r.Metadata()), + KMSKeyID: defsecTypes.StringDefault("", r.Metadata()), + } + + if encProp := r.GetProperty("EncryptionConfig"); encProp.IsNotNil() { + encryption.Metadata = encProp.Metadata() + encryption.KMSKeyID = encProp.GetStringProperty("Provider.KeyArn") + resourcesProp := encProp.GetProperty("Resources") + if resourcesProp.IsList() { + if resourcesProp.Contains("secrets") { + encryption.Secrets = defsecTypes.Bool(true, resourcesProp.Metadata()) + } + } + } + + return encryption +} diff --git a/internal/adapters/cloudformation/aws/eks/eks.go b/internal/adapters/cloudformation/aws/eks/eks.go new file mode 100644 index 000000000000..64ea051c6a25 --- /dev/null +++ b/internal/adapters/cloudformation/aws/eks/eks.go @@ -0,0 +1,13 @@ +package eks + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/eks" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) eks.EKS { + return eks.EKS{ + Clusters: getClusters(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/elasticache/cluster.go b/internal/adapters/cloudformation/aws/elasticache/cluster.go new file mode 100644 index 000000000000..572fe4c02043 --- /dev/null +++ b/internal/adapters/cloudformation/aws/elasticache/cluster.go @@ -0,0 +1,24 @@ +package elasticache + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/elasticache" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getClusterGroups(ctx parser.FileContext) (clusters []elasticache.Cluster) { + + clusterResources := ctx.GetResourcesByType("AWS::ElastiCache::CacheCluster") + + for _, r := range clusterResources { + cluster := elasticache.Cluster{ + Metadata: r.Metadata(), + Engine: r.GetStringProperty("Engine"), + NodeType: r.GetStringProperty("CacheNodeType"), + SnapshotRetentionLimit: r.GetIntProperty("SnapshotRetentionLimit"), + } + + clusters = append(clusters, cluster) + } + + return clusters +} diff --git a/internal/adapters/cloudformation/aws/elasticache/elasticache.go b/internal/adapters/cloudformation/aws/elasticache/elasticache.go new file mode 100644 index 000000000000..ceede8215187 --- /dev/null +++ b/internal/adapters/cloudformation/aws/elasticache/elasticache.go @@ -0,0 +1,15 @@ +package elasticache + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/elasticache" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) elasticache.ElastiCache { + return elasticache.ElastiCache{ + Clusters: getClusterGroups(cfFile), + ReplicationGroups: getReplicationGroups(cfFile), + SecurityGroups: getSecurityGroups(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/elasticache/replication_group.go b/internal/adapters/cloudformation/aws/elasticache/replication_group.go new file mode 100644 index 000000000000..12e22ff31314 --- /dev/null +++ b/internal/adapters/cloudformation/aws/elasticache/replication_group.go @@ -0,0 +1,23 @@ +package elasticache + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/elasticache" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getReplicationGroups(ctx parser.FileContext) (replicationGroups []elasticache.ReplicationGroup) { + + replicationGroupResources := ctx.GetResourcesByType("AWS::ElastiCache::ReplicationGroup") + + for _, r := range replicationGroupResources { + replicationGroup := elasticache.ReplicationGroup{ + Metadata: r.Metadata(), + TransitEncryptionEnabled: r.GetBoolProperty("TransitEncryptionEnabled"), + AtRestEncryptionEnabled: r.GetBoolProperty("AtRestEncryptionEnabled"), + } + + replicationGroups = append(replicationGroups, replicationGroup) + } + + return replicationGroups +} diff --git a/internal/adapters/cloudformation/aws/elasticache/security_group.go b/internal/adapters/cloudformation/aws/elasticache/security_group.go new file mode 100644 index 000000000000..c25b02f158b2 --- /dev/null +++ b/internal/adapters/cloudformation/aws/elasticache/security_group.go @@ -0,0 +1,22 @@ +package elasticache + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/elasticache" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getSecurityGroups(ctx parser.FileContext) (securityGroups []elasticache.SecurityGroup) { + + sgResources := ctx.GetResourcesByType("AWS::ElastiCache::SecurityGroup") + + 
for _, r := range sgResources { + + sg := elasticache.SecurityGroup{ + Metadata: r.Metadata(), + Description: r.GetStringProperty("Description"), + } + securityGroups = append(securityGroups, sg) + } + + return securityGroups +} diff --git a/internal/adapters/cloudformation/aws/elasticsearch/domain.go b/internal/adapters/cloudformation/aws/elasticsearch/domain.go new file mode 100644 index 000000000000..f78df7882bdf --- /dev/null +++ b/internal/adapters/cloudformation/aws/elasticsearch/domain.go @@ -0,0 +1,84 @@ +package elasticsearch + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/elasticsearch" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getDomains(ctx parser.FileContext) (domains []elasticsearch.Domain) { + + domainResources := ctx.GetResourcesByType("AWS::Elasticsearch::Domain", "AWS::OpenSearchService::Domain") + + for _, r := range domainResources { + + domain := elasticsearch.Domain{ + Metadata: r.Metadata(), + DomainName: r.GetStringProperty("DomainName"), + AccessPolicies: r.GetStringProperty("AccessPolicies"), + DedicatedMasterEnabled: r.GetBoolProperty("ElasticsearchClusterConfig.DedicatedMasterEnabled"), + VpcId: defsecTypes.String("", r.Metadata()), + LogPublishing: elasticsearch.LogPublishing{ + Metadata: r.Metadata(), + AuditEnabled: defsecTypes.BoolDefault(false, r.Metadata()), + CloudWatchLogGroupArn: defsecTypes.String("", r.Metadata()), + }, + TransitEncryption: elasticsearch.TransitEncryption{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + }, + AtRestEncryption: elasticsearch.AtRestEncryption{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + KmsKeyId: defsecTypes.String("", r.Metadata()), + }, + Endpoint: elasticsearch.Endpoint{ + Metadata: r.Metadata(), + EnforceHTTPS: defsecTypes.BoolDefault(false, r.Metadata()), + TLSPolicy: 
defsecTypes.StringDefault("Policy-Min-TLS-1-0-2019-07", r.Metadata()), + }, + ServiceSoftwareOptions: elasticsearch.ServiceSoftwareOptions{ + Metadata: r.Metadata(), + CurrentVersion: defsecTypes.String("", r.Metadata()), + NewVersion: defsecTypes.String("", r.Metadata()), + UpdateStatus: defsecTypes.String("", r.Metadata()), + UpdateAvailable: defsecTypes.Bool(false, r.Metadata()), + }, + } + + if prop := r.GetProperty("LogPublishingOptions"); prop.IsNotNil() { + domain.LogPublishing = elasticsearch.LogPublishing{ + Metadata: prop.Metadata(), + AuditEnabled: prop.GetBoolProperty("AUDIT_LOGS.Enabled", false), + CloudWatchLogGroupArn: prop.GetStringProperty("CloudWatchLogsLogGroupArn"), + } + } + + if prop := r.GetProperty("NodeToNodeEncryptionOptions"); prop.IsNotNil() { + domain.TransitEncryption = elasticsearch.TransitEncryption{ + Metadata: prop.Metadata(), + Enabled: prop.GetBoolProperty("Enabled", false), + } + } + + if prop := r.GetProperty("EncryptionAtRestOptions"); prop.IsNotNil() { + domain.AtRestEncryption = elasticsearch.AtRestEncryption{ + Metadata: prop.Metadata(), + Enabled: prop.GetBoolProperty("Enabled", false), + KmsKeyId: prop.GetStringProperty("KmsKeyId"), + } + } + + if prop := r.GetProperty("DomainEndpointOptions"); prop.IsNotNil() { + domain.Endpoint = elasticsearch.Endpoint{ + Metadata: prop.Metadata(), + EnforceHTTPS: prop.GetBoolProperty("EnforceHTTPS", false), + TLSPolicy: prop.GetStringProperty("TLSSecurityPolicy", "Policy-Min-TLS-1-0-2019-07"), + } + } + + domains = append(domains, domain) + } + + return domains +} diff --git a/internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go b/internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go new file mode 100644 index 000000000000..5546215a7df8 --- /dev/null +++ b/internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go @@ -0,0 +1,13 @@ +package elasticsearch + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/elasticsearch" + 
"github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) elasticsearch.Elasticsearch { + return elasticsearch.Elasticsearch{ + Domains: getDomains(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/elb/adapt_test.go b/internal/adapters/cloudformation/aws/elb/adapt_test.go new file mode 100644 index 000000000000..c975145b8426 --- /dev/null +++ b/internal/adapters/cloudformation/aws/elb/adapt_test.go @@ -0,0 +1,73 @@ +package elb + +import ( + "context" + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/aws/elb" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected elb.ELB + }{ + { + name: "LoadBalancer", + source: `AWSTemplateFormatVersion: "2010-09-09" +Resources: + LoadBalancer: + Type: AWS::ElasticLoadBalancingV2::LoadBalancer + DependsOn: + - ALBLogsBucketPermission + Properties: + Name: "k8s-dev" + IpAddressType: ipv4 + LoadBalancerAttributes: + - Key: routing.http2.enabled + Value: "true" + - Key: deletion_protection.enabled + Value: "true" + - Key: routing.http.drop_invalid_header_fields.enabled + Value: "true" + - Key: access_logs.s3.enabled + Value: "true" + Tags: + - Key: ingress.k8s.aws/resource + Value: LoadBalancer + - Key: elbv2.k8s.aws/cluster + Value: "biomage-dev" + Type: application +`, + expected: elb.ELB{ + LoadBalancers: []elb.LoadBalancer{ + { + Metadata: types.NewTestMetadata(), + Type: types.String("application", types.NewTestMetadata()), + DropInvalidHeaderFields: types.Bool(true, types.NewTestMetadata()), + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + 
"template.yaml": tt.source, + }) + + p := parser.New() + fctx, err := p.ParseFile(context.TODO(), fs, "template.yaml") + require.NoError(t, err) + + testutil.AssertDefsecEqual(t, tt.expected, Adapt(*fctx)) + }) + } +} diff --git a/internal/adapters/cloudformation/aws/elb/elb.go b/internal/adapters/cloudformation/aws/elb/elb.go new file mode 100644 index 000000000000..06159b9dda7b --- /dev/null +++ b/internal/adapters/cloudformation/aws/elb/elb.go @@ -0,0 +1,13 @@ +package elb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/elb" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) elb.ELB { + return elb.ELB{ + LoadBalancers: getLoadBalancers(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/elb/loadbalancer.go b/internal/adapters/cloudformation/aws/elb/loadbalancer.go new file mode 100644 index 000000000000..51c62ef31d7d --- /dev/null +++ b/internal/adapters/cloudformation/aws/elb/loadbalancer.go @@ -0,0 +1,81 @@ +package elb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/elb" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getLoadBalancers(ctx parser.FileContext) (loadbalancers []elb.LoadBalancer) { + + loadBalanacerResources := ctx.GetResourcesByType("AWS::ElasticLoadBalancingV2::LoadBalancer") + + for _, r := range loadBalanacerResources { + lb := elb.LoadBalancer{ + Metadata: r.Metadata(), + Type: r.GetStringProperty("Type", "application"), + DropInvalidHeaderFields: checkForDropInvalidHeaders(r), + Internal: isInternal(r), + Listeners: getListeners(r, ctx), + } + loadbalancers = append(loadbalancers, lb) + } + + return loadbalancers +} + +func getListeners(lbr *parser.Resource, ctx parser.FileContext) (listeners []elb.Listener) { + + listenerResources := ctx.GetResourcesByType("AWS::ElasticLoadBalancingV2::Listener") + + for 
_, r := range listenerResources { + if r.GetStringProperty("LoadBalancerArn").Value() == lbr.ID() { + listener := elb.Listener{ + Metadata: r.Metadata(), + Protocol: r.GetStringProperty("Protocol", "HTTP"), + TLSPolicy: r.GetStringProperty("SslPolicy", ""), + DefaultActions: getDefaultListenerActions(r), + } + + listeners = append(listeners, listener) + } + } + return listeners +} + +func getDefaultListenerActions(r *parser.Resource) (actions []elb.Action) { + defaultActionsProp := r.GetProperty("DefaultActions") + if defaultActionsProp.IsNotList() { + return actions + } + for _, action := range defaultActionsProp.AsList() { + actions = append(actions, elb.Action{ + Metadata: action.Metadata(), + Type: action.GetProperty("Type").AsStringValue(), + }) + } + return actions +} + +func isInternal(r *parser.Resource) types.BoolValue { + schemeProp := r.GetProperty("Scheme") + if schemeProp.IsNotString() { + return r.BoolDefault(false) + } + return types.Bool(schemeProp.EqualTo("internal", parser.IgnoreCase), schemeProp.Metadata()) +} + +func checkForDropInvalidHeaders(r *parser.Resource) types.BoolValue { + attributesProp := r.GetProperty("LoadBalancerAttributes") + if attributesProp.IsNotList() { + return types.BoolDefault(false, r.Metadata()) + } + + for _, attr := range attributesProp.AsList() { + if attr.GetStringProperty("Key").Value() == "routing.http.drop_invalid_header_fields.enabled" { + return attr.GetBoolProperty("Value") + } + } + + return r.BoolDefault(false) +} diff --git a/internal/adapters/cloudformation/aws/iam/iam.go b/internal/adapters/cloudformation/aws/iam/iam.go new file mode 100644 index 000000000000..7774c4d7d2e8 --- /dev/null +++ b/internal/adapters/cloudformation/aws/iam/iam.go @@ -0,0 +1,27 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) iam.IAM { + return iam.IAM{ + PasswordPolicy: iam.PasswordPolicy{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + ReusePreventionCount: defsecTypes.IntDefault(0, defsecTypes.NewUnmanagedMetadata()), + RequireLowercase: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + RequireUppercase: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + RequireNumbers: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + RequireSymbols: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + MaxAgeDays: defsecTypes.IntDefault(0, defsecTypes.NewUnmanagedMetadata()), + MinimumLength: defsecTypes.IntDefault(0, defsecTypes.NewUnmanagedMetadata()), + }, + Policies: getPolicies(cfFile), + Groups: getGroups(cfFile), + Users: getUsers(cfFile), + Roles: getRoles(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/iam/policy.go b/internal/adapters/cloudformation/aws/iam/policy.go new file mode 100644 index 000000000000..f9eb365dd73c --- /dev/null +++ b/internal/adapters/cloudformation/aws/iam/policy.go @@ -0,0 +1,125 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/liamg/iamgo" +) + +func getPolicies(ctx parser.FileContext) (policies []iam.Policy) { + for _, policyResource := range ctx.GetResourcesByType("AWS::IAM::Policy") { + + policy := iam.Policy{ + Metadata: policyResource.Metadata(), + Name: policyResource.GetStringProperty("PolicyName"), + Document: iam.Document{ + Metadata: policyResource.Metadata(), + Parsed: iamgo.Document{}, + }, + Builtin: defsecTypes.Bool(false, policyResource.Metadata()), + } + + if policyProp := policyResource.GetProperty("PolicyDocument"); policyProp.IsNotNil() { + doc, err := iamgo.Parse(policyProp.GetJsonBytes()) + if err != nil { 
+ continue + } + policy.Document.Parsed = *doc + } + + policies = append(policies, policy) + } + return policies +} + +func getRoles(ctx parser.FileContext) (roles []iam.Role) { + for _, roleResource := range ctx.GetResourcesByType("AWS::IAM::Role") { + policyProp := roleResource.GetProperty("Policies") + roleName := roleResource.GetStringProperty("RoleName") + + roles = append(roles, iam.Role{ + Metadata: roleResource.Metadata(), + Name: roleName, + Policies: getPoliciesDocs(policyProp), + }) + } + return roles +} + +func getUsers(ctx parser.FileContext) (users []iam.User) { + for _, userResource := range ctx.GetResourcesByType("AWS::IAM::User") { + policyProp := userResource.GetProperty("Policies") + userName := userResource.GetStringProperty("UserName") + + users = append(users, iam.User{ + Metadata: userResource.Metadata(), + Name: userName, + LastAccess: defsecTypes.TimeUnresolvable(userResource.Metadata()), + Policies: getPoliciesDocs(policyProp), + AccessKeys: getAccessKeys(ctx, userName.Value()), + }) + } + return users +} + +func getAccessKeys(ctx parser.FileContext, username string) (accessKeys []iam.AccessKey) { + for _, keyResource := range ctx.GetResourcesByType("AWS::IAM::AccessKey") { + keyUsername := keyResource.GetStringProperty("UserName") + if !keyUsername.EqualTo(username) { + continue + } + active := defsecTypes.BoolDefault(false, keyResource.Metadata()) + if statusProp := keyResource.GetProperty("Status"); statusProp.IsString() { + active = defsecTypes.Bool(statusProp.AsString() == "Active", statusProp.Metadata()) + } + + accessKeys = append(accessKeys, iam.AccessKey{ + Metadata: keyResource.Metadata(), + AccessKeyId: defsecTypes.StringUnresolvable(keyResource.Metadata()), + CreationDate: defsecTypes.TimeUnresolvable(keyResource.Metadata()), + LastAccess: defsecTypes.TimeUnresolvable(keyResource.Metadata()), + Active: active, + }) + } + return accessKeys +} + +func getGroups(ctx parser.FileContext) (groups []iam.Group) { + for _, 
groupResource := range ctx.GetResourcesByType("AWS::IAM::Group") { + policyProp := groupResource.GetProperty("Policies") + groupName := groupResource.GetStringProperty("GroupName") + + groups = append(groups, iam.Group{ + Metadata: groupResource.Metadata(), + Name: groupName, + Policies: getPoliciesDocs(policyProp), + }) + } + return groups +} + +func getPoliciesDocs(policiesProp *parser.Property) []iam.Policy { + var policies []iam.Policy + + for _, policy := range policiesProp.AsList() { + policyProp := policy.GetProperty("PolicyDocument") + policyName := policy.GetStringProperty("PolicyName") + + doc, err := iamgo.Parse(policyProp.GetJsonBytes()) + if err != nil { + continue + } + + policies = append(policies, iam.Policy{ + Metadata: policyProp.Metadata(), + Name: policyName, + Document: iam.Document{ + Metadata: policyProp.Metadata(), + Parsed: *doc, + }, + Builtin: defsecTypes.Bool(false, policyProp.Metadata()), + }) + } + return policies +} diff --git a/internal/adapters/cloudformation/aws/kinesis/kinesis.go b/internal/adapters/cloudformation/aws/kinesis/kinesis.go new file mode 100644 index 000000000000..921027ced49a --- /dev/null +++ b/internal/adapters/cloudformation/aws/kinesis/kinesis.go @@ -0,0 +1,13 @@ +package kinesis + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/kinesis" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) kinesis.Kinesis { + return kinesis.Kinesis{ + Streams: getStreams(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/kinesis/stream.go b/internal/adapters/cloudformation/aws/kinesis/stream.go new file mode 100644 index 000000000000..41a0889332a6 --- /dev/null +++ b/internal/adapters/cloudformation/aws/kinesis/stream.go @@ -0,0 +1,36 @@ +package kinesis + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/kinesis" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getStreams(ctx parser.FileContext) (streams []kinesis.Stream) { + + streamResources := ctx.GetResourcesByType("AWS::Kinesis::Stream") + + for _, r := range streamResources { + + stream := kinesis.Stream{ + Metadata: r.Metadata(), + Encryption: kinesis.Encryption{ + Metadata: r.Metadata(), + Type: types.StringDefault("KMS", r.Metadata()), + KMSKeyID: types.StringDefault("", r.Metadata()), + }, + } + + if prop := r.GetProperty("StreamEncryption"); prop.IsNotNil() { + stream.Encryption = kinesis.Encryption{ + Metadata: prop.Metadata(), + Type: prop.GetStringProperty("EncryptionType", "KMS"), + KMSKeyID: prop.GetStringProperty("KeyId"), + } + } + + streams = append(streams, stream) + } + + return streams +} diff --git a/internal/adapters/cloudformation/aws/lambda/function.go b/internal/adapters/cloudformation/aws/lambda/function.go new file mode 100644 index 000000000000..e3ba43fbffff --- /dev/null +++ b/internal/adapters/cloudformation/aws/lambda/function.go @@ -0,0 +1,53 @@ +package lambda + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/lambda" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getFunctions(ctx parser.FileContext) (functions []lambda.Function) { + + functionResources := ctx.GetResourcesByType("AWS::Lambda::Function") + 
+ for _, r := range functionResources { + + function := lambda.Function{ + Metadata: r.Metadata(), + Tracing: lambda.Tracing{ + Metadata: r.Metadata(), + Mode: types.StringDefault("PassThrough", r.Metadata()), + }, + Permissions: getPermissions(r, ctx), + } + + if prop := r.GetProperty("TracingConfig"); prop.IsNotNil() { + function.Tracing = lambda.Tracing{ + Metadata: prop.Metadata(), + Mode: prop.GetStringProperty("Mode", "PassThrough"), + } + } + + functions = append(functions, function) + } + + return functions +} + +func getPermissions(funcR *parser.Resource, ctx parser.FileContext) (perms []lambda.Permission) { + + permissionResources := ctx.GetResourcesByType("AWS::Lambda::Permission") + + for _, r := range permissionResources { + if prop := r.GetStringProperty("FunctionName"); prop.EqualTo(funcR.ID()) { + perm := lambda.Permission{ + Metadata: r.Metadata(), + Principal: r.GetStringProperty("Principal"), + SourceARN: r.GetStringProperty("SourceArn"), + } + perms = append(perms, perm) + } + } + + return perms +} diff --git a/internal/adapters/cloudformation/aws/lambda/lambda.go b/internal/adapters/cloudformation/aws/lambda/lambda.go new file mode 100644 index 000000000000..d848296629a4 --- /dev/null +++ b/internal/adapters/cloudformation/aws/lambda/lambda.go @@ -0,0 +1,13 @@ +package lambda + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/lambda" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) lambda.Lambda { + return lambda.Lambda{ + Functions: getFunctions(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/mq/broker.go b/internal/adapters/cloudformation/aws/mq/broker.go new file mode 100644 index 000000000000..aed216e74291 --- /dev/null +++ b/internal/adapters/cloudformation/aws/mq/broker.go @@ -0,0 +1,33 @@ +package mq + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/mq" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getBrokers(ctx parser.FileContext) (brokers []mq.Broker) { + for _, r := range ctx.GetResourcesByType("AWS::AmazonMQ::Broker") { + + broker := mq.Broker{ + Metadata: r.Metadata(), + PublicAccess: r.GetBoolProperty("PubliclyAccessible"), + Logging: mq.Logging{ + Metadata: r.Metadata(), + General: types.BoolDefault(false, r.Metadata()), + Audit: types.BoolDefault(false, r.Metadata()), + }, + } + + if prop := r.GetProperty("Logs"); prop.IsNotNil() { + broker.Logging = mq.Logging{ + Metadata: prop.Metadata(), + General: prop.GetBoolProperty("General"), + Audit: prop.GetBoolProperty("Audit"), + } + } + + brokers = append(brokers, broker) + } + return brokers +} diff --git a/internal/adapters/cloudformation/aws/mq/mq.go b/internal/adapters/cloudformation/aws/mq/mq.go new file mode 100644 index 000000000000..8cad86d007b8 --- /dev/null +++ b/internal/adapters/cloudformation/aws/mq/mq.go @@ -0,0 +1,13 @@ +package mq + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/mq" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) mq.MQ { + return mq.MQ{ + Brokers: getBrokers(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/msk/cluster.go b/internal/adapters/cloudformation/aws/msk/cluster.go new file mode 100644 index 000000000000..7de4d181a50c --- /dev/null +++ b/internal/adapters/cloudformation/aws/msk/cluster.go @@ -0,0 +1,80 @@ +package msk + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/msk" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getClusters(ctx parser.FileContext) (clusters []msk.Cluster) { + for _, r := range ctx.GetResourcesByType("AWS::MSK::Cluster") { + + cluster := msk.Cluster{ + Metadata: r.Metadata(), + EncryptionInTransit: msk.EncryptionInTransit{ + Metadata: r.Metadata(), + ClientBroker: defsecTypes.StringDefault("TLS", r.Metadata()), + }, + EncryptionAtRest: msk.EncryptionAtRest{ + Metadata: r.Metadata(), + KMSKeyARN: defsecTypes.StringDefault("", r.Metadata()), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + }, + Logging: msk.Logging{ + Metadata: r.Metadata(), + Broker: msk.BrokerLogging{ + Metadata: r.Metadata(), + S3: msk.S3Logging{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + }, + Cloudwatch: msk.CloudwatchLogging{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + }, + Firehose: msk.FirehoseLogging{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + }, + }, + }, + } + + if encProp := r.GetProperty("EncryptionInfo.EncryptionInTransit"); encProp.IsNotNil() { + cluster.EncryptionInTransit = msk.EncryptionInTransit{ + Metadata: encProp.Metadata(), + ClientBroker: encProp.GetStringProperty("ClientBroker", "TLS"), + } + } + + if encAtRestProp := r.GetProperty("EncryptionInfo.EncryptionAtRest"); encAtRestProp.IsNotNil() { + cluster.EncryptionAtRest = 
msk.EncryptionAtRest{ + Metadata: encAtRestProp.Metadata(), + KMSKeyARN: encAtRestProp.GetStringProperty("DataVolumeKMSKeyId", ""), + Enabled: defsecTypes.BoolDefault(true, encAtRestProp.Metadata()), + } + } + + if loggingProp := r.GetProperty("LoggingInfo"); loggingProp.IsNotNil() { + cluster.Logging.Metadata = loggingProp.Metadata() + if brokerLoggingProp := loggingProp.GetProperty("BrokerLogs"); brokerLoggingProp.IsNotNil() { + cluster.Logging.Broker.Metadata = brokerLoggingProp.Metadata() + if s3Prop := brokerLoggingProp.GetProperty("S3"); s3Prop.IsNotNil() { + cluster.Logging.Broker.S3.Metadata = s3Prop.Metadata() + cluster.Logging.Broker.S3.Enabled = s3Prop.GetBoolProperty("Enabled", false) + } + if cwProp := brokerLoggingProp.GetProperty("CloudWatchLogs"); cwProp.IsNotNil() { + cluster.Logging.Broker.Cloudwatch.Metadata = cwProp.Metadata() + cluster.Logging.Broker.Cloudwatch.Enabled = cwProp.GetBoolProperty("Enabled", false) + } + if fhProp := brokerLoggingProp.GetProperty("Firehose"); fhProp.IsNotNil() { + cluster.Logging.Broker.Firehose.Metadata = fhProp.Metadata() + cluster.Logging.Broker.Firehose.Enabled = fhProp.GetBoolProperty("Enabled", false) + } + } + } + + clusters = append(clusters, cluster) + } + return clusters +} diff --git a/internal/adapters/cloudformation/aws/msk/msk.go b/internal/adapters/cloudformation/aws/msk/msk.go new file mode 100644 index 000000000000..c6ed253b2466 --- /dev/null +++ b/internal/adapters/cloudformation/aws/msk/msk.go @@ -0,0 +1,13 @@ +package msk + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/msk" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) msk.MSK { + return msk.MSK{ + Clusters: getClusters(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/neptune/cluster.go b/internal/adapters/cloudformation/aws/neptune/cluster.go new file mode 100644 index 000000000000..b24353881bfa --- /dev/null +++ b/internal/adapters/cloudformation/aws/neptune/cluster.go @@ -0,0 +1,34 @@ +package neptune + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/neptune" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getClusters(ctx parser.FileContext) (clusters []neptune.Cluster) { + for _, r := range ctx.GetResourcesByType("AWS::Neptune::DBCluster") { + + cluster := neptune.Cluster{ + Metadata: r.Metadata(), + Logging: neptune.Logging{ + Metadata: r.Metadata(), + Audit: getAuditLog(r), + }, + StorageEncrypted: r.GetBoolProperty("StorageEncrypted"), + KMSKeyID: r.GetStringProperty("KmsKeyId"), + } + clusters = append(clusters, cluster) + } + return clusters +} + +func getAuditLog(r *parser.Resource) types.BoolValue { + if logsProp := r.GetProperty("EnableCloudwatchLogsExports"); logsProp.IsList() { + if logsProp.Contains("audit") { + return types.Bool(true, logsProp.Metadata()) + } + } + + return types.BoolDefault(false, r.Metadata()) +} diff --git a/internal/adapters/cloudformation/aws/neptune/neptune.go b/internal/adapters/cloudformation/aws/neptune/neptune.go new file mode 100644 index 000000000000..798836230816 --- /dev/null +++ b/internal/adapters/cloudformation/aws/neptune/neptune.go @@ -0,0 +1,13 @@ +package neptune + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/neptune" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) neptune.Neptune { + return neptune.Neptune{ + Clusters: getClusters(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/rds/adapt_test.go b/internal/adapters/cloudformation/aws/rds/adapt_test.go new file mode 100644 index 000000000000..09dbd9fb7d09 --- /dev/null +++ b/internal/adapters/cloudformation/aws/rds/adapt_test.go @@ -0,0 +1,158 @@ +package rds + +import ( + "context" + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/aws/rds" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected rds.RDS + }{ + { + name: "cluster with instances", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + RDSCluster: + Type: 'AWS::RDS::DBCluster' + Properties: + DBClusterIdentifier: my-cluster1 + Engine: aurora-postgresql + StorageEncrypted: true + KmsKeyId: "your-kms-key-id" + PerformanceInsightsEnabled: true + PerformanceInsightsKmsKeyId: "test-kms-key-id" + PublicAccess: true + DeletionProtection: true + BackupRetentionPeriod: 2 + RDSDBInstance1: + Type: 'AWS::RDS::DBInstance' + Properties: + Engine: aurora-mysql + EngineVersion: "5.7.12" + DBInstanceIdentifier: test + DBClusterIdentifier: + Ref: RDSCluster + PubliclyAccessible: 'false' + DBInstanceClass: db.r3.xlarge + StorageEncrypted: true + KmsKeyId: "your-kms-key-id" + EnablePerformanceInsights: true + PerformanceInsightsKMSKeyId: "test-kms-key-id2" + MultiAZ: true + AutoMinorVersionUpgrade: true + DBInstanceArn: "arn:aws:rds:us-east-2:123456789012:db:my-mysql-instance-1" + EnableIAMDatabaseAuthentication: true + EnableCloudwatchLogsExports: + - "error" + - "general" + DBParameterGroupName: "testgroup" + Tags: + - Key: "keyname1" + Value: "value1" + - Key: 
"keyname2" + Value: "value2" + RDSDBParameterGroup: + Type: 'AWS::RDS::DBParameterGroup' + Properties: + Description: "CloudFormation Sample MySQL Parameter Group" + DBParameterGroupName: "testgroup" +`, + expected: rds.RDS{ + ParameterGroups: []rds.ParameterGroups{ + { + Metadata: types.NewTestMetadata(), + DBParameterGroupName: types.String("testgroup", types.NewTestMetadata()), + }, + }, + Clusters: []rds.Cluster{ + { + Metadata: types.NewTestMetadata(), + BackupRetentionPeriodDays: types.Int(2, types.NewTestMetadata()), + Engine: types.String("aurora-postgresql", types.NewTestMetadata()), + Encryption: rds.Encryption{ + EncryptStorage: types.Bool(true, types.NewTestMetadata()), + KMSKeyID: types.String("your-kms-key-id", types.NewTestMetadata()), + }, + PerformanceInsights: rds.PerformanceInsights{ + Metadata: types.NewTestMetadata(), + Enabled: types.Bool(true, types.NewTestMetadata()), + KMSKeyID: types.String("test-kms-key-id", types.NewTestMetadata()), + }, + PublicAccess: types.Bool(false, types.NewTestMetadata()), + DeletionProtection: types.Bool(true, types.NewTestMetadata()), + Instances: []rds.ClusterInstance{ + { + Instance: rds.Instance{ + Metadata: types.NewTestMetadata(), + StorageEncrypted: types.Bool(true, types.NewTestMetadata()), + Encryption: rds.Encryption{ + EncryptStorage: types.Bool(true, types.NewTestMetadata()), + KMSKeyID: types.String("your-kms-key-id", types.NewTestMetadata()), + }, + DBInstanceIdentifier: types.String("test", types.NewTestMetadata()), + PubliclyAccessible: types.Bool(false, types.NewTestMetadata()), + PublicAccess: types.BoolDefault(false, types.NewTestMetadata()), + BackupRetentionPeriodDays: types.IntDefault(1, types.NewTestMetadata()), + Engine: types.StringDefault("aurora-mysql", types.NewTestMetadata()), + EngineVersion: types.String("5.7.12", types.NewTestMetadata()), + MultiAZ: types.Bool(true, types.NewTestMetadata()), + AutoMinorVersionUpgrade: types.Bool(true, types.NewTestMetadata()), + DBInstanceArn: 
types.String("arn:aws:rds:us-east-2:123456789012:db:my-mysql-instance-1", types.NewTestMetadata()), + IAMAuthEnabled: types.Bool(true, types.NewTestMetadata()), + PerformanceInsights: rds.PerformanceInsights{ + Metadata: types.NewTestMetadata(), + Enabled: types.Bool(true, types.NewTestMetadata()), + KMSKeyID: types.String("test-kms-key-id2", types.NewTestMetadata()), + }, + EnabledCloudwatchLogsExports: []types.StringValue{ + types.String("error", types.NewTestMetadata()), + types.String("general", types.NewTestMetadata()), + }, + DBParameterGroups: []rds.DBParameterGroupsList{ + { + DBParameterGroupName: types.String("testgroup", types.NewTestMetadata()), + }, + }, + TagList: []rds.TagList{ + { + Metadata: types.NewTestMetadata(), + }, + { + Metadata: types.NewTestMetadata(), + }, + }, + }, + ClusterIdentifier: types.String("RDSCluster", types.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "template.yaml": tt.source, + }) + + p := parser.New() + fctx, err := p.ParseFile(context.TODO(), fs, "template.yaml") + require.NoError(t, err) + + testutil.AssertDefsecEqual(t, tt.expected, Adapt(*fctx)) + }) + } + +} diff --git a/internal/adapters/cloudformation/aws/rds/cluster.go b/internal/adapters/cloudformation/aws/rds/cluster.go new file mode 100644 index 000000000000..79457c57d3a3 --- /dev/null +++ b/internal/adapters/cloudformation/aws/rds/cluster.go @@ -0,0 +1,48 @@ +package rds + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/rds" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getClusters(ctx parser.FileContext) (clusters map[string]rds.Cluster) { + clusters = make(map[string]rds.Cluster) + for _, clusterResource := range ctx.GetResourcesByType("AWS::RDS::DBCluster") { + clusters[clusterResource.ID()] = rds.Cluster{ 
+ Metadata: clusterResource.Metadata(), + BackupRetentionPeriodDays: clusterResource.GetIntProperty("BackupRetentionPeriod", 1), + PerformanceInsights: rds.PerformanceInsights{ + Metadata: clusterResource.Metadata(), + Enabled: clusterResource.GetBoolProperty("PerformanceInsightsEnabled"), + KMSKeyID: clusterResource.GetStringProperty("PerformanceInsightsKmsKeyId"), + }, + Encryption: rds.Encryption{ + Metadata: clusterResource.Metadata(), + EncryptStorage: clusterResource.GetBoolProperty("StorageEncrypted"), + KMSKeyID: clusterResource.GetStringProperty("KmsKeyId"), + }, + PublicAccess: defsecTypes.BoolDefault(false, clusterResource.Metadata()), + Engine: clusterResource.GetStringProperty("Engine", rds.EngineAurora), + LatestRestorableTime: defsecTypes.TimeUnresolvable(clusterResource.Metadata()), + DeletionProtection: clusterResource.GetBoolProperty("DeletionProtection"), + } + } + return clusters +} + +func getClassic(ctx parser.FileContext) rds.Classic { + return rds.Classic{ + DBSecurityGroups: getClassicSecurityGroups(ctx), + } +} + +func getClassicSecurityGroups(ctx parser.FileContext) (groups []rds.DBSecurityGroup) { + for _, dbsgResource := range ctx.GetResourcesByType("AWS::RDS::DBSecurityGroup") { + group := rds.DBSecurityGroup{ + Metadata: dbsgResource.Metadata(), + } + groups = append(groups, group) + } + return groups +} diff --git a/internal/adapters/cloudformation/aws/rds/instance.go b/internal/adapters/cloudformation/aws/rds/instance.go new file mode 100644 index 000000000000..7f651bb6d154 --- /dev/null +++ b/internal/adapters/cloudformation/aws/rds/instance.go @@ -0,0 +1,130 @@ +package rds + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/rds" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getClustersAndInstances(ctx parser.FileContext) ([]rds.Cluster, []rds.Instance) { + + clusterMap := getClusters(ctx) + + var orphans 
[]rds.Instance + + for _, r := range ctx.GetResourcesByType("AWS::RDS::DBInstance") { + + instance := rds.Instance{ + Metadata: r.Metadata(), + BackupRetentionPeriodDays: r.GetIntProperty("BackupRetentionPeriod", 1), + ReplicationSourceARN: r.GetStringProperty("SourceDBInstanceIdentifier"), + PerformanceInsights: rds.PerformanceInsights{ + Metadata: r.Metadata(), + Enabled: r.GetBoolProperty("EnablePerformanceInsights"), + KMSKeyID: r.GetStringProperty("PerformanceInsightsKMSKeyId"), + }, + Encryption: rds.Encryption{ + Metadata: r.Metadata(), + EncryptStorage: r.GetBoolProperty("StorageEncrypted"), + KMSKeyID: r.GetStringProperty("KmsKeyId"), + }, + PublicAccess: r.GetBoolProperty("PubliclyAccessible", true), + Engine: r.GetStringProperty("Engine"), + IAMAuthEnabled: r.GetBoolProperty("EnableIAMDatabaseAuthentication"), + DeletionProtection: r.GetBoolProperty("DeletionProtection", false), + DBInstanceArn: r.GetStringProperty("DBInstanceArn"), + StorageEncrypted: r.GetBoolProperty("StorageEncrypted", false), + DBInstanceIdentifier: r.GetStringProperty("DBInstanceIdentifier"), + DBParameterGroups: getDBParameterGroups(ctx, r), + TagList: getTagList(r), + EnabledCloudwatchLogsExports: getEnabledCloudwatchLogsExports(r), + EngineVersion: r.GetStringProperty("EngineVersion"), + AutoMinorVersionUpgrade: r.GetBoolProperty("AutoMinorVersionUpgrade"), + MultiAZ: r.GetBoolProperty("MultiAZ"), + PubliclyAccessible: r.GetBoolProperty("PubliclyAccessible"), + LatestRestorableTime: types.TimeUnresolvable(r.Metadata()), + ReadReplicaDBInstanceIdentifiers: getReadReplicaDBInstanceIdentifiers(r), + } + + if clusterID := r.GetProperty("DBClusterIdentifier"); clusterID.IsString() { + if cluster, exist := clusterMap[clusterID.AsString()]; exist { + cluster.Instances = append(cluster.Instances, rds.ClusterInstance{ + Instance: instance, + ClusterIdentifier: clusterID.AsStringValue(), + }) + clusterMap[clusterID.AsString()] = cluster + } + } else { + orphans = append(orphans, instance) 
+ } + } + + clusters := make([]rds.Cluster, 0, len(clusterMap)) + + for _, cluster := range clusterMap { + clusters = append(clusters, cluster) + } + + return clusters, orphans +} + +func getDBParameterGroups(ctx parser.FileContext, r *parser.Resource) (dbParameterGroup []rds.DBParameterGroupsList) { + + dbParameterGroupName := r.GetStringProperty("DBParameterGroupName") + + for _, r := range ctx.GetResourcesByType("AWS::RDS::DBParameterGroup") { + name := r.GetStringProperty("DBParameterGroupName") + if !dbParameterGroupName.EqualTo(name.Value()) { + continue + } + dbpmgl := rds.DBParameterGroupsList{ + Metadata: r.Metadata(), + DBParameterGroupName: name, + KMSKeyID: types.StringUnresolvable(r.Metadata()), + } + dbParameterGroup = append(dbParameterGroup, dbpmgl) + } + + return dbParameterGroup +} + +func getEnabledCloudwatchLogsExports(r *parser.Resource) (enabledcloudwatchlogexportslist []types.StringValue) { + enabledCloudwatchLogExportList := r.GetProperty("EnableCloudwatchLogsExports") + + if enabledCloudwatchLogExportList.IsNil() || enabledCloudwatchLogExportList.IsNotList() { + return enabledcloudwatchlogexportslist + } + + for _, ecle := range enabledCloudwatchLogExportList.AsList() { + enabledcloudwatchlogexportslist = append(enabledcloudwatchlogexportslist, ecle.AsStringValue()) + } + return enabledcloudwatchlogexportslist +} + +func getTagList(r *parser.Resource) (taglist []rds.TagList) { + tagLists := r.GetProperty("Tags") + + if tagLists.IsNil() || tagLists.IsNotList() { + return taglist + } + + for _, tl := range tagLists.AsList() { + taglist = append(taglist, rds.TagList{ + Metadata: tl.Metadata(), + }) + } + return taglist +} + +func getReadReplicaDBInstanceIdentifiers(r *parser.Resource) (readreplicadbidentifier []types.StringValue) { + readReplicaDBIdentifier := r.GetProperty("SourceDBInstanceIdentifier") + + if readReplicaDBIdentifier.IsNil() || readReplicaDBIdentifier.IsNotList() { + return readreplicadbidentifier + } + + for _, rr := range 
readReplicaDBIdentifier.AsList() { + readreplicadbidentifier = append(readreplicadbidentifier, rr.AsStringValue()) + } + return readreplicadbidentifier +} diff --git a/internal/adapters/cloudformation/aws/rds/parameter_groups.go b/internal/adapters/cloudformation/aws/rds/parameter_groups.go new file mode 100644 index 000000000000..fda94da89c36 --- /dev/null +++ b/internal/adapters/cloudformation/aws/rds/parameter_groups.go @@ -0,0 +1,42 @@ +package rds + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/rds" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getParameterGroups(ctx parser.FileContext) (parametergroups []rds.ParameterGroups) { + + for _, r := range ctx.GetResourcesByType("AWS::RDS::DBParameterGroup") { + + paramgroup := rds.ParameterGroups{ + Metadata: r.Metadata(), + DBParameterGroupName: r.GetStringProperty("DBParameterGroupName"), + DBParameterGroupFamily: r.GetStringProperty("DBParameterGroupFamily"), + Parameters: getParameters(r), + } + + parametergroups = append(parametergroups, paramgroup) + } + + return parametergroups +} + +func getParameters(r *parser.Resource) (parameters []rds.Parameters) { + + dBParam := r.GetProperty("Parameters") + + if dBParam.IsNil() || dBParam.IsNotList() { + return parameters + } + + for _, dbp := range dBParam.AsList() { + parameters = append(parameters, rds.Parameters{ + Metadata: dbp.Metadata(), + ParameterName: types.StringDefault("", dbp.Metadata()), + ParameterValue: types.StringDefault("", dbp.Metadata()), + }) + } + return parameters +} diff --git a/internal/adapters/cloudformation/aws/rds/rds.go b/internal/adapters/cloudformation/aws/rds/rds.go new file mode 100644 index 000000000000..cfa52ed16027 --- /dev/null +++ b/internal/adapters/cloudformation/aws/rds/rds.go @@ -0,0 +1,18 @@ +package rds + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/rds" + 
"github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) rds.RDS { + clusters, orphans := getClustersAndInstances(cfFile) + return rds.RDS{ + Instances: orphans, + Clusters: clusters, + Classic: getClassic(cfFile), + ParameterGroups: getParameterGroups(cfFile), + Snapshots: nil, + } +} diff --git a/internal/adapters/cloudformation/aws/redshift/cluster.go b/internal/adapters/cloudformation/aws/redshift/cluster.go new file mode 100644 index 000000000000..9624849326f1 --- /dev/null +++ b/internal/adapters/cloudformation/aws/redshift/cluster.go @@ -0,0 +1,54 @@ +package redshift + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/redshift" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getClusters(ctx parser.FileContext) (clusters []redshift.Cluster) { + for _, r := range ctx.GetResourcesByType("AWS::Redshift::Cluster") { + + cluster := redshift.Cluster{ + Metadata: r.Metadata(), + ClusterIdentifier: r.GetStringProperty("ClusterIdentifier"), + AllowVersionUpgrade: r.GetBoolProperty("AllowVersionUpgrade"), + NodeType: r.GetStringProperty("NodeType"), + NumberOfNodes: r.GetIntProperty("NumberOfNodes"), + PubliclyAccessible: r.GetBoolProperty("PubliclyAccessible"), + MasterUsername: r.GetStringProperty("MasterUsername"), + VpcId: types.String("", r.Metadata()), + LoggingEnabled: types.Bool(false, r.Metadata()), + AutomatedSnapshotRetentionPeriod: r.GetIntProperty("AutomatedSnapshotRetentionPeriod"), + Encryption: redshift.Encryption{ + Metadata: r.Metadata(), + Enabled: r.GetBoolProperty("Encrypted"), + KMSKeyID: r.GetStringProperty("KmsKeyId"), + }, + EndPoint: redshift.EndPoint{ + Metadata: r.Metadata(), + Port: r.GetIntProperty("Endpoint.Port"), + }, + SubnetGroupName: r.GetStringProperty("ClusterSubnetGroupName", ""), + } + + clusters = append(clusters, cluster) + } + return 
clusters
+}
+
+// getParameters collects the parameters of all AWS::Redshift::ClusterParameterGroup resources.
+func getParameters(ctx parser.FileContext) (parameters []redshift.ClusterParameter) {
+
+	paraRes := ctx.GetResourcesByType("AWS::Redshift::ClusterParameterGroup")
+	for _, r := range paraRes {
+		for _, par := range r.GetProperty("Parameters").AsList() {
+			parameters = append(parameters, redshift.ClusterParameter{
+				Metadata:       par.Metadata(),
+				ParameterName:  par.GetStringProperty("ParameterName"),
+				ParameterValue: par.GetStringProperty("ParameterValue"),
+			})
+		}
+	}
+	return parameters
+}
diff --git a/internal/adapters/cloudformation/aws/redshift/redshift.go b/internal/adapters/cloudformation/aws/redshift/redshift.go
new file mode 100644
index 000000000000..43eccc3232f6
--- /dev/null
+++ b/internal/adapters/cloudformation/aws/redshift/redshift.go
@@ -0,0 +1,16 @@
+package redshift
+
+import (
+	"github.com/aquasecurity/defsec/pkg/providers/aws/redshift"
+	"github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser"
+)
+
+// Adapt ...
+func Adapt(cfFile parser.FileContext) redshift.Redshift { + return redshift.Redshift{ + Clusters: getClusters(cfFile), + SecurityGroups: getSecurityGroups(cfFile), + ClusterParameters: getParameters(cfFile), + ReservedNodes: nil, + } +} diff --git a/internal/adapters/cloudformation/aws/redshift/security_group.go b/internal/adapters/cloudformation/aws/redshift/security_group.go new file mode 100644 index 000000000000..345631e1a61b --- /dev/null +++ b/internal/adapters/cloudformation/aws/redshift/security_group.go @@ -0,0 +1,17 @@ +package redshift + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/redshift" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getSecurityGroups(ctx parser.FileContext) (groups []redshift.SecurityGroup) { + for _, groupResource := range ctx.GetResourcesByType("AWS::Redshift::ClusterSecurityGroup") { + group := redshift.SecurityGroup{ + Metadata: groupResource.Metadata(), + Description: groupResource.GetProperty("Description").AsStringValue(), + } + groups = append(groups, group) + } + return groups +} diff --git a/internal/adapters/cloudformation/aws/s3/bucket.go b/internal/adapters/cloudformation/aws/s3/bucket.go new file mode 100644 index 000000000000..45194abff553 --- /dev/null +++ b/internal/adapters/cloudformation/aws/s3/bucket.go @@ -0,0 +1,148 @@ +package s3 + +import ( + "regexp" + "strings" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/s3" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +var aclConvertRegex = regexp.MustCompile(`[A-Z][^A-Z]*`) + +func getBuckets(cfFile parser.FileContext) []s3.Bucket { + var buckets []s3.Bucket + bucketResources := cfFile.GetResourcesByType("AWS::S3::Bucket") + + for _, r := range bucketResources { + s3b := s3.Bucket{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("BucketName"), + PublicAccessBlock: 
getPublicAccessBlock(r), + Encryption: getEncryption(r, cfFile), + Versioning: s3.Versioning{ + Metadata: r.Metadata(), + Enabled: hasVersioning(r), + MFADelete: defsecTypes.BoolUnresolvable(r.Metadata()), + }, + Logging: getLogging(r), + ACL: convertAclValue(r.GetStringProperty("AccessControl", "private")), + LifecycleConfiguration: getLifecycle(r), + AccelerateConfigurationStatus: r.GetStringProperty("AccelerateConfiguration.AccelerationStatus"), + Website: getWebsite(r), + BucketLocation: defsecTypes.String("", r.Metadata()), + Objects: nil, + } + + buckets = append(buckets, s3b) + } + return buckets +} + +func getPublicAccessBlock(r *parser.Resource) *s3.PublicAccessBlock { + if block := r.GetProperty("PublicAccessBlockConfiguration"); block.IsNil() { + return nil + } + + return &s3.PublicAccessBlock{ + Metadata: r.Metadata(), + BlockPublicACLs: r.GetBoolProperty("PublicAccessBlockConfiguration.BlockPublicAcls"), + BlockPublicPolicy: r.GetBoolProperty("PublicAccessBlockConfiguration.BlockPublicPolicy"), + IgnorePublicACLs: r.GetBoolProperty("PublicAccessBlockConfiguration.IgnorePublicAcls"), + RestrictPublicBuckets: r.GetBoolProperty("PublicAccessBlockConfiguration.RestrictPublicBuckets"), + } +} + +func convertAclValue(aclValue defsecTypes.StringValue) defsecTypes.StringValue { + matches := aclConvertRegex.FindAllString(aclValue.Value(), -1) + + return defsecTypes.String(strings.ToLower(strings.Join(matches, "-")), aclValue.GetMetadata()) +} + +func getLogging(r *parser.Resource) s3.Logging { + + logging := s3.Logging{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + TargetBucket: defsecTypes.StringDefault("", r.Metadata()), + } + + if config := r.GetProperty("LoggingConfiguration"); config.IsNotNil() { + logging.TargetBucket = config.GetStringProperty("DestinationBucketName") + if logging.TargetBucket.IsNotEmpty() || !logging.TargetBucket.GetMetadata().IsResolvable() { + logging.Enabled = defsecTypes.Bool(true, 
config.Metadata()) + } + } + return logging +} + +func hasVersioning(r *parser.Resource) defsecTypes.BoolValue { + versioningProp := r.GetProperty("VersioningConfiguration.Status") + + if versioningProp.IsNil() { + return defsecTypes.BoolDefault(false, r.Metadata()) + } + + versioningEnabled := false + if versioningProp.EqualTo("Enabled") { + versioningEnabled = true + + } + return defsecTypes.Bool(versioningEnabled, versioningProp.Metadata()) +} + +func getEncryption(r *parser.Resource, _ parser.FileContext) s3.Encryption { + + encryption := s3.Encryption{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + Algorithm: defsecTypes.StringDefault("", r.Metadata()), + KMSKeyId: defsecTypes.StringDefault("", r.Metadata()), + } + + if encryptProps := r.GetProperty("BucketEncryption.ServerSideEncryptionConfiguration"); encryptProps.IsNotNil() { + for _, rule := range encryptProps.AsList() { + if algo := rule.GetProperty("ServerSideEncryptionByDefault.SSEAlgorithm"); algo.EqualTo("AES256") { + encryption.Enabled = defsecTypes.Bool(true, algo.Metadata()) + } else if kmsKeyProp := rule.GetProperty("ServerSideEncryptionByDefault.KMSMasterKeyID"); !kmsKeyProp.IsEmpty() && kmsKeyProp.IsString() { + encryption.KMSKeyId = kmsKeyProp.AsStringValue() + } + if encryption.Enabled.IsFalse() { + encryption.Enabled = rule.GetBoolProperty("BucketKeyEnabled", false) + } + } + } + + return encryption +} + +func getLifecycle(resource *parser.Resource) []s3.Rules { + LifecycleProp := resource.GetProperty("LifecycleConfiguration") + RuleProp := LifecycleProp.GetProperty("Rules") + + var rule []s3.Rules + + if RuleProp.IsNil() || RuleProp.IsNotList() { + return rule + } + + for _, r := range RuleProp.AsList() { + rule = append(rule, s3.Rules{ + Metadata: r.Metadata(), + Status: r.GetStringProperty("Status"), + }) + } + return rule +} + +func getWebsite(r *parser.Resource) *s3.Website { + if block := r.GetProperty("WebsiteConfiguration"); block.IsNil() { + 
return nil + } else { + return &s3.Website{ + Metadata: block.Metadata(), + } + } +} diff --git a/internal/adapters/cloudformation/aws/s3/s3.go b/internal/adapters/cloudformation/aws/s3/s3.go new file mode 100644 index 000000000000..ad5c8410ab1c --- /dev/null +++ b/internal/adapters/cloudformation/aws/s3/s3.go @@ -0,0 +1,13 @@ +package s3 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/s3" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) s3.S3 { + return s3.S3{ + Buckets: getBuckets(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/sam/api.go b/internal/adapters/cloudformation/aws/sam/api.go new file mode 100644 index 000000000000..ac123f28770e --- /dev/null +++ b/internal/adapters/cloudformation/aws/sam/api.go @@ -0,0 +1,96 @@ +package sam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/sam" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getApis(cfFile parser.FileContext) (apis []sam.API) { + + apiResources := cfFile.GetResourcesByType("AWS::Serverless::Api") + for _, r := range apiResources { + api := sam.API{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("Name", ""), + TracingEnabled: r.GetBoolProperty("TracingEnabled"), + DomainConfiguration: getDomainConfiguration(r), + AccessLogging: getAccessLogging(r), + RESTMethodSettings: getRestMethodSettings(r), + } + + apis = append(apis, api) + } + + return apis +} + +func getRestMethodSettings(r *parser.Resource) sam.RESTMethodSettings { + + settings := sam.RESTMethodSettings{ + Metadata: r.Metadata(), + CacheDataEncrypted: defsecTypes.BoolDefault(false, r.Metadata()), + LoggingEnabled: defsecTypes.BoolDefault(false, r.Metadata()), + DataTraceEnabled: defsecTypes.BoolDefault(false, r.Metadata()), + MetricsEnabled: defsecTypes.BoolDefault(false, 
r.Metadata()), + } + + settingsProp := r.GetProperty("MethodSettings") + if settingsProp.IsNotNil() { + + settings = sam.RESTMethodSettings{ + Metadata: settingsProp.Metadata(), + CacheDataEncrypted: settingsProp.GetBoolProperty("CacheDataEncrypted"), + LoggingEnabled: defsecTypes.BoolDefault(false, settingsProp.Metadata()), + DataTraceEnabled: settingsProp.GetBoolProperty("DataTraceEnabled"), + MetricsEnabled: settingsProp.GetBoolProperty("MetricsEnabled"), + } + + if loggingLevel := settingsProp.GetProperty("LoggingLevel"); loggingLevel.IsNotNil() { + if loggingLevel.EqualTo("OFF", parser.IgnoreCase) { + settings.LoggingEnabled = defsecTypes.Bool(false, loggingLevel.Metadata()) + } else { + settings.LoggingEnabled = defsecTypes.Bool(true, loggingLevel.Metadata()) + } + } + } + + return settings +} + +func getAccessLogging(r *parser.Resource) sam.AccessLogging { + + logging := sam.AccessLogging{ + Metadata: r.Metadata(), + CloudwatchLogGroupARN: defsecTypes.StringDefault("", r.Metadata()), + } + + if access := r.GetProperty("AccessLogSetting"); access.IsNotNil() { + logging = sam.AccessLogging{ + Metadata: access.Metadata(), + CloudwatchLogGroupARN: access.GetStringProperty("DestinationArn", ""), + } + } + + return logging +} + +func getDomainConfiguration(r *parser.Resource) sam.DomainConfiguration { + + domainConfig := sam.DomainConfiguration{ + Metadata: r.Metadata(), + Name: defsecTypes.StringDefault("", r.Metadata()), + SecurityPolicy: defsecTypes.StringDefault("TLS_1_0", r.Metadata()), + } + + if domain := r.GetProperty("Domain"); domain.IsNotNil() { + domainConfig = sam.DomainConfiguration{ + Metadata: domain.Metadata(), + Name: domain.GetStringProperty("DomainName", ""), + SecurityPolicy: domain.GetStringProperty("SecurityPolicy", "TLS_1_0"), + } + } + + return domainConfig + +} diff --git a/internal/adapters/cloudformation/aws/sam/function.go b/internal/adapters/cloudformation/aws/sam/function.go new file mode 100644 index 000000000000..2c07a05626cb --- 
/dev/null +++ b/internal/adapters/cloudformation/aws/sam/function.go @@ -0,0 +1,58 @@ +package sam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/providers/aws/sam" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/liamg/iamgo" +) + +func getFunctions(cfFile parser.FileContext) (functions []sam.Function) { + + functionResources := cfFile.GetResourcesByType("AWS::Serverless::Function") + for _, r := range functionResources { + function := sam.Function{ + Metadata: r.Metadata(), + FunctionName: r.GetStringProperty("FunctionName"), + Tracing: r.GetStringProperty("Tracing", sam.TracingModePassThrough), + ManagedPolicies: nil, + Policies: nil, + } + + setFunctionPolicies(r, &function) + functions = append(functions, function) + } + + return functions +} + +func setFunctionPolicies(r *parser.Resource, function *sam.Function) { + policies := r.GetProperty("Policies") + if policies.IsNotNil() { + if policies.IsString() { + function.ManagedPolicies = append(function.ManagedPolicies, policies.AsStringValue()) + } else if policies.IsList() { + for _, property := range policies.AsList() { + if property.IsMap() { + parsed, err := iamgo.Parse(property.GetJsonBytes(true)) + if err != nil { + continue + } + policy := iam.Policy{ + Metadata: property.Metadata(), + Name: defsecTypes.StringDefault("", property.Metadata()), + Document: iam.Document{ + Metadata: property.Metadata(), + Parsed: *parsed, + }, + Builtin: defsecTypes.Bool(false, property.Metadata()), + } + function.Policies = append(function.Policies, policy) + } else if property.IsString() { + function.ManagedPolicies = append(function.ManagedPolicies, property.AsStringValue()) + } + } + } + } +} diff --git a/internal/adapters/cloudformation/aws/sam/http_api.go b/internal/adapters/cloudformation/aws/sam/http_api.go new file mode 100644 
index 000000000000..075cff115596
--- /dev/null
+++ b/internal/adapters/cloudformation/aws/sam/http_api.go
@@ -0,0 +1,74 @@
+package sam
+
+import (
+	"github.com/aquasecurity/defsec/pkg/providers/aws/sam"
+	"github.com/aquasecurity/defsec/pkg/types"
+	"github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser"
+)
+
+// getHttpApis adapts every AWS::Serverless::HttpApi resource in the template.
+func getHttpApis(cfFile parser.FileContext) (apis []sam.HttpAPI) {
+
+	apiResources := cfFile.GetResourcesByType("AWS::Serverless::HttpApi")
+	for _, r := range apiResources {
+		api := sam.HttpAPI{
+			Metadata:             r.Metadata(),
+			Name:                 r.GetStringProperty("Name", ""),
+			DomainConfiguration:  getDomainConfiguration(r),
+			AccessLogging:        getAccessLoggingV2(r),
+			DefaultRouteSettings: getRouteSettings(r),
+		}
+
+		apis = append(apis, api)
+	}
+
+	return apis
+}
+
+// getAccessLoggingV2 reads the AccessLogSettings block, defaulting to an empty ARN.
+func getAccessLoggingV2(r *parser.Resource) sam.AccessLogging {
+
+	logging := sam.AccessLogging{
+		Metadata:              r.Metadata(),
+		CloudwatchLogGroupARN: types.StringDefault("", r.Metadata()),
+	}
+
+	if access := r.GetProperty("AccessLogSettings"); access.IsNotNil() {
+		logging = sam.AccessLogging{
+			Metadata:              access.Metadata(),
+			CloudwatchLogGroupARN: access.GetStringProperty("DestinationArn", ""),
+		}
+	}
+
+	return logging
+}
+
+// getRouteSettings reads DefaultRouteSettings; logging is considered enabled when
+// LoggingLevel is set to anything other than OFF.
+func getRouteSettings(r *parser.Resource) sam.RouteSettings {
+
+	routeSettings := sam.RouteSettings{
+		Metadata:               r.Metadata(),
+		LoggingEnabled:         types.BoolDefault(false, r.Metadata()),
+		DataTraceEnabled:       types.BoolDefault(false, r.Metadata()),
+		DetailedMetricsEnabled: types.BoolDefault(false, r.Metadata()),
+	}
+
+	if route := r.GetProperty("DefaultRouteSettings"); route.IsNotNil() {
+		routeSettings = sam.RouteSettings{
+			Metadata:               route.Metadata(),
+			LoggingEnabled:         types.BoolDefault(false, route.Metadata()),
+			DataTraceEnabled:       route.GetBoolProperty("DataTraceEnabled"),
+			DetailedMetricsEnabled: route.GetBoolProperty("DetailedMetricsEnabled"),
+		}
+
+		// LoggingLevel is a string enum (ERROR, INFO or OFF), so GetBoolProperty could
+		// never read it; mirror getRestMethodSettings in api.go instead.
+		if level := route.GetProperty("LoggingLevel"); level.IsNotNil() {
+			routeSettings.LoggingEnabled = types.Bool(!level.EqualTo("OFF", parser.IgnoreCase), level.Metadata())
+		}
+	}
+
+	return routeSettings
+
+}
diff --git a/internal/adapters/cloudformation/aws/sam/sam.go
b/internal/adapters/cloudformation/aws/sam/sam.go new file mode 100644 index 000000000000..dc684b2aa625 --- /dev/null +++ b/internal/adapters/cloudformation/aws/sam/sam.go @@ -0,0 +1,17 @@ +package sam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/sam" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) sam.SAM { + return sam.SAM{ + APIs: getApis(cfFile), + HttpAPIs: getHttpApis(cfFile), + Functions: getFunctions(cfFile), + StateMachines: getStateMachines(cfFile), + SimpleTables: getSimpleTables(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/sam/state_machines.go b/internal/adapters/cloudformation/aws/sam/state_machines.go new file mode 100644 index 000000000000..8331cb907107 --- /dev/null +++ b/internal/adapters/cloudformation/aws/sam/state_machines.go @@ -0,0 +1,80 @@ +package sam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/providers/aws/sam" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/liamg/iamgo" +) + +func getStateMachines(cfFile parser.FileContext) (stateMachines []sam.StateMachine) { + + stateMachineResources := cfFile.GetResourcesByType("AWS::Serverless::StateMachine") + for _, r := range stateMachineResources { + stateMachine := sam.StateMachine{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("Name"), + LoggingConfiguration: sam.LoggingConfiguration{ + Metadata: r.Metadata(), + LoggingEnabled: defsecTypes.BoolDefault(false, r.Metadata()), + }, + ManagedPolicies: nil, + Policies: nil, + Tracing: getTracingConfiguration(r), + } + + if logging := r.GetProperty("Logging"); logging.IsNotNil() { + stateMachine.LoggingConfiguration.Metadata = logging.Metadata() + if level := logging.GetProperty("Level"); level.IsNotNil() { + 
stateMachine.LoggingConfiguration.LoggingEnabled = defsecTypes.Bool(!level.EqualTo("OFF"), level.Metadata()) + } + } + + setStateMachinePolicies(r, &stateMachine) + stateMachines = append(stateMachines, stateMachine) + } + + return stateMachines +} + +func getTracingConfiguration(r *parser.Resource) sam.TracingConfiguration { + tracing := r.GetProperty("Tracing") + if tracing.IsNil() { + return sam.TracingConfiguration{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + } + } + + return sam.TracingConfiguration{ + Metadata: tracing.Metadata(), + Enabled: tracing.GetBoolProperty("Enabled"), + } +} + +func setStateMachinePolicies(r *parser.Resource, stateMachine *sam.StateMachine) { + policies := r.GetProperty("Policies") + if policies.IsNotNil() { + if policies.IsString() { + stateMachine.ManagedPolicies = append(stateMachine.ManagedPolicies, policies.AsStringValue()) + } else if policies.IsList() { + for _, property := range policies.AsList() { + parsed, err := iamgo.Parse(property.GetJsonBytes(true)) + if err != nil { + continue + } + policy := iam.Policy{ + Metadata: property.Metadata(), + Name: defsecTypes.StringDefault("", property.Metadata()), + Document: iam.Document{ + Metadata: property.Metadata(), + Parsed: *parsed, + }, + Builtin: defsecTypes.Bool(false, property.Metadata()), + } + stateMachine.Policies = append(stateMachine.Policies, policy) + } + } + } +} diff --git a/internal/adapters/cloudformation/aws/sam/tables.go b/internal/adapters/cloudformation/aws/sam/tables.go new file mode 100644 index 000000000000..1ee62a4ef90f --- /dev/null +++ b/internal/adapters/cloudformation/aws/sam/tables.go @@ -0,0 +1,42 @@ +package sam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/sam" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getSimpleTables(cfFile parser.FileContext) (tables []sam.SimpleTable) { + 
+ tableResources := cfFile.GetResourcesByType("AWS::Serverless::SimpleTable") + for _, r := range tableResources { + table := sam.SimpleTable{ + Metadata: r.Metadata(), + TableName: r.GetStringProperty("TableName"), + SSESpecification: getSSESpecification(r), + } + + tables = append(tables, table) + } + + return tables +} + +func getSSESpecification(r *parser.Resource) sam.SSESpecification { + + spec := sam.SSESpecification{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + KMSMasterKeyID: defsecTypes.StringDefault("", r.Metadata()), + } + + if sse := r.GetProperty("SSESpecification"); sse.IsNotNil() { + spec = sam.SSESpecification{ + Metadata: sse.Metadata(), + Enabled: sse.GetBoolProperty("SSEEnabled"), + KMSMasterKeyID: sse.GetStringProperty("KMSMasterKeyID"), + } + } + + return spec +} diff --git a/internal/adapters/cloudformation/aws/sns/sns.go b/internal/adapters/cloudformation/aws/sns/sns.go new file mode 100644 index 000000000000..149571412c82 --- /dev/null +++ b/internal/adapters/cloudformation/aws/sns/sns.go @@ -0,0 +1,13 @@ +package sns + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/sns" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) sns.SNS { + return sns.SNS{ + Topics: getTopics(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/sns/topic.go b/internal/adapters/cloudformation/aws/sns/topic.go new file mode 100644 index 000000000000..07fb62a35763 --- /dev/null +++ b/internal/adapters/cloudformation/aws/sns/topic.go @@ -0,0 +1,24 @@ +package sns + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/sns" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getTopics(ctx parser.FileContext) (topics []sns.Topic) { + for _, r := range ctx.GetResourcesByType("AWS::SNS::Topic") { + + topic := sns.Topic{ + Metadata: r.Metadata(), + ARN: types.StringDefault("", r.Metadata()), + Encryption: sns.Encryption{ + Metadata: r.Metadata(), + KMSKeyID: r.GetStringProperty("KmsMasterKeyId"), + }, + } + + topics = append(topics, topic) + } + return topics +} diff --git a/internal/adapters/cloudformation/aws/sqs/queue.go b/internal/adapters/cloudformation/aws/sqs/queue.go new file mode 100644 index 000000000000..21081060c924 --- /dev/null +++ b/internal/adapters/cloudformation/aws/sqs/queue.go @@ -0,0 +1,66 @@ +package sqs + +import ( + "fmt" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" + + "github.com/liamg/iamgo" +) + +func getQueues(ctx parser.FileContext) (queues []sqs.Queue) { + for _, r := range ctx.GetResourcesByType("AWS::SQS::Queue") { + queue := sqs.Queue{ + Metadata: r.Metadata(), + QueueURL: defsecTypes.StringDefault("", r.Metadata()), + Encryption: sqs.Encryption{ + Metadata: r.Metadata(), + ManagedEncryption: defsecTypes.Bool(false, r.Metadata()), + KMSKeyID: r.GetStringProperty("KmsMasterKeyId"), + }, + 
Policies: []iam.Policy{}, + } + if policy, err := getPolicy(r.ID(), ctx); err == nil { + queue.Policies = append(queue.Policies, *policy) + } + queues = append(queues, queue) + } + return queues +} + +func getPolicy(id string, ctx parser.FileContext) (*iam.Policy, error) { + for _, policyResource := range ctx.GetResourcesByType("AWS::SQS::QueuePolicy") { + documentProp := policyResource.GetProperty("PolicyDocument") + if documentProp.IsNil() { + continue + } + queuesProp := policyResource.GetProperty("Queues") + if queuesProp.IsNil() { + continue + } + for _, queueRef := range queuesProp.AsList() { + if queueRef.IsString() && queueRef.AsString() == id { + raw := documentProp.GetJsonBytes() + parsed, err := iamgo.Parse(raw) + if err != nil { + continue + } + return &iam.Policy{ + Metadata: documentProp.Metadata(), + Name: defsecTypes.StringDefault("", documentProp.Metadata()), + Document: iam.Document{ + Metadata: documentProp.Metadata(), + Parsed: *parsed, + }, + Builtin: defsecTypes.Bool(false, documentProp.Metadata()), + }, nil + } + } + } + return nil, fmt.Errorf("no matching policy found") +} diff --git a/internal/adapters/cloudformation/aws/sqs/sqs.go b/internal/adapters/cloudformation/aws/sqs/sqs.go new file mode 100644 index 000000000000..3528fae01bf8 --- /dev/null +++ b/internal/adapters/cloudformation/aws/sqs/sqs.go @@ -0,0 +1,13 @@ +package sqs + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) sqs.SQS { + return sqs.SQS{ + Queues: getQueues(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/ssm/secret.go b/internal/adapters/cloudformation/aws/ssm/secret.go new file mode 100644 index 000000000000..6145c950f7f4 --- /dev/null +++ b/internal/adapters/cloudformation/aws/ssm/secret.go @@ -0,0 +1,18 @@ +package ssm + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ssm" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func getSecrets(ctx parser.FileContext) (secrets []ssm.Secret) { + for _, r := range ctx.GetResourcesByType("AWS::SecretsManager::Secret") { + secret := ssm.Secret{ + Metadata: r.Metadata(), + KMSKeyID: r.GetStringProperty("KmsKeyId"), + } + + secrets = append(secrets, secret) + } + return secrets +} diff --git a/internal/adapters/cloudformation/aws/ssm/ssm.go b/internal/adapters/cloudformation/aws/ssm/ssm.go new file mode 100644 index 000000000000..53dfeca789f2 --- /dev/null +++ b/internal/adapters/cloudformation/aws/ssm/ssm.go @@ -0,0 +1,13 @@ +package ssm + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ssm" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) ssm.SSM {
+	return ssm.SSM{
+		Secrets: getSecrets(cfFile),
+	}
+}
diff --git a/internal/adapters/cloudformation/aws/workspaces/workspace.go b/internal/adapters/cloudformation/aws/workspaces/workspace.go
new file mode 100644
index 000000000000..267c9aac46d3
--- /dev/null
+++ b/internal/adapters/cloudformation/aws/workspaces/workspace.go
@@ -0,0 +1,29 @@
+package workspaces
+
+import (
+	"github.com/aquasecurity/defsec/pkg/providers/aws/workspaces"
+	"github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser"
+)
+
+// getWorkSpaces adapts every AWS::WorkSpaces::Workspace resource in the template.
+func getWorkSpaces(ctx parser.FileContext) (workSpaces []workspaces.WorkSpace) {
+	for _, r := range ctx.GetResourcesByType("AWS::WorkSpaces::Workspace") {
+		// volumeFor builds a Volume whose encryption flag comes from the named property.
+		volumeFor := func(encryptionProp string) workspaces.Volume {
+			return workspaces.Volume{
+				Metadata: r.Metadata(),
+				Encryption: workspaces.Encryption{
+					Metadata: r.Metadata(),
+					Enabled:  r.GetBoolProperty(encryptionProp),
+				},
+			}
+		}
+
+		workSpaces = append(workSpaces, workspaces.WorkSpace{
+			Metadata:   r.Metadata(),
+			RootVolume: volumeFor("RootVolumeEncryptionEnabled"),
+			UserVolume: volumeFor("UserVolumeEncryptionEnabled"),
+		})
+	}
+	return workSpaces
+}
diff --git a/internal/adapters/cloudformation/aws/workspaces/workspaces.go b/internal/adapters/cloudformation/aws/workspaces/workspaces.go
new file mode 100644
index 000000000000..58be41f4a05a
--- /dev/null
+++ b/internal/adapters/cloudformation/aws/workspaces/workspaces.go
@@ -0,0 +1,13 @@
+package workspaces
+
+import (
+	"github.com/aquasecurity/defsec/pkg/providers/aws/workspaces"
+	"github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser"
+)
+
+// Adapt ...
+func Adapt(cfFile parser.FileContext) workspaces.WorkSpaces { + return workspaces.WorkSpaces{ + WorkSpaces: getWorkSpaces(cfFile), + } +} diff --git a/internal/adapters/terraform/adapt.go b/internal/adapters/terraform/adapt.go new file mode 100644 index 000000000000..bbd27e88279d --- /dev/null +++ b/internal/adapters/terraform/adapt.go @@ -0,0 +1,31 @@ +package terraform + +import ( + "github.com/aquasecurity/defsec/pkg/state" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws" + "github.com/aquasecurity/trivy/internal/adapters/terraform/azure" + "github.com/aquasecurity/trivy/internal/adapters/terraform/cloudstack" + "github.com/aquasecurity/trivy/internal/adapters/terraform/digitalocean" + "github.com/aquasecurity/trivy/internal/adapters/terraform/github" + "github.com/aquasecurity/trivy/internal/adapters/terraform/google" + "github.com/aquasecurity/trivy/internal/adapters/terraform/kubernetes" + "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud" + "github.com/aquasecurity/trivy/internal/adapters/terraform/openstack" + "github.com/aquasecurity/trivy/internal/adapters/terraform/oracle" +) + +func Adapt(modules terraform.Modules) *state.State { + return &state.State{ + AWS: aws.Adapt(modules), + Azure: azure.Adapt(modules), + CloudStack: cloudstack.Adapt(modules), + DigitalOcean: digitalocean.Adapt(modules), + GitHub: github.Adapt(modules), + Google: google.Adapt(modules), + Kubernetes: kubernetes.Adapt(modules), + Nifcloud: nifcloud.Adapt(modules), + OpenStack: openstack.Adapt(modules), + Oracle: oracle.Adapt(modules), + } +} diff --git a/internal/adapters/terraform/aws/accessanalyzer/accessanalyzer.go b/internal/adapters/terraform/aws/accessanalyzer/accessanalyzer.go new file mode 100644 index 000000000000..97fcf38713c6 --- /dev/null +++ 
b/internal/adapters/terraform/aws/accessanalyzer/accessanalyzer.go @@ -0,0 +1,40 @@ +package accessanalyzer + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/accessanalyzer" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) accessanalyzer.AccessAnalyzer { + return accessanalyzer.AccessAnalyzer{ + Analyzers: adaptTrails(modules), + } +} + +func adaptTrails(modules terraform.Modules) []accessanalyzer.Analyzer { + var analyzer []accessanalyzer.Analyzer + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_accessanalyzer_analyzer") { + analyzer = append(analyzer, adaptAnalyzers(resource)) + } + } + return analyzer +} + +func adaptAnalyzers(resource *terraform.Block) accessanalyzer.Analyzer { + + analyzerName := resource.GetAttribute("analyzer_name") + analyzerNameAttr := analyzerName.AsStringValueOrDefault("", resource) + + arnAnalyzer := resource.GetAttribute("arn") + arnAnalyzerAttr := arnAnalyzer.AsStringValueOrDefault("", resource) + + return accessanalyzer.Analyzer{ + Metadata: resource.GetMetadata(), + Name: analyzerNameAttr, + ARN: arnAnalyzerAttr, + Active: types.BoolDefault(false, resource.GetMetadata()), + } +} diff --git a/internal/adapters/terraform/aws/adapt.go b/internal/adapters/terraform/aws/adapt.go new file mode 100644 index 000000000000..e18ec4dc1633 --- /dev/null +++ b/internal/adapters/terraform/aws/adapt.go @@ -0,0 +1,79 @@ +package aws + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/apigateway" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/athena" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/cloudfront" + 
"github.com/aquasecurity/trivy/internal/adapters/terraform/aws/cloudtrail" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/cloudwatch" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/codebuild" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/config" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/documentdb" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/dynamodb" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/ec2" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/ecr" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/ecs" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/efs" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/eks" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/elasticache" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/elasticsearch" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/elb" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/emr" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/iam" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/kinesis" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/kms" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/lambda" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/mq" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/msk" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/neptune" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/provider" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/rds" + 
"github.com/aquasecurity/trivy/internal/adapters/terraform/aws/redshift" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/s3" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/sns" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/sqs" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/ssm" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/workspaces" +) + +func Adapt(modules terraform.Modules) aws.AWS { + return aws.AWS{ + Meta: aws.Meta{ + TFProviders: provider.Adapt(modules), + }, + APIGateway: apigateway.Adapt(modules), + Athena: athena.Adapt(modules), + Cloudfront: cloudfront.Adapt(modules), + CloudTrail: cloudtrail.Adapt(modules), + CloudWatch: cloudwatch.Adapt(modules), + CodeBuild: codebuild.Adapt(modules), + Config: config.Adapt(modules), + DocumentDB: documentdb.Adapt(modules), + DynamoDB: dynamodb.Adapt(modules), + EC2: ec2.Adapt(modules), + ECR: ecr.Adapt(modules), + ECS: ecs.Adapt(modules), + EFS: efs.Adapt(modules), + EKS: eks.Adapt(modules), + ElastiCache: elasticache.Adapt(modules), + Elasticsearch: elasticsearch.Adapt(modules), + ELB: elb.Adapt(modules), + EMR: emr.Adapt(modules), + IAM: iam.Adapt(modules), + Kinesis: kinesis.Adapt(modules), + KMS: kms.Adapt(modules), + Lambda: lambda.Adapt(modules), + MQ: mq.Adapt(modules), + MSK: msk.Adapt(modules), + Neptune: neptune.Adapt(modules), + RDS: rds.Adapt(modules), + Redshift: redshift.Adapt(modules), + S3: s3.Adapt(modules), + SNS: sns.Adapt(modules), + SQS: sqs.Adapt(modules), + SSM: ssm.Adapt(modules), + WorkSpaces: workspaces.Adapt(modules), + } +} diff --git a/internal/adapters/terraform/aws/apigateway/adapt.go b/internal/adapters/terraform/aws/apigateway/adapt.go new file mode 100644 index 000000000000..2c6b2cb8d6a6 --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/adapt.go @@ -0,0 +1,21 @@ +package apigateway + +import ( + 
"github.com/aquasecurity/defsec/pkg/providers/aws/apigateway" + v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" + v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) apigateway.APIGateway { + return apigateway.APIGateway{ + V1: v1.APIGateway{ + APIs: adaptAPIsV1(modules), + DomainNames: adaptDomainNamesV1(modules), + }, + V2: v2.APIGateway{ + APIs: adaptAPIsV2(modules), + DomainNames: adaptDomainNamesV2(modules), + }, + } +} diff --git a/internal/adapters/terraform/aws/apigateway/adapt_test.go b/internal/adapters/terraform/aws/apigateway/adapt_test.go new file mode 100644 index 000000000000..269068c3b159 --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/adapt_test.go @@ -0,0 +1,233 @@ +package apigateway + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway" + v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" + v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected apigateway.APIGateway + }{ + { + name: "basic", + terraform: ` +resource "aws_api_gateway_rest_api" "MyDemoAPI" { + name = "MyDemoAPI" + description = "This is my API for demonstration purposes" +} +resource "aws_api_gateway_resource" "example" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id +} +resource "aws_api_gateway_method" "example" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id + resource_id = 
aws_api_gateway_resource.example.id + http_method = "GET" + authorization = "NONE" +} +resource "aws_apigatewayv2_api" "example" { + name = "tfsec" + protocol_type = "HTTP" +} + + +resource "aws_apigatewayv2_stage" "example" { + api_id = aws_apigatewayv2_api.example.id + name = "tfsec" + access_log_settings { + destination_arn = "arn:123" + } +} + +resource "aws_api_gateway_domain_name" "example" { + domain_name = "v1.com" + security_policy = "TLS_1_0" +} + +resource "aws_apigatewayv2_domain_name" "example" { + domain_name = "v2.com" + domain_name_configuration { + security_policy = "TLS_1_2" + } +} +`, + expected: apigateway.APIGateway{ + V1: v1.APIGateway{ + APIs: []v1.API{ + { + Metadata: defsecTypes.Metadata{}, + Name: String("MyDemoAPI"), + Resources: []v1.Resource{ + { + Methods: []v1.Method{ + { + HTTPMethod: String("GET"), + AuthorizationType: String("NONE"), + APIKeyRequired: Bool(false), + }, + }, + }, + }, + }, + }, + DomainNames: []v1.DomainName{ + { + Name: String("v1.com"), + SecurityPolicy: String("TLS_1_0"), + }, + }, + }, + V2: v2.APIGateway{ + APIs: []v2.API{ + { + Name: String("tfsec"), + ProtocolType: String("HTTP"), + Stages: []v2.Stage{ + { + Name: String("tfsec"), + AccessLogging: v2.AccessLogging{ + CloudwatchLogGroupARN: String("arn:123"), + }, + }, + }, + }, + }, + DomainNames: []v2.DomainName{ + { + Name: String("v2.com"), + SecurityPolicy: String("TLS_1_2"), + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Int(i int) defsecTypes.IntValue { + return defsecTypes.Int(i, defsecTypes.NewTestMetadata()) +} + +func Bool(b bool) defsecTypes.BoolValue { + return defsecTypes.Bool(b, defsecTypes.NewTestMetadata()) +} + +func String(s string) defsecTypes.StringValue { + return defsecTypes.String(s, 
defsecTypes.NewTestMetadata()) +} +func TestLines(t *testing.T) { + src := ` + resource "aws_api_gateway_rest_api" "MyDemoAPI" { + name = "MyDemoAPI" + description = "This is my API for demonstration purposes" + } + + resource "aws_api_gateway_resource" "example" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id + } + + resource "aws_api_gateway_method" "example" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id + resource_id = aws_api_gateway_resource.example.id + http_method = "GET" + authorization = "NONE" + api_key_required = true + } + + resource "aws_apigatewayv2_api" "example" { + name = "tfsec" + protocol_type = "HTTP" + } + + resource "aws_apigatewayv2_stage" "example" { + api_id = aws_apigatewayv2_api.example.id + name = "tfsec" + access_log_settings { + destination_arn = "arn:123" + } + } + + resource "aws_api_gateway_domain_name" "example" { + domain_name = "v1.com" + security_policy = "TLS_1_0" + } + + ` + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.V1.APIs, 1) + require.Len(t, adapted.V2.APIs, 1) + require.Len(t, adapted.V1.DomainNames, 1) + + apiV1 := adapted.V1.APIs[0] + apiV2 := adapted.V2.APIs[0] + domainName := adapted.V1.DomainNames[0] + + assert.Equal(t, 2, apiV1.Metadata.Range().GetStartLine()) + assert.Equal(t, 5, apiV1.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, apiV1.Name.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, apiV1.Name.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, apiV1.Resources[0].Methods[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 17, apiV1.Resources[0].Methods[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 14, apiV1.Resources[0].Methods[0].HTTPMethod.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 14, apiV1.Resources[0].Methods[0].HTTPMethod.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 15, apiV1.Resources[0].Methods[0].AuthorizationType.GetMetadata().Range().GetStartLine()) 
+ assert.Equal(t, 15, apiV1.Resources[0].Methods[0].AuthorizationType.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 16, apiV1.Resources[0].Methods[0].APIKeyRequired.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 16, apiV1.Resources[0].Methods[0].APIKeyRequired.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 19, apiV2.Metadata.Range().GetStartLine()) + assert.Equal(t, 22, apiV2.Metadata.Range().GetEndLine()) + + assert.Equal(t, 20, apiV2.Name.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 20, apiV2.Name.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 21, apiV2.ProtocolType.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 21, apiV2.ProtocolType.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 24, apiV2.Stages[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 30, apiV2.Stages[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 26, apiV2.Stages[0].Name.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 26, apiV2.Stages[0].Name.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 27, apiV2.Stages[0].AccessLogging.Metadata.Range().GetStartLine()) + assert.Equal(t, 29, apiV2.Stages[0].AccessLogging.Metadata.Range().GetEndLine()) + + assert.Equal(t, 28, apiV2.Stages[0].AccessLogging.CloudwatchLogGroupARN.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 28, apiV2.Stages[0].AccessLogging.CloudwatchLogGroupARN.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 32, domainName.Metadata.Range().GetStartLine()) + assert.Equal(t, 35, domainName.Metadata.Range().GetEndLine()) + + assert.Equal(t, 33, domainName.Name.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 33, domainName.Name.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 34, domainName.SecurityPolicy.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 34, domainName.SecurityPolicy.GetMetadata().Range().GetEndLine()) + +} diff --git a/internal/adapters/terraform/aws/apigateway/apiv1.go 
b/internal/adapters/terraform/aws/apigateway/apiv1.go new file mode 100644 index 000000000000..e01c9cbdbc94 --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/apiv1.go @@ -0,0 +1,115 @@ +package apigateway + +import ( + v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func adaptAPIResourcesV1(modules terraform.Modules, apiBlock *terraform.Block) []v1.Resource { + var resources []v1.Resource + for _, resourceBlock := range modules.GetReferencingResources(apiBlock, "aws_api_gateway_resource", "rest_api_id") { + method := v1.Resource{ + Metadata: resourceBlock.GetMetadata(), + Methods: adaptAPIMethodsV1(modules, resourceBlock), + } + resources = append(resources, method) + } + return resources +} + +func adaptAPIMethodsV1(modules terraform.Modules, resourceBlock *terraform.Block) []v1.Method { + var methods []v1.Method + for _, methodBlock := range modules.GetReferencingResources(resourceBlock, "aws_api_gateway_method", "resource_id") { + method := v1.Method{ + Metadata: methodBlock.GetMetadata(), + HTTPMethod: methodBlock.GetAttribute("http_method").AsStringValueOrDefault("", methodBlock), + AuthorizationType: methodBlock.GetAttribute("authorization").AsStringValueOrDefault("", methodBlock), + APIKeyRequired: methodBlock.GetAttribute("api_key_required").AsBoolValueOrDefault(false, methodBlock), + } + methods = append(methods, method) + } + return methods +} + +func adaptAPIsV1(modules terraform.Modules) []v1.API { + + var apis []v1.API + apiStageIDs := modules.GetChildResourceIDMapByType("aws_api_gateway_stage") + + for _, apiBlock := range modules.GetResourcesByType("aws_api_gateway_rest_api") { + api := v1.API{ + Metadata: apiBlock.GetMetadata(), + Name: apiBlock.GetAttribute("name").AsStringValueOrDefault("", apiBlock), + Stages: nil, + Resources: adaptAPIResourcesV1(modules, apiBlock), + } + + for 
_, stageBlock := range modules.GetReferencingResources(apiBlock, "aws_api_gateway_stage", "rest_api_id") { + apiStageIDs.Resolve(stageBlock.ID()) + stage := adaptStageV1(stageBlock, modules) + + api.Stages = append(api.Stages, stage) + } + + apis = append(apis, api) + } + + orphanResources := modules.GetResourceByIDs(apiStageIDs.Orphans()...) + + if len(orphanResources) > 0 { + orphanage := v1.API{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Name: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + } + for _, stage := range orphanResources { + orphanage.Stages = append(orphanage.Stages, adaptStageV1(stage, modules)) + } + apis = append(apis, orphanage) + } + + return apis +} + +func adaptStageV1(stageBlock *terraform.Block, modules terraform.Modules) v1.Stage { + stage := v1.Stage{ + Metadata: stageBlock.GetMetadata(), + Name: stageBlock.GetAttribute("name").AsStringValueOrDefault("", stageBlock), + AccessLogging: v1.AccessLogging{ + Metadata: stageBlock.GetMetadata(), + CloudwatchLogGroupARN: defsecTypes.StringDefault("", stageBlock.GetMetadata()), + }, + XRayTracingEnabled: stageBlock.GetAttribute("xray_tracing_enabled").AsBoolValueOrDefault(false, stageBlock), + } + for _, methodSettings := range modules.GetReferencingResources(stageBlock, "aws_api_gateway_method_settings", "stage_name") { + + restMethodSettings := v1.RESTMethodSettings{ + Metadata: methodSettings.GetMetadata(), + Method: defsecTypes.String("", methodSettings.GetMetadata()), + CacheDataEncrypted: defsecTypes.BoolDefault(false, methodSettings.GetMetadata()), + CacheEnabled: defsecTypes.BoolDefault(false, methodSettings.GetMetadata()), + } + + if settings := methodSettings.GetBlock("settings"); settings.IsNotNil() { + if encrypted := settings.GetAttribute("cache_data_encrypted"); encrypted.IsNotNil() { + restMethodSettings.CacheDataEncrypted = settings.GetAttribute("cache_data_encrypted").AsBoolValueOrDefault(false, settings) + } + if encrypted := 
settings.GetAttribute("caching_enabled"); encrypted.IsNotNil() { + restMethodSettings.CacheEnabled = settings.GetAttribute("caching_enabled").AsBoolValueOrDefault(false, settings) + } + } + + stage.RESTMethodSettings = append(stage.RESTMethodSettings, restMethodSettings) + } + + stage.Name = stageBlock.GetAttribute("stage_name").AsStringValueOrDefault("", stageBlock) + if accessLogging := stageBlock.GetBlock("access_log_settings"); accessLogging.IsNotNil() { + stage.AccessLogging.Metadata = accessLogging.GetMetadata() + stage.AccessLogging.CloudwatchLogGroupARN = accessLogging.GetAttribute("destination_arn").AsStringValueOrDefault("", accessLogging) + } else { + stage.AccessLogging.Metadata = stageBlock.GetMetadata() + stage.AccessLogging.CloudwatchLogGroupARN = defsecTypes.StringDefault("", stageBlock.GetMetadata()) + } + + return stage +} diff --git a/internal/adapters/terraform/aws/apigateway/apiv1_test.go b/internal/adapters/terraform/aws/apigateway/apiv1_test.go new file mode 100644 index 000000000000..86b2677f24dc --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/apiv1_test.go @@ -0,0 +1,125 @@ +package apigateway + +import ( + "testing" + + v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" +) + +func Test_adaptAPIMethodsV1(t *testing.T) { + tests := []struct { + name string + terraform string + expected []v1.Method + }{ + { + name: "defaults", + terraform: ` +resource "aws_api_gateway_rest_api" "MyDemoAPI" { + name = "MyDemoAPI" + description = "This is my API for demonstration purposes" +} + +resource "aws_api_gateway_resource" "example" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id +} + +resource "aws_api_gateway_method" "example" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id + resource_id = aws_api_gateway_resource.example.id + http_method = "GET" + 
authorization = "NONE" +} +`, + expected: []v1.Method{ + { + HTTPMethod: String("GET"), + AuthorizationType: String("NONE"), + APIKeyRequired: Bool(false), + }, + }, + }, + { + name: "basic", + terraform: ` +resource "aws_api_gateway_rest_api" "MyDemoAPI" { + name = "MyDemoAPI" + description = "This is my API for demonstration purposes" +} + +resource "aws_api_gateway_resource" "example" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id +} + +resource "aws_api_gateway_method" "example" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id + resource_id = aws_api_gateway_resource.example.id + http_method = "GET" + authorization = "NONE" + api_key_required = true +} +`, + expected: []v1.Method{ + { + HTTPMethod: String("GET"), + AuthorizationType: String("NONE"), + APIKeyRequired: Bool(true), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + restApiBlock := modules.GetBlocks()[1] + adapted := adaptAPIMethodsV1(modules, restApiBlock) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptAPIsV1(t *testing.T) { + tests := []struct { + name string + terraform string + expected []v1.API + }{ + { + name: "defaults", + terraform: ` +resource "aws_api_gateway_rest_api" "example" { + +} +`, + expected: []v1.API{ + { + Name: String(""), + }, + }, + }, + { + name: "full", + terraform: ` +resource "aws_api_gateway_rest_api" "example" { + name = "tfsec" +} +`, + expected: []v1.API{ + { + Name: String("tfsec"), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptAPIsV1(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/aws/apigateway/apiv2.go b/internal/adapters/terraform/aws/apigateway/apiv2.go new file mode 100644 index 
000000000000..811abbc1b81c --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/apiv2.go @@ -0,0 +1,69 @@ +package apigateway + +import ( + v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func adaptAPIsV2(modules terraform.Modules) []v2.API { + + var apis []v2.API + apiStageIDs := modules.GetChildResourceIDMapByType("aws_apigatewayv2_stage") + + for _, module := range modules { + for _, apiBlock := range module.GetResourcesByType("aws_apigatewayv2_api") { + api := v2.API{ + Metadata: apiBlock.GetMetadata(), + Name: apiBlock.GetAttribute("name").AsStringValueOrDefault("", apiBlock), + ProtocolType: apiBlock.GetAttribute("protocol_type").AsStringValueOrDefault("", apiBlock), + Stages: nil, + } + + for _, stageBlock := range module.GetReferencingResources(apiBlock, "aws_apigatewayv2_stage", "api_id") { + apiStageIDs.Resolve(stageBlock.ID()) + + stage := adaptStageV2(stageBlock) + + api.Stages = append(api.Stages, stage) + } + + apis = append(apis, api) + } + } + + orphanResources := modules.GetResourceByIDs(apiStageIDs.Orphans()...) 
+ if len(orphanResources) > 0 { + orphanage := v2.API{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Name: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + ProtocolType: defsecTypes.StringUnresolvable(defsecTypes.NewUnmanagedMetadata()), + Stages: nil, + } + for _, stage := range orphanResources { + orphanage.Stages = append(orphanage.Stages, adaptStageV2(stage)) + } + apis = append(apis, orphanage) + } + + return apis +} + +func adaptStageV2(stageBlock *terraform.Block) v2.Stage { + stage := v2.Stage{ + Metadata: stageBlock.GetMetadata(), + Name: stageBlock.GetAttribute("name").AsStringValueOrDefault("", stageBlock), + AccessLogging: v2.AccessLogging{ + Metadata: stageBlock.GetMetadata(), + CloudwatchLogGroupARN: defsecTypes.StringDefault("", stageBlock.GetMetadata()), + }, + } + if accessLogging := stageBlock.GetBlock("access_log_settings"); accessLogging.IsNotNil() { + stage.AccessLogging.Metadata = accessLogging.GetMetadata() + stage.AccessLogging.CloudwatchLogGroupARN = accessLogging.GetAttribute("destination_arn").AsStringValueOrDefault("", accessLogging) + } else { + stage.AccessLogging.Metadata = stageBlock.GetMetadata() + stage.AccessLogging.CloudwatchLogGroupARN = defsecTypes.StringDefault("", stageBlock.GetMetadata()) + } + return stage +} diff --git a/internal/adapters/terraform/aws/apigateway/apiv2_test.go b/internal/adapters/terraform/aws/apigateway/apiv2_test.go new file mode 100644 index 000000000000..818f96dfbcb6 --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/apiv2_test.go @@ -0,0 +1,103 @@ +package apigateway + +import ( + "testing" + + v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" +) + +func Test_adaptAPIsV2(t *testing.T) { + tests := []struct { + name string + terraform string + expected []v2.API + }{ + { + name: "defaults", + terraform: 
` +resource "aws_apigatewayv2_api" "example" { + protocol_type = "HTTP" +} +`, + expected: []v2.API{ + { + Name: String(""), + ProtocolType: String("HTTP"), + }, + }, + }, + { + name: "full", + terraform: ` +resource "aws_apigatewayv2_api" "example" { + name = "tfsec" + protocol_type = "HTTP" +} +`, + expected: []v2.API{ + { + Name: String("tfsec"), + ProtocolType: String("HTTP"), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptAPIsV2(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptStageV2(t *testing.T) { + tests := []struct { + name string + terraform string + expected v2.Stage + }{ + { + name: "defaults", + terraform: ` +resource "aws_apigatewayv2_stage" "example" { + +} +`, + expected: v2.Stage{ + Name: String(""), + AccessLogging: v2.AccessLogging{ + CloudwatchLogGroupARN: String(""), + }, + }, + }, + { + name: "basics", + terraform: ` +resource "aws_apigatewayv2_stage" "example" { + name = "tfsec" + access_log_settings { + destination_arn = "arn:123" + } +} +`, + expected: v2.Stage{ + Name: String("tfsec"), + AccessLogging: v2.AccessLogging{ + CloudwatchLogGroupARN: String("arn:123"), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptStageV2(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/aws/apigateway/namesv1.go b/internal/adapters/terraform/aws/apigateway/namesv1.go new file mode 100644 index 000000000000..bec491d6e8a2 --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/namesv1.go @@ -0,0 +1,24 @@ +package apigateway + +import ( + v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" + 
"github.com/aquasecurity/defsec/pkg/terraform" +) + +func adaptDomainNamesV1(modules terraform.Modules) []v1.DomainName { + + var domainNames []v1.DomainName + + for _, module := range modules { + for _, nameBlock := range module.GetResourcesByType("aws_api_gateway_domain_name") { + domainName := v1.DomainName{ + Metadata: nameBlock.GetMetadata(), + Name: nameBlock.GetAttribute("domain_name").AsStringValueOrDefault("", nameBlock), + SecurityPolicy: nameBlock.GetAttribute("security_policy").AsStringValueOrDefault("TLS_1_0", nameBlock), + } + domainNames = append(domainNames, domainName) + } + } + + return domainNames +} diff --git a/internal/adapters/terraform/aws/apigateway/namesv1_test.go b/internal/adapters/terraform/aws/apigateway/namesv1_test.go new file mode 100644 index 000000000000..72ace2eca839 --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/namesv1_test.go @@ -0,0 +1,54 @@ +package apigateway + +import ( + "testing" + + v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" +) + +func Test_adaptDomainNamesV1(t *testing.T) { + tests := []struct { + name string + terraform string + expected []v1.DomainName + }{ + { + name: "defaults", + terraform: ` +resource "aws_api_gateway_domain_name" "example" { +} +`, + expected: []v1.DomainName{ + { + Name: String(""), + SecurityPolicy: String("TLS_1_0"), + }, + }, + }, + { + name: "basic", + terraform: ` +resource "aws_api_gateway_domain_name" "example" { + domain_name = "testing.com" + security_policy = "TLS_1_2" +} +`, + expected: []v1.DomainName{ + { + Name: String("testing.com"), + SecurityPolicy: String("TLS_1_2"), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptDomainNamesV1(modules) + 
testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/aws/apigateway/namesv2.go b/internal/adapters/terraform/aws/apigateway/namesv2.go new file mode 100644 index 000000000000..f526ba793850 --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/namesv2.go @@ -0,0 +1,28 @@ +package apigateway + +import ( + v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func adaptDomainNamesV2(modules terraform.Modules) []v2.DomainName { + + var domainNames []v2.DomainName + + for _, module := range modules { + for _, nameBlock := range module.GetResourcesByType("aws_apigatewayv2_domain_name") { + domainName := v2.DomainName{ + Metadata: nameBlock.GetMetadata(), + Name: nameBlock.GetAttribute("domain_name").AsStringValueOrDefault("", nameBlock), + SecurityPolicy: types.StringDefault("TLS_1_0", nameBlock.GetMetadata()), + } + if config := nameBlock.GetBlock("domain_name_configuration"); config.IsNotNil() { + domainName.SecurityPolicy = config.GetAttribute("security_policy").AsStringValueOrDefault("TLS_1_0", config) + } + domainNames = append(domainNames, domainName) + } + } + + return domainNames +} diff --git a/internal/adapters/terraform/aws/apigateway/namesv2_test.go b/internal/adapters/terraform/aws/apigateway/namesv2_test.go new file mode 100644 index 000000000000..ba7941f8e935 --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/namesv2_test.go @@ -0,0 +1,56 @@ +package apigateway + +import ( + "testing" + + v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" +) + +func Test_adaptDomainNamesV2(t *testing.T) { + tests := []struct { + name string + terraform string + expected []v2.DomainName + }{ + { + 
name: "defaults", + terraform: ` +resource "aws_apigatewayv2_domain_name" "example" { +} +`, + expected: []v2.DomainName{ + { + Name: String(""), + SecurityPolicy: String("TLS_1_0"), + }, + }, + }, + { + name: "fully populated", + terraform: ` +resource "aws_apigatewayv2_domain_name" "example" { + domain_name = "testing.com" + domain_name_configuration { + security_policy = "TLS_1_2" + } +} +`, + expected: []v2.DomainName{ + { + Name: String("testing.com"), + SecurityPolicy: String("TLS_1_2"), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptDomainNamesV2(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/aws/athena/adapt.go b/internal/adapters/terraform/aws/athena/adapt.go new file mode 100644 index 000000000000..0f72c457551b --- /dev/null +++ b/internal/adapters/terraform/aws/athena/adapt.go @@ -0,0 +1,80 @@ +package athena + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/athena" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) athena.Athena { + return athena.Athena{ + Databases: adaptDatabases(modules), + Workgroups: adaptWorkgroups(modules), + } +} + +func adaptDatabases(modules terraform.Modules) []athena.Database { + var databases []athena.Database + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_athena_database") { + databases = append(databases, adaptDatabase(resource)) + } + } + return databases +} + +func adaptWorkgroups(modules terraform.Modules) []athena.Workgroup { + var workgroups []athena.Workgroup + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_athena_workgroup") { + workgroups = append(workgroups, adaptWorkgroup(resource)) + 
} + } + return workgroups +} + +func adaptDatabase(resource *terraform.Block) athena.Database { + database := athena.Database{ + Metadata: resource.GetMetadata(), + Name: resource.GetAttribute("name").AsStringValueOrDefault("", resource), + Encryption: athena.EncryptionConfiguration{ + Metadata: resource.GetMetadata(), + Type: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + } + if encryptionConfigBlock := resource.GetBlock("encryption_configuration"); encryptionConfigBlock.IsNotNil() { + database.Encryption.Metadata = encryptionConfigBlock.GetMetadata() + encryptionOptionAttr := encryptionConfigBlock.GetAttribute("encryption_option") + database.Encryption.Type = encryptionOptionAttr.AsStringValueOrDefault("", encryptionConfigBlock) + } + + return database +} + +func adaptWorkgroup(resource *terraform.Block) athena.Workgroup { + workgroup := athena.Workgroup{ + Metadata: resource.GetMetadata(), + Name: resource.GetAttribute("name").AsStringValueOrDefault("", resource), + Encryption: athena.EncryptionConfiguration{ + Metadata: resource.GetMetadata(), + Type: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + EnforceConfiguration: defsecTypes.BoolDefault(false, resource.GetMetadata()), + } + + if configBlock := resource.GetBlock("configuration"); configBlock.IsNotNil() { + + enforceWGConfigAttr := configBlock.GetAttribute("enforce_workgroup_configuration") + workgroup.EnforceConfiguration = enforceWGConfigAttr.AsBoolValueOrDefault(true, configBlock) + + if resultConfigBlock := configBlock.GetBlock("result_configuration"); configBlock.IsNotNil() { + if encryptionConfigBlock := resultConfigBlock.GetBlock("encryption_configuration"); encryptionConfigBlock.IsNotNil() { + encryptionOptionAttr := encryptionConfigBlock.GetAttribute("encryption_option") + workgroup.Encryption.Metadata = encryptionConfigBlock.GetMetadata() + workgroup.Encryption.Type = encryptionOptionAttr.AsStringValueOrDefault("", encryptionConfigBlock) + } + } + } + + return 
workgroup +} diff --git a/internal/adapters/terraform/aws/athena/adapt_test.go b/internal/adapters/terraform/aws/athena/adapt_test.go new file mode 100644 index 000000000000..179d1461df57 --- /dev/null +++ b/internal/adapters/terraform/aws/athena/adapt_test.go @@ -0,0 +1,211 @@ +package athena + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/athena" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptDatabase(t *testing.T) { + tests := []struct { + name string + terraform string + expected athena.Database + }{ + { + name: "athena database", + terraform: ` + resource "aws_athena_database" "my_wg" { + name = "database_name" + + encryption_configuration { + encryption_option = "SSE_KMS" + } + } +`, + expected: athena.Database{ + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("database_name", defsecTypes.NewTestMetadata()), + Encryption: athena.EncryptionConfiguration{ + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String(athena.EncryptionTypeSSEKMS, defsecTypes.NewTestMetadata()), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptDatabase(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptWorkgroup(t *testing.T) { + tests := []struct { + name string + terraform string + expected athena.Workgroup + }{ + { + name: "encryption type SSE KMS", + terraform: ` + resource "aws_athena_workgroup" "my_wg" { + name = "example" + + configuration { + enforce_workgroup_configuration = true + + result_configuration { + encryption_configuration { + 
encryption_option = "SSE_KMS" + } + } + } + } +`, + expected: athena.Workgroup{ + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("example", defsecTypes.NewTestMetadata()), + Encryption: athena.EncryptionConfiguration{ + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String(athena.EncryptionTypeSSEKMS, defsecTypes.NewTestMetadata()), + }, + EnforceConfiguration: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "configuration not enforced", + terraform: ` + resource "aws_athena_workgroup" "my_wg" { + name = "example" + + configuration { + enforce_workgroup_configuration = false + + result_configuration { + encryption_configuration { + encryption_option = "SSE_KMS" + } + } + } + } +`, + expected: athena.Workgroup{ + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("example", defsecTypes.NewTestMetadata()), + Encryption: athena.EncryptionConfiguration{ + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String(athena.EncryptionTypeSSEKMS, defsecTypes.NewTestMetadata()), + }, + EnforceConfiguration: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "enforce configuration defaults to true", + terraform: ` + resource "aws_athena_workgroup" "my_wg" { + name = "example" + + configuration { + result_configuration { + encryption_configuration { + encryption_option = "" + } + } + } + } +`, + expected: athena.Workgroup{ + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("example", defsecTypes.NewTestMetadata()), + Encryption: athena.EncryptionConfiguration{ + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String(athena.EncryptionTypeNone, defsecTypes.NewTestMetadata()), + }, + EnforceConfiguration: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "missing configuration block", + terraform: ` + resource "aws_athena_workgroup" "my_wg" { + name = "example" + } +`, + expected: athena.Workgroup{ + 
Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("example", defsecTypes.NewTestMetadata()), + Encryption: athena.EncryptionConfiguration{ + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String(athena.EncryptionTypeNone, defsecTypes.NewTestMetadata()), + }, + EnforceConfiguration: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptWorkgroup(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_athena_database" "good_example" { + name = "database_name" + bucket = aws_s3_bucket.hoge.bucket + + encryption_configuration { + encryption_option = "SSE_KMS" + kms_key_arn = aws_kms_key.example.arn + } + } + + resource "aws_athena_workgroup" "good_example" { + name = "example" + + configuration { + enforce_workgroup_configuration = true + publish_cloudwatch_metrics_enabled = true + + result_configuration { + output_location = "s3://${aws_s3_bucket.example.bucket}/output/" + + encryption_configuration { + encryption_option = "SSE_KMS" + kms_key_arn = aws_kms_key.example.arn + } + } + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Databases, 1) + require.Len(t, adapted.Workgroups, 1) + + assert.Equal(t, 7, adapted.Databases[0].Encryption.Type.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, adapted.Databases[0].Encryption.Type.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 16, adapted.Workgroups[0].EnforceConfiguration.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 16, adapted.Workgroups[0].EnforceConfiguration.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 23, adapted.Workgroups[0].Encryption.Type.GetMetadata().Range().GetStartLine()) + 
assert.Equal(t, 23, adapted.Workgroups[0].Encryption.Type.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/cloudfront/adapt.go b/internal/adapters/terraform/aws/cloudfront/adapt.go new file mode 100644 index 000000000000..dba2662f9b9c --- /dev/null +++ b/internal/adapters/terraform/aws/cloudfront/adapt.go @@ -0,0 +1,79 @@ +package cloudfront + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudfront" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) cloudfront.Cloudfront { + return cloudfront.Cloudfront{ + Distributions: adaptDistributions(modules), + } +} + +func adaptDistributions(modules terraform.Modules) []cloudfront.Distribution { + var distributions []cloudfront.Distribution + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_cloudfront_distribution") { + distributions = append(distributions, adaptDistribution(resource)) + } + } + return distributions +} + +func adaptDistribution(resource *terraform.Block) cloudfront.Distribution { + + distribution := cloudfront.Distribution{ + Metadata: resource.GetMetadata(), + WAFID: types.StringDefault("", resource.GetMetadata()), + Logging: cloudfront.Logging{ + Metadata: resource.GetMetadata(), + Bucket: types.StringDefault("", resource.GetMetadata()), + }, + DefaultCacheBehaviour: cloudfront.CacheBehaviour{ + Metadata: resource.GetMetadata(), + ViewerProtocolPolicy: types.String("allow-all", resource.GetMetadata()), + }, + OrdererCacheBehaviours: nil, + ViewerCertificate: cloudfront.ViewerCertificate{ + Metadata: resource.GetMetadata(), + MinimumProtocolVersion: types.StringDefault("TLSv1", resource.GetMetadata()), + }, + } + + distribution.WAFID = resource.GetAttribute("web_acl_id").AsStringValueOrDefault("", resource) + + if loggingBlock := resource.GetBlock("logging_config"); loggingBlock.IsNotNil() { + 
distribution.Logging.Metadata = loggingBlock.GetMetadata() + bucketAttr := loggingBlock.GetAttribute("bucket") + distribution.Logging.Bucket = bucketAttr.AsStringValueOrDefault("", loggingBlock) + } + + if defaultCacheBlock := resource.GetBlock("default_cache_behavior"); defaultCacheBlock.IsNotNil() { + distribution.DefaultCacheBehaviour.Metadata = defaultCacheBlock.GetMetadata() + viewerProtocolPolicyAttr := defaultCacheBlock.GetAttribute("viewer_protocol_policy") + distribution.DefaultCacheBehaviour.ViewerProtocolPolicy = viewerProtocolPolicyAttr.AsStringValueOrDefault("allow-all", defaultCacheBlock) + } + + orderedCacheBlocks := resource.GetBlocks("ordered_cache_behavior") + for _, orderedCacheBlock := range orderedCacheBlocks { + viewerProtocolPolicyAttr := orderedCacheBlock.GetAttribute("viewer_protocol_policy") + viewerProtocolPolicyVal := viewerProtocolPolicyAttr.AsStringValueOrDefault("allow-all", orderedCacheBlock) + distribution.OrdererCacheBehaviours = append(distribution.OrdererCacheBehaviours, cloudfront.CacheBehaviour{ + Metadata: orderedCacheBlock.GetMetadata(), + ViewerProtocolPolicy: viewerProtocolPolicyVal, + }) + } + + if viewerCertBlock := resource.GetBlock("viewer_certificate"); viewerCertBlock.IsNotNil() { + distribution.ViewerCertificate = cloudfront.ViewerCertificate{ + Metadata: viewerCertBlock.GetMetadata(), + MinimumProtocolVersion: viewerCertBlock.GetAttribute("minimum_protocol_version").AsStringValueOrDefault("TLSv1", viewerCertBlock), + SSLSupportMethod: viewerCertBlock.GetAttribute("ssl_support_method").AsStringValueOrDefault("", viewerCertBlock), + CloudfrontDefaultCertificate: viewerCertBlock.GetAttribute("cloudfront_default_certificate").AsBoolValueOrDefault(false, viewerCertBlock), + } + } + + return distribution +} diff --git a/internal/adapters/terraform/aws/cloudfront/adapt_test.go b/internal/adapters/terraform/aws/cloudfront/adapt_test.go new file mode 100644 index 000000000000..a10f321cfe56 --- /dev/null +++ 
b/internal/adapters/terraform/aws/cloudfront/adapt_test.go @@ -0,0 +1,163 @@ +package cloudfront + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudfront" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptDistribution(t *testing.T) { + tests := []struct { + name string + terraform string + expected cloudfront.Distribution + }{ + { + name: "configured", + terraform: ` + resource "aws_cloudfront_distribution" "example" { + logging_config { + bucket = "mylogs.s3.amazonaws.com" + } + + web_acl_id = "waf_id" + + default_cache_behavior { + viewer_protocol_policy = "redirect-to-https" + } + + ordered_cache_behavior { + viewer_protocol_policy = "redirect-to-https" + } + + viewer_certificate { + cloudfront_default_certificate = true + minimum_protocol_version = "TLSv1.2_2021" + ssl_support_method = "sni-only" + } + } +`, + expected: cloudfront.Distribution{ + Metadata: defsecTypes.NewTestMetadata(), + WAFID: defsecTypes.String("waf_id", defsecTypes.NewTestMetadata()), + Logging: cloudfront.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + Bucket: defsecTypes.String("mylogs.s3.amazonaws.com", defsecTypes.NewTestMetadata()), + }, + DefaultCacheBehaviour: cloudfront.CacheBehaviour{ + Metadata: defsecTypes.NewTestMetadata(), + ViewerProtocolPolicy: defsecTypes.String("redirect-to-https", defsecTypes.NewTestMetadata()), + }, + OrdererCacheBehaviours: []cloudfront.CacheBehaviour{ + { + Metadata: defsecTypes.NewTestMetadata(), + ViewerProtocolPolicy: defsecTypes.String("redirect-to-https", defsecTypes.NewTestMetadata()), + }, + }, + ViewerCertificate: cloudfront.ViewerCertificate{ + Metadata: defsecTypes.NewTestMetadata(), + MinimumProtocolVersion: 
defsecTypes.String("TLSv1.2_2021", defsecTypes.NewTestMetadata()), + CloudfrontDefaultCertificate: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + SSLSupportMethod: defsecTypes.String("sni-only", defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "defaults", + terraform: ` + resource "aws_cloudfront_distribution" "example" { + } +`, + expected: cloudfront.Distribution{ + Metadata: defsecTypes.NewTestMetadata(), + WAFID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Logging: cloudfront.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + Bucket: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + DefaultCacheBehaviour: cloudfront.CacheBehaviour{ + Metadata: defsecTypes.NewTestMetadata(), + ViewerProtocolPolicy: defsecTypes.String("allow-all", defsecTypes.NewTestMetadata()), + }, + + ViewerCertificate: cloudfront.ViewerCertificate{ + Metadata: defsecTypes.NewTestMetadata(), + MinimumProtocolVersion: defsecTypes.String("TLSv1", defsecTypes.NewTestMetadata()), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptDistribution(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_cloudfront_distribution" "example" { + logging_config { + bucket = "mylogs.s3.amazonaws.com" + } + + web_acl_id = "waf_id" + + default_cache_behavior { + viewer_protocol_policy = "redirect-to-https" + } + + ordered_cache_behavior { + viewer_protocol_policy = "redirect-to-https" + } + + viewer_certificate { + cloudfront_default_certificate = true + minimum_protocol_version = "TLSv1.2_2021" + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Distributions, 1) + distribution := adapted.Distributions[0] + + assert.Equal(t, 2, 
distribution.Metadata.Range().GetStartLine()) + assert.Equal(t, 21, distribution.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, distribution.Logging.Metadata.Range().GetStartLine()) + assert.Equal(t, 5, distribution.Logging.Metadata.Range().GetEndLine()) + + assert.Equal(t, 7, distribution.WAFID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, distribution.WAFID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 9, distribution.DefaultCacheBehaviour.Metadata.Range().GetStartLine()) + assert.Equal(t, 11, distribution.DefaultCacheBehaviour.Metadata.Range().GetEndLine()) + + assert.Equal(t, 10, distribution.DefaultCacheBehaviour.ViewerProtocolPolicy.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, distribution.DefaultCacheBehaviour.ViewerProtocolPolicy.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 13, distribution.OrdererCacheBehaviours[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 15, distribution.OrdererCacheBehaviours[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 14, distribution.OrdererCacheBehaviours[0].ViewerProtocolPolicy.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 14, distribution.OrdererCacheBehaviours[0].ViewerProtocolPolicy.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 17, distribution.ViewerCertificate.Metadata.Range().GetStartLine()) + assert.Equal(t, 20, distribution.ViewerCertificate.Metadata.Range().GetEndLine()) + + assert.Equal(t, 19, distribution.ViewerCertificate.MinimumProtocolVersion.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 19, distribution.ViewerCertificate.MinimumProtocolVersion.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/cloudtrail/adapt.go b/internal/adapters/terraform/aws/cloudtrail/adapt.go new file mode 100644 index 000000000000..0a9e8b0d5828 --- /dev/null +++ b/internal/adapters/terraform/aws/cloudtrail/adapt.go @@ -0,0 +1,67 @@ +package cloudtrail + +import ( + 
"github.com/aquasecurity/defsec/pkg/providers/aws/cloudtrail" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) cloudtrail.CloudTrail { + return cloudtrail.CloudTrail{ + Trails: adaptTrails(modules), + } +} + +func adaptTrails(modules terraform.Modules) []cloudtrail.Trail { + var trails []cloudtrail.Trail + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_cloudtrail") { + trails = append(trails, adaptTrail(resource)) + } + } + return trails +} + +func adaptTrail(resource *terraform.Block) cloudtrail.Trail { + nameAttr := resource.GetAttribute("name") + nameVal := nameAttr.AsStringValueOrDefault("", resource) + + enableLogFileValidationAttr := resource.GetAttribute("enable_log_file_validation") + enableLogFileValidationVal := enableLogFileValidationAttr.AsBoolValueOrDefault(false, resource) + + isMultiRegionAttr := resource.GetAttribute("is_multi_region_trail") + isMultiRegionVal := isMultiRegionAttr.AsBoolValueOrDefault(false, resource) + + KMSKeyIDAttr := resource.GetAttribute("kms_key_id") + KMSKeyIDVal := KMSKeyIDAttr.AsStringValueOrDefault("", resource) + + var selectors []cloudtrail.EventSelector + for _, selBlock := range resource.GetBlocks("event_selector") { + var resources []cloudtrail.DataResource + for _, resBlock := range selBlock.GetBlocks("data_resource") { + resources = append(resources, cloudtrail.DataResource{ + Metadata: resBlock.GetMetadata(), + Type: resBlock.GetAttribute("type").AsStringValueOrDefault("", resBlock), + Values: resBlock.GetAttribute("values").AsStringValues(), + }) + } + selector := cloudtrail.EventSelector{ + Metadata: selBlock.GetMetadata(), + DataResources: resources, + ReadWriteType: selBlock.GetAttribute("read_write_type").AsStringValueOrDefault("All", selBlock), + } + selectors = append(selectors, selector) + } + + return cloudtrail.Trail{ + Metadata: resource.GetMetadata(), + Name: nameVal, + EnableLogFileValidation: 
enableLogFileValidationVal, + IsMultiRegion: isMultiRegionVal, + KMSKeyID: KMSKeyIDVal, + CloudWatchLogsLogGroupArn: resource.GetAttribute("cloud_watch_logs_group_arn").AsStringValueOrDefault("", resource), + IsLogging: resource.GetAttribute("enable_logging").AsBoolValueOrDefault(true, resource), + BucketName: resource.GetAttribute("s3_bucket_name").AsStringValueOrDefault("", resource), + EventSelectors: selectors, + } +} diff --git a/internal/adapters/terraform/aws/cloudtrail/adapt_test.go b/internal/adapters/terraform/aws/cloudtrail/adapt_test.go new file mode 100644 index 000000000000..8a1df4d136e5 --- /dev/null +++ b/internal/adapters/terraform/aws/cloudtrail/adapt_test.go @@ -0,0 +1,106 @@ +package cloudtrail + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudtrail" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptTrail(t *testing.T) { + tests := []struct { + name string + terraform string + expected cloudtrail.Trail + }{ + { + name: "configured", + terraform: ` + resource "aws_cloudtrail" "example" { + name = "example" + is_multi_region_trail = true + + enable_log_file_validation = true + kms_key_id = "kms-key" + s3_bucket_name = "abcdefgh" + cloud_watch_logs_group_arn = "abc" + enable_logging = false + } +`, + expected: cloudtrail.Trail{ + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("example", defsecTypes.NewTestMetadata()), + EnableLogFileValidation: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + IsMultiRegion: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("kms-key", defsecTypes.NewTestMetadata()), + CloudWatchLogsLogGroupArn: defsecTypes.String("abc", 
defsecTypes.NewTestMetadata()), + IsLogging: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + BucketName: defsecTypes.String("abcdefgh", defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defaults", + terraform: ` + resource "aws_cloudtrail" "example" { + } +`, + expected: cloudtrail.Trail{ + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("", defsecTypes.NewTestMetadata()), + EnableLogFileValidation: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + IsMultiRegion: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + BucketName: defsecTypes.String("", defsecTypes.NewTestMetadata()), + CloudWatchLogsLogGroupArn: defsecTypes.String("", defsecTypes.NewTestMetadata()), + IsLogging: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptTrail(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_cloudtrail" "example" { + name = "example" + is_multi_region_trail = true + + enable_log_file_validation = true + kms_key_id = "kms-key" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Trails, 1) + trail := adapted.Trails[0] + + assert.Equal(t, 2, trail.Metadata.Range().GetStartLine()) + assert.Equal(t, 8, trail.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, trail.Name.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, trail.Name.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, trail.IsMultiRegion.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, trail.IsMultiRegion.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 6, 
trail.EnableLogFileValidation.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, trail.EnableLogFileValidation.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 7, trail.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, trail.KMSKeyID.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/cloudwatch/adapt.go b/internal/adapters/terraform/aws/cloudwatch/adapt.go new file mode 100644 index 000000000000..3e3a378e0b8d --- /dev/null +++ b/internal/adapters/terraform/aws/cloudwatch/adapt.go @@ -0,0 +1,47 @@ +package cloudwatch + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudwatch" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) cloudwatch.CloudWatch { + return cloudwatch.CloudWatch{ + LogGroups: adaptLogGroups(modules), + } +} + +func adaptLogGroups(modules terraform.Modules) []cloudwatch.LogGroup { + var logGroups []cloudwatch.LogGroup + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_cloudwatch_log_group") { + logGroups = append(logGroups, adaptLogGroup(resource, module)) + } + } + return logGroups +} + +func adaptLogGroup(resource *terraform.Block, module *terraform.Module) cloudwatch.LogGroup { + nameAttr := resource.GetAttribute("name") + nameVal := nameAttr.AsStringValueOrDefault("", resource) + + KMSKeyIDAttr := resource.GetAttribute("kms_key_id") + KMSKeyIDVal := KMSKeyIDAttr.AsStringValueOrDefault("", resource) + + if keyBlock, err := module.GetReferencedBlock(KMSKeyIDAttr, resource); err == nil { + KMSKeyIDVal = types.String(keyBlock.FullName(), keyBlock.GetMetadata()) + } + + retentionInDaysAttr := resource.GetAttribute("retention_in_days") + retentionInDaysVal := retentionInDaysAttr.AsIntValueOrDefault(0, resource) + + return cloudwatch.LogGroup{ + Metadata: resource.GetMetadata(), + Arn: types.StringDefault("", 
resource.GetMetadata()), + Name: nameVal, + KMSKeyID: KMSKeyIDVal, + RetentionInDays: retentionInDaysVal, + MetricFilters: nil, + } +} diff --git a/internal/adapters/terraform/aws/cloudwatch/adapt_test.go b/internal/adapters/terraform/aws/cloudwatch/adapt_test.go new file mode 100644 index 000000000000..6272585b45d7 --- /dev/null +++ b/internal/adapters/terraform/aws/cloudwatch/adapt_test.go @@ -0,0 +1,114 @@ +package cloudwatch + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudwatch" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptLogGroups(t *testing.T) { + tests := []struct { + name string + terraform string + expected []cloudwatch.LogGroup + }{ + { + name: "key referencing block", + terraform: ` + resource "aws_cloudwatch_log_group" "my-group" { + name = "my-group" + kms_key_id = aws_kms_key.log_key.arn + } + + resource "aws_kms_key" "log_key" { + } +`, + expected: []cloudwatch.LogGroup{ + { + Metadata: defsecTypes.NewTestMetadata(), + Arn: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Name: defsecTypes.String("my-group", defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("aws_kms_key.log_key", defsecTypes.NewTestMetadata()), + RetentionInDays: defsecTypes.Int(0, defsecTypes.NewTestMetadata()), + MetricFilters: nil, + }, + }, + }, + { + name: "key as string", + terraform: ` + resource "aws_cloudwatch_log_group" "my-group" { + name = "my-group" + kms_key_id = "key-as-string" + } +`, + expected: []cloudwatch.LogGroup{ + { + Metadata: defsecTypes.NewTestMetadata(), + Arn: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Name: defsecTypes.String("my-group", defsecTypes.NewTestMetadata()), + KMSKeyID: 
defsecTypes.String("key-as-string", defsecTypes.NewTestMetadata()), + RetentionInDays: defsecTypes.Int(0, defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "missing key", + terraform: ` + resource "aws_cloudwatch_log_group" "my-group" { + name = "my-group" + retention_in_days = 3 + } +`, + expected: []cloudwatch.LogGroup{ + { + Metadata: defsecTypes.NewTestMetadata(), + Arn: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Name: defsecTypes.String("my-group", defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + RetentionInDays: defsecTypes.Int(3, defsecTypes.NewTestMetadata()), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptLogGroups(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_cloudwatch_log_group" "my-group" { + name = "my-group" + kms_key_id = aws_kms_key.log_key.arn + retention_in_days = 3 + + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + require.Len(t, adapted.LogGroups, 1) + logGroup := adapted.LogGroups[0] + + assert.Equal(t, 3, logGroup.Name.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, logGroup.Name.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, logGroup.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, logGroup.KMSKeyID.GetMetadata().Range().GetStartLine()) + + assert.Equal(t, 5, logGroup.RetentionInDays.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 5, logGroup.RetentionInDays.GetMetadata().Range().GetStartLine()) +} diff --git a/internal/adapters/terraform/aws/codebuild/adapt.go b/internal/adapters/terraform/aws/codebuild/adapt.go new file mode 100644 index 000000000000..7870ff26eae0 --- /dev/null +++ b/internal/adapters/terraform/aws/codebuild/adapt.go @@ -0,0 
+1,66 @@ +package codebuild + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/codebuild" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) codebuild.CodeBuild { + return codebuild.CodeBuild{ + Projects: adaptProjects(modules), + } +} + +func adaptProjects(modules terraform.Modules) []codebuild.Project { + var projects []codebuild.Project + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_codebuild_project") { + projects = append(projects, adaptProject(resource)) + } + } + return projects +} + +func adaptProject(resource *terraform.Block) codebuild.Project { + + project := codebuild.Project{ + Metadata: resource.GetMetadata(), + ArtifactSettings: codebuild.ArtifactSettings{ + Metadata: resource.GetMetadata(), + EncryptionEnabled: types.BoolDefault(true, resource.GetMetadata()), + }, + SecondaryArtifactSettings: nil, + } + + var hasArtifacts bool + + if artifactsBlock := resource.GetBlock("artifacts"); artifactsBlock.IsNotNil() { + project.ArtifactSettings.Metadata = artifactsBlock.GetMetadata() + typeAttr := artifactsBlock.GetAttribute("type") + encryptionDisabledAttr := artifactsBlock.GetAttribute("encryption_disabled") + hasArtifacts = typeAttr.NotEqual("NO_ARTIFACTS") + if encryptionDisabledAttr.IsTrue() && hasArtifacts { + project.ArtifactSettings.EncryptionEnabled = types.Bool(false, artifactsBlock.GetMetadata()) + } else { + project.ArtifactSettings.EncryptionEnabled = types.Bool(true, artifactsBlock.GetMetadata()) + } + } + + secondaryArtifactBlocks := resource.GetBlocks("secondary_artifacts") + for _, secondaryArtifactBlock := range secondaryArtifactBlocks { + + secondaryEncryptionEnabled := types.BoolDefault(true, secondaryArtifactBlock.GetMetadata()) + secondaryEncryptionDisabledAttr := secondaryArtifactBlock.GetAttribute("encryption_disabled") + if secondaryEncryptionDisabledAttr.IsTrue() 
&& hasArtifacts { + secondaryEncryptionEnabled = types.Bool(false, secondaryArtifactBlock.GetMetadata()) + } + + project.SecondaryArtifactSettings = append(project.SecondaryArtifactSettings, codebuild.ArtifactSettings{ + Metadata: secondaryArtifactBlock.GetMetadata(), + EncryptionEnabled: secondaryEncryptionEnabled, + }) + } + + return project +} diff --git a/internal/adapters/terraform/aws/codebuild/adapt_test.go b/internal/adapters/terraform/aws/codebuild/adapt_test.go new file mode 100644 index 000000000000..7c55ab0e0f11 --- /dev/null +++ b/internal/adapters/terraform/aws/codebuild/adapt_test.go @@ -0,0 +1,116 @@ +package codebuild + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/codebuild" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptProject(t *testing.T) { + tests := []struct { + name string + terraform string + expected codebuild.Project + }{ + { + name: "configured", + terraform: ` + resource "aws_codebuild_project" "codebuild" { + + artifacts { + encryption_disabled = false + } + + secondary_artifacts { + encryption_disabled = false + } + secondary_artifacts { + encryption_disabled = true + } + } +`, + expected: codebuild.Project{ + Metadata: defsecTypes.NewTestMetadata(), + ArtifactSettings: codebuild.ArtifactSettings{ + Metadata: defsecTypes.NewTestMetadata(), + EncryptionEnabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + SecondaryArtifactSettings: []codebuild.ArtifactSettings{ + { + Metadata: defsecTypes.NewTestMetadata(), + EncryptionEnabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + { + Metadata: defsecTypes.NewTestMetadata(), + EncryptionEnabled: defsecTypes.Bool(false, 
defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + { + name: "defaults - encryption enabled", + terraform: ` + resource "aws_codebuild_project" "codebuild" { + } +`, + expected: codebuild.Project{ + Metadata: defsecTypes.NewTestMetadata(), + ArtifactSettings: codebuild.ArtifactSettings{ + Metadata: defsecTypes.NewTestMetadata(), + EncryptionEnabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptProject(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_codebuild_project" "codebuild" { + artifacts { + encryption_disabled = false + } + + secondary_artifacts { + encryption_disabled = false + } + + secondary_artifacts { + encryption_disabled = true + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Projects, 1) + project := adapted.Projects[0] + + assert.Equal(t, 2, project.Metadata.Range().GetStartLine()) + assert.Equal(t, 14, project.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, project.ArtifactSettings.Metadata.Range().GetStartLine()) + assert.Equal(t, 5, project.ArtifactSettings.Metadata.Range().GetEndLine()) + + assert.Equal(t, 7, project.SecondaryArtifactSettings[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 9, project.SecondaryArtifactSettings[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 11, project.SecondaryArtifactSettings[1].Metadata.Range().GetStartLine()) + assert.Equal(t, 13, project.SecondaryArtifactSettings[1].Metadata.Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/config/adapt.go b/internal/adapters/terraform/aws/config/adapt.go new file mode 100644 index 000000000000..608f6f741f81 --- /dev/null +++ 
b/internal/adapters/terraform/aws/config/adapt.go @@ -0,0 +1,33 @@ +package config + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/config" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) config.Config { + return config.Config{ + ConfigurationAggregrator: adaptConfigurationAggregrator(modules), + } +} + +func adaptConfigurationAggregrator(modules terraform.Modules) config.ConfigurationAggregrator { + configurationAggregrator := config.ConfigurationAggregrator{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + SourceAllRegions: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + } + + for _, resource := range modules.GetResourcesByType("aws_config_configuration_aggregator") { + configurationAggregrator.Metadata = resource.GetMetadata() + aggregationBlock := resource.GetFirstMatchingBlock("account_aggregation_source", "organization_aggregation_source") + if aggregationBlock.IsNil() { + configurationAggregrator.SourceAllRegions = defsecTypes.Bool(false, resource.GetMetadata()) + } else { + allRegionsAttr := aggregationBlock.GetAttribute("all_regions") + allRegionsVal := allRegionsAttr.AsBoolValueOrDefault(false, aggregationBlock) + configurationAggregrator.SourceAllRegions = allRegionsVal + } + } + return configurationAggregrator +} diff --git a/internal/adapters/terraform/aws/config/adapt_test.go b/internal/adapters/terraform/aws/config/adapt_test.go new file mode 100644 index 000000000000..d48579571d7e --- /dev/null +++ b/internal/adapters/terraform/aws/config/adapt_test.go @@ -0,0 +1,81 @@ +package config + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/config" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + 
"github.com/stretchr/testify/assert" +) + +func Test_adaptConfigurationAggregrator(t *testing.T) { + tests := []struct { + name string + terraform string + expected config.ConfigurationAggregrator + }{ + { + name: "configured", + terraform: ` + resource "aws_config_configuration_aggregator" "example" { + name = "example" + + account_aggregation_source { + account_ids = ["123456789012"] + all_regions = true + } + } +`, + expected: config.ConfigurationAggregrator{ + Metadata: defsecTypes.NewTestMetadata(), + SourceAllRegions: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defaults", + terraform: ` + resource "aws_config_configuration_aggregator" "example" { + } +`, + expected: config.ConfigurationAggregrator{ + Metadata: defsecTypes.NewTestMetadata(), + SourceAllRegions: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptConfigurationAggregrator(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_config_configuration_aggregator" "example" { + name = "example" + + account_aggregation_source { + account_ids = ["123456789012"] + all_regions = true + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + aggregator := adapted.ConfigurationAggregrator + + assert.Equal(t, 2, aggregator.Metadata.Range().GetStartLine()) + assert.Equal(t, 9, aggregator.Metadata.Range().GetEndLine()) + + assert.Equal(t, 7, aggregator.SourceAllRegions.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, aggregator.SourceAllRegions.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/documentdb/adapt.go b/internal/adapters/terraform/aws/documentdb/adapt.go new file mode 100644 index 000000000000..104ef836d498 
--- /dev/null +++ b/internal/adapters/terraform/aws/documentdb/adapt.go @@ -0,0 +1,63 @@ +package documentdb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/documentdb" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) documentdb.DocumentDB { + return documentdb.DocumentDB{ + Clusters: adaptClusters(modules), + } +} + +func adaptClusters(modules terraform.Modules) []documentdb.Cluster { + var clusters []documentdb.Cluster + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_docdb_cluster") { + clusters = append(clusters, adaptCluster(resource, module)) + } + } + return clusters +} + +func adaptCluster(resource *terraform.Block, module *terraform.Module) documentdb.Cluster { + identifierAttr := resource.GetAttribute("cluster_identifier") + identifierVal := identifierAttr.AsStringValueOrDefault("", resource) + + var enabledLogExports []types.StringValue + var instances []documentdb.Instance + + enabledLogExportsAttr := resource.GetAttribute("enabled_cloudwatch_logs_exports") + for _, logExport := range enabledLogExportsAttr.AsStringValues() { + enabledLogExports = append(enabledLogExports, logExport) + } + + instancesRes := module.GetReferencingResources(resource, "aws_docdb_cluster_instance", "cluster_identifier") + for _, instanceRes := range instancesRes { + keyIDAttr := instanceRes.GetAttribute("kms_key_id") + keyIDVal := keyIDAttr.AsStringValueOrDefault("", instanceRes) + + instances = append(instances, documentdb.Instance{ + Metadata: instanceRes.GetMetadata(), + KMSKeyID: keyIDVal, + }) + } + + storageEncryptedAttr := resource.GetAttribute("storage_encrypted") + storageEncryptedVal := storageEncryptedAttr.AsBoolValueOrDefault(false, resource) + + KMSKeyIDAttr := resource.GetAttribute("kms_key_id") + KMSKeyIDVal := KMSKeyIDAttr.AsStringValueOrDefault("", resource) + + return documentdb.Cluster{ + 
Metadata: resource.GetMetadata(), + Identifier: identifierVal, + EnabledLogExports: enabledLogExports, + BackupRetentionPeriod: resource.GetAttribute("backup_retention_period").AsIntValueOrDefault(0, resource), + Instances: instances, + StorageEncrypted: storageEncryptedVal, + KMSKeyID: KMSKeyIDVal, + } +} diff --git a/internal/adapters/terraform/aws/documentdb/adapt_test.go b/internal/adapters/terraform/aws/documentdb/adapt_test.go new file mode 100644 index 000000000000..359cd0be9da3 --- /dev/null +++ b/internal/adapters/terraform/aws/documentdb/adapt_test.go @@ -0,0 +1,125 @@ +package documentdb + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/documentdb" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptCluster(t *testing.T) { + tests := []struct { + name string + terraform string + expected documentdb.Cluster + }{ + { + name: "configured", + terraform: ` + resource "aws_docdb_cluster" "docdb" { + cluster_identifier = "my-docdb-cluster" + kms_key_id = "kms-key" + enabled_cloudwatch_logs_exports = "audit" + storage_encrypted = true + } + + resource "aws_docdb_cluster_instance" "cluster_instances" { + count = 1 + identifier = "my-docdb-cluster" + cluster_identifier = aws_docdb_cluster.docdb.id + kms_key_id = "kms-key#1" + } +`, + expected: documentdb.Cluster{ + Metadata: defsecTypes.NewTestMetadata(), + Identifier: defsecTypes.String("my-docdb-cluster", defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("kms-key", defsecTypes.NewTestMetadata()), + EnabledLogExports: []defsecTypes.StringValue{ + defsecTypes.String("audit", defsecTypes.NewTestMetadata()), + }, + Instances: []documentdb.Instance{ + { + Metadata: defsecTypes.NewTestMetadata(), + 
KMSKeyID: defsecTypes.String("kms-key#1", defsecTypes.NewTestMetadata()), + }, + }, + StorageEncrypted: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defaults", + terraform: ` + resource "aws_docdb_cluster" "docdb" { + } +`, + expected: documentdb.Cluster{ + Metadata: defsecTypes.NewTestMetadata(), + Identifier: defsecTypes.String("", defsecTypes.NewTestMetadata()), + StorageEncrypted: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptCluster(modules.GetBlocks()[0], modules[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_docdb_cluster" "docdb" { + cluster_identifier = "my-docdb-cluster" + kms_key_id = "kms-key" + enabled_cloudwatch_logs_exports = "audit" + storage_encrypted = true + } + + resource "aws_docdb_cluster_instance" "cluster_instances" { + count = 1 + identifier = "my-docdb-cluster" + cluster_identifier = aws_docdb_cluster.docdb.id + kms_key_id = "kms-key" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Clusters, 1) + require.Len(t, adapted.Clusters[0].Instances, 1) + + cluster := adapted.Clusters[0] + instance := cluster.Instances[0] + + assert.Equal(t, 2, cluster.Metadata.Range().GetStartLine()) + assert.Equal(t, 7, cluster.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, cluster.Identifier.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, cluster.Identifier.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, cluster.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, cluster.KMSKeyID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 5, 
cluster.EnabledLogExports[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 5, cluster.EnabledLogExports[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 6, cluster.StorageEncrypted.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, cluster.StorageEncrypted.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 9, instance.Metadata.Range().GetStartLine()) + assert.Equal(t, 14, instance.Metadata.Range().GetEndLine()) + + assert.Equal(t, 13, instance.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 13, instance.KMSKeyID.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/dynamodb/adapt.go b/internal/adapters/terraform/aws/dynamodb/adapt.go new file mode 100644 index 000000000000..a09ffcfb742d --- /dev/null +++ b/internal/adapters/terraform/aws/dynamodb/adapt.go @@ -0,0 +1,94 @@ +package dynamodb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/dynamodb" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) dynamodb.DynamoDB { + return dynamodb.DynamoDB{ + DAXClusters: adaptClusters(modules), + Tables: adaptTables(modules), + } +} + +func adaptClusters(modules terraform.Modules) []dynamodb.DAXCluster { + var clusters []dynamodb.DAXCluster + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_dax_cluster") { + clusters = append(clusters, adaptCluster(resource, module)) + } + } + return clusters +} + +func adaptTables(modules terraform.Modules) []dynamodb.Table { + var tables []dynamodb.Table + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_dynamodb_table") { + tables = append(tables, adaptTable(resource, module)) + } + } + return tables +} + +func adaptCluster(resource *terraform.Block, module *terraform.Module) dynamodb.DAXCluster { + + cluster := dynamodb.DAXCluster{ + Metadata: 
resource.GetMetadata(), + ServerSideEncryption: dynamodb.ServerSideEncryption{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + KMSKeyID: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + PointInTimeRecovery: defsecTypes.BoolDefault(false, resource.GetMetadata()), + } + + if ssEncryptionBlock := resource.GetBlock("server_side_encryption"); ssEncryptionBlock.IsNotNil() { + cluster.ServerSideEncryption.Metadata = ssEncryptionBlock.GetMetadata() + enabledAttr := ssEncryptionBlock.GetAttribute("enabled") + cluster.ServerSideEncryption.Enabled = enabledAttr.AsBoolValueOrDefault(false, ssEncryptionBlock) + } + + if recoveryBlock := resource.GetBlock("point_in_time_recovery"); recoveryBlock.IsNotNil() { + recoveryEnabledAttr := recoveryBlock.GetAttribute("enabled") + cluster.PointInTimeRecovery = recoveryEnabledAttr.AsBoolValueOrDefault(false, recoveryBlock) + } + + return cluster +} + +func adaptTable(resource *terraform.Block, module *terraform.Module) dynamodb.Table { + + table := dynamodb.Table{ + Metadata: resource.GetMetadata(), + ServerSideEncryption: dynamodb.ServerSideEncryption{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + KMSKeyID: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + PointInTimeRecovery: defsecTypes.BoolDefault(false, resource.GetMetadata()), + } + + if ssEncryptionBlock := resource.GetBlock("server_side_encryption"); ssEncryptionBlock.IsNotNil() { + table.ServerSideEncryption.Metadata = ssEncryptionBlock.GetMetadata() + enabledAttr := ssEncryptionBlock.GetAttribute("enabled") + table.ServerSideEncryption.Enabled = enabledAttr.AsBoolValueOrDefault(false, ssEncryptionBlock) + + kmsKeyIdAttr := ssEncryptionBlock.GetAttribute("kms_key_arn") + table.ServerSideEncryption.KMSKeyID = kmsKeyIdAttr.AsStringValueOrDefault("alias/aws/dynamodb", ssEncryptionBlock) + + kmsBlock, err := 
module.GetReferencedBlock(kmsKeyIdAttr, resource) + if err == nil && kmsBlock.IsNotNil() { + table.ServerSideEncryption.KMSKeyID = defsecTypes.String(kmsBlock.FullName(), kmsBlock.GetMetadata()) + } + } + + if recoveryBlock := resource.GetBlock("point_in_time_recovery"); recoveryBlock.IsNotNil() { + recoveryEnabledAttr := recoveryBlock.GetAttribute("enabled") + table.PointInTimeRecovery = recoveryEnabledAttr.AsBoolValueOrDefault(false, recoveryBlock) + } + + return table +} diff --git a/internal/adapters/terraform/aws/dynamodb/adapt_test.go b/internal/adapters/terraform/aws/dynamodb/adapt_test.go new file mode 100644 index 000000000000..05f7e126c507 --- /dev/null +++ b/internal/adapters/terraform/aws/dynamodb/adapt_test.go @@ -0,0 +1,176 @@ +package dynamodb + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/dynamodb" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptCluster(t *testing.T) { + tests := []struct { + name string + terraform string + expected dynamodb.DAXCluster + }{ + { + name: "cluster", + terraform: ` + resource "aws_dax_cluster" "example" { + server_side_encryption { + enabled = true + } + } +`, + expected: dynamodb.DAXCluster{ + Metadata: defsecTypes.NewTestMetadata(), + ServerSideEncryption: dynamodb.ServerSideEncryption{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + PointInTimeRecovery: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + 
adapted := adaptCluster(modules.GetBlocks()[0], modules[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptTable(t *testing.T) { + tests := []struct { + name string + terraform string + expected dynamodb.Table + }{ + { + name: "table", + terraform: ` + resource "aws_dynamodb_table" "example" { + name = "example" + + server_side_encryption { + enabled = true + kms_key_arn = "key-string" + } + + point_in_time_recovery { + enabled = true + } + } +`, + expected: dynamodb.Table{ + Metadata: defsecTypes.NewTestMetadata(), + ServerSideEncryption: dynamodb.ServerSideEncryption{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("key-string", defsecTypes.NewTestMetadata()), + }, + PointInTimeRecovery: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "table no kms", + terraform: ` + resource "aws_dax_cluster" "example" { + server_side_encryption { + enabled = true + } + } +`, + expected: dynamodb.Table{ + Metadata: defsecTypes.NewTestMetadata(), + ServerSideEncryption: dynamodb.ServerSideEncryption{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("alias/aws/dynamodb", defsecTypes.NewTestMetadata()), + }, + PointInTimeRecovery: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "reference key", + terraform: ` + resource "aws_dynamodb_table" "example" { + name = "example" + + server_side_encryption { + enabled = true + kms_key_arn = aws_kms_key.a.arn + } + } + + resource "aws_kms_key" "a" { + } +`, + expected: dynamodb.Table{ + Metadata: defsecTypes.NewTestMetadata(), + ServerSideEncryption: dynamodb.ServerSideEncryption{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("aws_kms_key.a", defsecTypes.NewTestMetadata()), 
+ }, + PointInTimeRecovery: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptTable(modules.GetBlocks()[0], modules[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_dynamodb_table" "example" { + name = "example" + + server_side_encryption { + enabled = true + kms_key_arn = "key-string" + } + + point_in_time_recovery { + enabled = true + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.DAXClusters, 0) + require.Len(t, adapted.Tables, 1) + table := adapted.Tables[0] + + assert.Equal(t, 2, table.Metadata.Range().GetStartLine()) + assert.Equal(t, 13, table.Metadata.Range().GetEndLine()) + + assert.Equal(t, 5, table.ServerSideEncryption.Metadata.Range().GetStartLine()) + assert.Equal(t, 8, table.ServerSideEncryption.Metadata.Range().GetEndLine()) + + assert.Equal(t, 6, table.ServerSideEncryption.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, table.ServerSideEncryption.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 7, table.ServerSideEncryption.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, table.ServerSideEncryption.KMSKeyID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, table.PointInTimeRecovery.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 11, table.PointInTimeRecovery.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/ec2/adapt.go b/internal/adapters/terraform/aws/ec2/adapt.go new file mode 100644 index 000000000000..3a339f1f93b6 --- /dev/null +++ b/internal/adapters/terraform/aws/ec2/adapt.go @@ -0,0 +1,102 @@ +package ec2 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + 
"github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) ec2.EC2 { + + naclAdapter := naclAdapter{naclRuleIDs: modules.GetChildResourceIDMapByType("aws_network_acl_rule")} + sgAdapter := sgAdapter{sgRuleIDs: modules.GetChildResourceIDMapByType("aws_security_group_rule")} + + return ec2.EC2{ + Instances: getInstances(modules), + VPCs: adaptVPCs(modules), + SecurityGroups: sgAdapter.adaptSecurityGroups(modules), + Subnets: adaptSubnets(modules), + NetworkACLs: naclAdapter.adaptNetworkACLs(modules), + LaunchConfigurations: adaptLaunchConfigurations(modules), + LaunchTemplates: adaptLaunchTemplates(modules), + Volumes: adaptVolumes(modules), + } +} + +func getInstances(modules terraform.Modules) []ec2.Instance { + var instances []ec2.Instance + + blocks := modules.GetResourcesByType("aws_instance") + + for _, b := range blocks { + instance := ec2.Instance{ + Metadata: b.GetMetadata(), + MetadataOptions: getMetadataOptions(b), + UserData: b.GetAttribute("user_data").AsStringValueOrDefault("", b), + } + + if launchTemplate := findRelatedLaunchTemplate(modules, b); launchTemplate != nil { + instance = launchTemplate.Instance + } + + if instance.RootBlockDevice == nil { + instance.RootBlockDevice = &ec2.BlockDevice{ + Metadata: b.GetMetadata(), + Encrypted: types.BoolDefault(false, b.GetMetadata()), + } + } + + if rootBlockDevice := b.GetBlock("root_block_device"); rootBlockDevice.IsNotNil() { + instance.RootBlockDevice = &ec2.BlockDevice{ + Metadata: rootBlockDevice.GetMetadata(), + Encrypted: rootBlockDevice.GetAttribute("encrypted").AsBoolValueOrDefault(false, b), + } + } + + for _, ebsBlock := range b.GetBlocks("ebs_block_device") { + instance.EBSBlockDevices = append(instance.EBSBlockDevices, &ec2.BlockDevice{ + Metadata: ebsBlock.GetMetadata(), + Encrypted: ebsBlock.GetAttribute("encrypted").AsBoolValueOrDefault(false, b), + }) + } + + for _, resource := range 
modules.GetResourcesByType("aws_ebs_encryption_by_default") { + if resource.GetAttribute("enabled").NotEqual(false) { + instance.RootBlockDevice.Encrypted = types.BoolDefault(true, resource.GetMetadata()) + for i := 0; i < len(instance.EBSBlockDevices); i++ { + ebs := instance.EBSBlockDevices[i] + ebs.Encrypted = types.BoolDefault(true, resource.GetMetadata()) + } + } + } + + instances = append(instances, instance) + } + + return instances +} + +func findRelatedLaunchTemplate(modules terraform.Modules, instanceBlock *terraform.Block) *ec2.LaunchTemplate { + launchTemplateBlock := instanceBlock.GetBlock("launch_template") + if launchTemplateBlock.IsNil() { + return nil + } + + templateRef := launchTemplateBlock.GetAttribute("name") + + if !templateRef.IsResolvable() { + templateRef = launchTemplateBlock.GetAttribute("id") + } + + if templateRef.IsString() { + for _, r := range modules.GetResourcesByType("aws_launch_template") { + templateName := r.GetAttribute("name").AsStringValueOrDefault("", r).Value() + if templateRef.Equals(r.ID()) || templateRef.Equals(templateName) { + launchTemplate := adaptLaunchTemplate(r) + return &launchTemplate + } + } + } + + return nil +} diff --git a/internal/adapters/terraform/aws/ec2/adapt_test.go b/internal/adapters/terraform/aws/ec2/adapt_test.go new file mode 100644 index 000000000000..1596eb4ad633 --- /dev/null +++ b/internal/adapters/terraform/aws/ec2/adapt_test.go @@ -0,0 +1,255 @@ +package ec2 + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected ec2.EC2 + }{ + { + name: "configured", 
+ terraform: ` + resource "aws_instance" "example" { + ami = "ami-7f89a64f" + instance_type = "t1.micro" + + root_block_device { + encrypted = true + } + + metadata_options { + http_tokens = "required" + http_endpoint = "disabled" + } + + ebs_block_device { + encrypted = true + } + + user_data = < 0 { + orphanage := ec2.SecurityGroup{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Description: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + IngressRules: nil, + EgressRules: nil, + IsDefault: defsecTypes.BoolUnresolvable(defsecTypes.NewUnmanagedMetadata()), + VPCID: defsecTypes.StringUnresolvable(defsecTypes.NewUnmanagedMetadata()), + } + for _, sgRule := range orphanResources { + if sgRule.GetAttribute("type").Equals("ingress") { + orphanage.IngressRules = append(orphanage.IngressRules, adaptSGRule(sgRule, modules)) + } else if sgRule.GetAttribute("type").Equals("egress") { + orphanage.EgressRules = append(orphanage.EgressRules, adaptSGRule(sgRule, modules)) + } + } + securityGroups = append(securityGroups, orphanage) + } + + return securityGroups +} + +func (a *naclAdapter) adaptNetworkACLs(modules terraform.Modules) []ec2.NetworkACL { + var networkACLs []ec2.NetworkACL + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_network_acl") { + networkACLs = append(networkACLs, a.adaptNetworkACL(resource, module)) + } + } + + orphanResources := modules.GetResourceByIDs(a.naclRuleIDs.Orphans()...) 
+ if len(orphanResources) > 0 { + orphanage := ec2.NetworkACL{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Rules: nil, + IsDefaultRule: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + } + for _, naclRule := range orphanResources { + orphanage.Rules = append(orphanage.Rules, adaptNetworkACLRule(naclRule)) + } + networkACLs = append(networkACLs, orphanage) + } + + return networkACLs +} + +func (a *sgAdapter) adaptSecurityGroup(resource *terraform.Block, module terraform.Modules) ec2.SecurityGroup { + var ingressRules []ec2.SecurityGroupRule + var egressRules []ec2.SecurityGroupRule + + descriptionAttr := resource.GetAttribute("description") + descriptionVal := descriptionAttr.AsStringValueOrDefault("Managed by Terraform", resource) + + ingressBlocks := resource.GetBlocks("ingress") + for _, ingressBlock := range ingressBlocks { + ingressRules = append(ingressRules, adaptSGRule(ingressBlock, module)) + } + + egressBlocks := resource.GetBlocks("egress") + for _, egressBlock := range egressBlocks { + egressRules = append(egressRules, adaptSGRule(egressBlock, module)) + } + + rulesBlocks := module.GetReferencingResources(resource, "aws_security_group_rule", "security_group_id") + for _, ruleBlock := range rulesBlocks { + a.sgRuleIDs.Resolve(ruleBlock.ID()) + if ruleBlock.GetAttribute("type").Equals("ingress") { + ingressRules = append(ingressRules, adaptSGRule(ruleBlock, module)) + } else if ruleBlock.GetAttribute("type").Equals("egress") { + egressRules = append(egressRules, adaptSGRule(ruleBlock, module)) + } + } + + return ec2.SecurityGroup{ + Metadata: resource.GetMetadata(), + Description: descriptionVal, + IngressRules: ingressRules, + EgressRules: egressRules, + IsDefault: defsecTypes.Bool(false, defsecTypes.NewUnmanagedMetadata()), + VPCID: resource.GetAttribute("vpc_id").AsStringValueOrDefault("", resource), + } +} + +func adaptSGRule(resource *terraform.Block, modules terraform.Modules) ec2.SecurityGroupRule { + ruleDescAttr := 
resource.GetAttribute("description") + ruleDescVal := ruleDescAttr.AsStringValueOrDefault("", resource) + + var cidrs []defsecTypes.StringValue + + cidrBlocks := resource.GetAttribute("cidr_blocks") + ipv6cidrBlocks := resource.GetAttribute("ipv6_cidr_blocks") + varBlocks := modules.GetBlocks().OfType("variable") + + for _, vb := range varBlocks { + if cidrBlocks.IsNotNil() && cidrBlocks.ReferencesBlock(vb) { + cidrBlocks = vb.GetAttribute("default") + } + if ipv6cidrBlocks.IsNotNil() && ipv6cidrBlocks.ReferencesBlock(vb) { + ipv6cidrBlocks = vb.GetAttribute("default") + } + } + + if cidrBlocks.IsNotNil() { + cidrs = cidrBlocks.AsStringValues() + } + + if ipv6cidrBlocks.IsNotNil() { + cidrs = append(cidrs, ipv6cidrBlocks.AsStringValues()...) + } + + return ec2.SecurityGroupRule{ + Metadata: resource.GetMetadata(), + Description: ruleDescVal, + CIDRs: cidrs, + } +} + +func (a *naclAdapter) adaptNetworkACL(resource *terraform.Block, module *terraform.Module) ec2.NetworkACL { + var networkRules []ec2.NetworkACLRule + rulesBlocks := module.GetReferencingResources(resource, "aws_network_acl_rule", "network_acl_id") + for _, ruleBlock := range rulesBlocks { + a.naclRuleIDs.Resolve(ruleBlock.ID()) + networkRules = append(networkRules, adaptNetworkACLRule(ruleBlock)) + } + return ec2.NetworkACL{ + Metadata: resource.GetMetadata(), + Rules: networkRules, + IsDefaultRule: defsecTypes.BoolDefault(false, resource.GetMetadata()), + } +} + +func adaptNetworkACLRule(resource *terraform.Block) ec2.NetworkACLRule { + var cidrs []defsecTypes.StringValue + + typeVal := defsecTypes.StringDefault("ingress", resource.GetMetadata()) + + egressAtrr := resource.GetAttribute("egress") + if egressAtrr.IsTrue() { + typeVal = defsecTypes.String("egress", egressAtrr.GetMetadata()) + } else if egressAtrr.IsNotNil() { + typeVal = defsecTypes.String("ingress", egressAtrr.GetMetadata()) + } + + actionAttr := resource.GetAttribute("rule_action") + actionVal := actionAttr.AsStringValueOrDefault("", 
resource) + + protocolAtrr := resource.GetAttribute("protocol") + protocolVal := protocolAtrr.AsStringValueOrDefault("-1", resource) + + cidrAttr := resource.GetAttribute("cidr_block") + if cidrAttr.IsNotNil() { + cidrs = append(cidrs, cidrAttr.AsStringValueOrDefault("", resource)) + } + ipv4cidrAttr := resource.GetAttribute("ipv6_cidr_block") + if ipv4cidrAttr.IsNotNil() { + cidrs = append(cidrs, ipv4cidrAttr.AsStringValueOrDefault("", resource)) + } + + return ec2.NetworkACLRule{ + Metadata: resource.GetMetadata(), + Type: typeVal, + Action: actionVal, + Protocol: protocolVal, + CIDRs: cidrs, + } +} diff --git a/internal/adapters/terraform/aws/ec2/vpc_test.go b/internal/adapters/terraform/aws/ec2/vpc_test.go new file mode 100644 index 000000000000..704c15c77404 --- /dev/null +++ b/internal/adapters/terraform/aws/ec2/vpc_test.go @@ -0,0 +1,339 @@ +package ec2 + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" +) + +func Test_AdaptVPC(t *testing.T) { + tests := []struct { + name string + terraform string + expected ec2.EC2 + }{ + { + name: "defined", + terraform: ` + resource "aws_flow_log" "this" { + vpc_id = aws_vpc.main.id + } + resource "aws_default_vpc" "default" { + tags = { + Name = "Default VPC" + } + } + + resource "aws_vpc" "main" { + cidr_block = "4.5.6.7/32" + } + + resource "aws_security_group" "example" { + name = "http" + description = "Allow inbound HTTP traffic" + + ingress { + description = "Rule #1" + from_port = 80 + to_port = 80 + protocol = "tcp" + cidr_blocks = [aws_vpc.main.cidr_block] + } + + egress { + cidr_blocks = ["1.2.3.4/32"] + } + } + + resource "aws_network_acl_rule" "example" { + egress = 
false + protocol = "tcp" + from_port = 22 + to_port = 22 + rule_action = "allow" + cidr_block = "10.0.0.0/16" + } + + resource "aws_security_group_rule" "example" { + type = "ingress" + description = "Rule #2" + security_group_id = aws_security_group.example.id + from_port = 22 + to_port = 22 + protocol = "tcp" + cidr_blocks = [ + "1.2.3.4/32", + "4.5.6.7/32", + ] + } +`, + expected: ec2.EC2{ + VPCs: []ec2.VPC{ + { + Metadata: defsecTypes.NewTestMetadata(), + IsDefault: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + ID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + FlowLogsEnabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + { + Metadata: defsecTypes.NewTestMetadata(), + IsDefault: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + ID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + FlowLogsEnabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + SecurityGroups: []ec2.SecurityGroup{ + { + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("Allow inbound HTTP traffic", defsecTypes.NewTestMetadata()), + IsDefault: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + VPCID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + IngressRules: []ec2.SecurityGroupRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + + Description: defsecTypes.String("Rule #1", defsecTypes.NewTestMetadata()), + CIDRs: []defsecTypes.StringValue{ + defsecTypes.String("4.5.6.7/32", defsecTypes.NewTestMetadata()), + }, + }, + { + Metadata: defsecTypes.NewTestMetadata(), + + Description: defsecTypes.String("Rule #2", defsecTypes.NewTestMetadata()), + CIDRs: []defsecTypes.StringValue{ + defsecTypes.String("1.2.3.4/32", defsecTypes.NewTestMetadata()), + defsecTypes.String("4.5.6.7/32", defsecTypes.NewTestMetadata()), + }, + }, + }, + + EgressRules: []ec2.SecurityGroupRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("", defsecTypes.NewTestMetadata()), + 
CIDRs: []defsecTypes.StringValue{ + defsecTypes.String("1.2.3.4/32", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + NetworkACLs: []ec2.NetworkACL{ + { + Metadata: defsecTypes.NewTestMetadata(), + Rules: []ec2.NetworkACLRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String("ingress", defsecTypes.NewTestMetadata()), + Action: defsecTypes.String("allow", defsecTypes.NewTestMetadata()), + Protocol: defsecTypes.String("tcp", defsecTypes.NewTestMetadata()), + CIDRs: []defsecTypes.StringValue{ + defsecTypes.String("10.0.0.0/16", defsecTypes.NewTestMetadata()), + }, + }, + }, + IsDefaultRule: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + { + name: "defaults", + terraform: ` + resource "aws_security_group" "example" { + ingress { + } + + egress { + } + } + + resource "aws_network_acl_rule" "example" { + } +`, + expected: ec2.EC2{ + SecurityGroups: []ec2.SecurityGroup{ + { + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("Managed by Terraform", defsecTypes.NewTestMetadata()), + IsDefault: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + VPCID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + IngressRules: []ec2.SecurityGroupRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + + EgressRules: []ec2.SecurityGroupRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + NetworkACLs: []ec2.NetworkACL{ + { + Metadata: defsecTypes.NewTestMetadata(), + Rules: []ec2.NetworkACLRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String("ingress", defsecTypes.NewTestMetadata()), + Action: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Protocol: defsecTypes.String("-1", defsecTypes.NewTestMetadata()), + }, + }, + IsDefaultRule: defsecTypes.Bool(false, 
defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + { + name: "aws_flow_log refer to locals", + terraform: ` +locals { + vpc_id = try(aws_vpc.this.id, "") +} + +resource "aws_vpc" "this" { +} + +resource "aws_flow_log" "this" { + vpc_id = local.vpc_id +} +`, + expected: ec2.EC2{ + VPCs: []ec2.VPC{ + { + Metadata: defsecTypes.NewTestMetadata(), + IsDefault: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + ID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + FlowLogsEnabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestVPCLines(t *testing.T) { + src := ` + resource "aws_default_vpc" "default" { + } + + resource "aws_vpc" "main" { + cidr_block = "4.5.6.7/32" + } + + resource "aws_security_group" "example" { + name = "http" + description = "Allow inbound HTTP traffic" + + ingress { + description = "HTTP from VPC" + from_port = 80 + to_port = 80 + protocol = "tcp" + cidr_blocks = [aws_vpc.main.cidr_block] + } + + egress { + cidr_blocks = ["1.2.3.4/32"] + } + } + + resource "aws_security_group_rule" "example" { + type = "ingress" + security_group_id = aws_security_group.example.id + from_port = 22 + to_port = 22 + protocol = "tcp" + cidr_blocks = [ + "1.2.3.4/32", + "4.5.6.7/32", + ] + } + + resource "aws_network_acl_rule" "example" { + egress = false + protocol = "tcp" + from_port = 22 + to_port = 22 + rule_action = "allow" + cidr_block = "10.0.0.0/16" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.VPCs, 2) + require.Len(t, adapted.SecurityGroups, 1) + require.Len(t, adapted.NetworkACLs, 1) + + defaultVPC := adapted.VPCs[0] + securityGroup := adapted.SecurityGroups[0] + networkACL := 
adapted.NetworkACLs[0] + + assert.Equal(t, 2, defaultVPC.Metadata.Range().GetStartLine()) + assert.Equal(t, 3, defaultVPC.Metadata.Range().GetEndLine()) + + assert.Equal(t, 9, securityGroup.Metadata.Range().GetStartLine()) + assert.Equal(t, 24, securityGroup.Metadata.Range().GetEndLine()) + + assert.Equal(t, 11, securityGroup.Description.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 11, securityGroup.Description.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 13, securityGroup.IngressRules[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 19, securityGroup.IngressRules[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 14, securityGroup.IngressRules[0].Description.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 14, securityGroup.IngressRules[0].Description.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 18, securityGroup.IngressRules[0].CIDRs[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 18, securityGroup.IngressRules[0].CIDRs[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 26, securityGroup.IngressRules[1].Metadata.Range().GetStartLine()) + assert.Equal(t, 36, securityGroup.IngressRules[1].Metadata.Range().GetEndLine()) + + assert.Equal(t, 32, securityGroup.IngressRules[1].CIDRs[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 35, securityGroup.IngressRules[1].CIDRs[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 21, securityGroup.EgressRules[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 23, securityGroup.EgressRules[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 22, securityGroup.EgressRules[0].CIDRs[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 22, securityGroup.EgressRules[0].CIDRs[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 38, networkACL.Rules[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 45, networkACL.Rules[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 39, 
networkACL.Rules[0].Type.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 39, networkACL.Rules[0].Type.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 40, networkACL.Rules[0].Protocol.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 40, networkACL.Rules[0].Protocol.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 43, networkACL.Rules[0].Action.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 43, networkACL.Rules[0].Action.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 44, networkACL.Rules[0].CIDRs[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 44, networkACL.Rules[0].CIDRs[0].GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/ecr/adapt.go b/internal/adapters/terraform/aws/ecr/adapt.go new file mode 100644 index 000000000000..83741a7e1e33 --- /dev/null +++ b/internal/adapters/terraform/aws/ecr/adapt.go @@ -0,0 +1,113 @@ +package ecr + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" + iamp "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/iam" + "github.com/liamg/iamgo" +) + +func Adapt(modules terraform.Modules) ecr.ECR { + return ecr.ECR{ + Repositories: adaptRepositories(modules), + } +} + +func adaptRepositories(modules terraform.Modules) []ecr.Repository { + var repositories []ecr.Repository + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_ecr_repository") { + repositories = append(repositories, adaptRepository(resource, module, modules)) + } + } + return repositories +} + +func adaptRepository(resource *terraform.Block, module *terraform.Module, modules terraform.Modules) ecr.Repository { + repo := ecr.Repository{ + Metadata: resource.GetMetadata(), + ImageScanning: ecr.ImageScanning{ + 
Metadata: resource.GetMetadata(), + ScanOnPush: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + ImageTagsImmutable: defsecTypes.BoolDefault(false, resource.GetMetadata()), + Policies: nil, + Encryption: ecr.Encryption{ + Metadata: resource.GetMetadata(), + Type: defsecTypes.StringDefault("AES256", resource.GetMetadata()), + KMSKeyID: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + } + + if imageScanningBlock := resource.GetBlock("image_scanning_configuration"); imageScanningBlock.IsNotNil() { + repo.ImageScanning.Metadata = imageScanningBlock.GetMetadata() + scanOnPushAttr := imageScanningBlock.GetAttribute("scan_on_push") + repo.ImageScanning.ScanOnPush = scanOnPushAttr.AsBoolValueOrDefault(false, imageScanningBlock) + } + + mutabilityAttr := resource.GetAttribute("image_tag_mutability") + if mutabilityAttr.Equals("IMMUTABLE") { + repo.ImageTagsImmutable = defsecTypes.Bool(true, mutabilityAttr.GetMetadata()) + } else if mutabilityAttr.Equals("MUTABLE") { + repo.ImageTagsImmutable = defsecTypes.Bool(false, mutabilityAttr.GetMetadata()) + } + + policyBlocks := module.GetReferencingResources(resource, "aws_ecr_repository_policy", "repository") + for _, policyRes := range policyBlocks { + if policyAttr := policyRes.GetAttribute("policy"); policyAttr.IsString() { + + dataBlock, err := module.GetBlockByID(policyAttr.Value().AsString()) + if err != nil { + + parsed, err := iamgo.ParseString(policyAttr.Value().AsString()) + if err != nil { + continue + } + + policy := iamp.Policy{ + Metadata: policyRes.GetMetadata(), + Name: defsecTypes.StringDefault("", policyRes.GetMetadata()), + Document: iamp.Document{ + Parsed: *parsed, + Metadata: policyAttr.GetMetadata(), + }, + Builtin: defsecTypes.Bool(false, policyRes.GetMetadata()), + } + + repo.Policies = append(repo.Policies, policy) + } else if dataBlock.Type() == "data" && dataBlock.TypeLabel() == "aws_iam_policy_document" { + if doc, err := iam.ConvertTerraformDocument(modules, dataBlock); err 
== nil { + policy := iamp.Policy{ + Metadata: policyRes.GetMetadata(), + Name: defsecTypes.StringDefault("", policyRes.GetMetadata()), + Document: iamp.Document{ + Parsed: doc.Document, + Metadata: doc.Source.GetMetadata(), + IsOffset: true, + }, + Builtin: defsecTypes.Bool(false, policyRes.GetMetadata()), + } + repo.Policies = append(repo.Policies, policy) + } + } + } + } + + if encryptBlock := resource.GetBlock("encryption_configuration"); encryptBlock.IsNotNil() { + repo.Encryption.Metadata = encryptBlock.GetMetadata() + encryptionTypeAttr := encryptBlock.GetAttribute("encryption_type") + repo.Encryption.Type = encryptionTypeAttr.AsStringValueOrDefault("AES256", encryptBlock) + + kmsKeyAttr := encryptBlock.GetAttribute("kms_key") + repo.Encryption.KMSKeyID = kmsKeyAttr.AsStringValueOrDefault("", encryptBlock) + if kmsKeyAttr.IsResourceBlockReference("aws_kms_key") { + if keyBlock, err := module.GetReferencedBlock(kmsKeyAttr, encryptBlock); err == nil { + repo.Encryption.KMSKeyID = defsecTypes.String(keyBlock.FullName(), keyBlock.GetMetadata()) + } + } + } + + return repo +} diff --git a/internal/adapters/terraform/aws/ecr/adapt_test.go b/internal/adapters/terraform/aws/ecr/adapt_test.go new file mode 100644 index 000000000000..502708014e90 --- /dev/null +++ b/internal/adapters/terraform/aws/ecr/adapt_test.go @@ -0,0 +1,248 @@ +package ecr + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/liamg/iamgo" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptRepository(t *testing.T) { + tests := []struct { + name string + terraform string + expected ecr.Repository + }{ + { 
+ name: "configured", + terraform: ` + resource "aws_kms_key" "ecr_kms" { + enable_key_rotation = true + } + + resource "aws_ecr_repository" "foo" { + name = "bar" + image_tag_mutability = "MUTABLE" + + image_scanning_configuration { + scan_on_push = true + } + + encryption_configuration { + encryption_type = "KMS" + kms_key = aws_kms_key.ecr_kms.key_id + } + } + + resource "aws_ecr_repository_policy" "foopolicy" { + repository = aws_ecr_repository.foo.name + + policy = < 0 { + var volumes []ecs.Volume + for _, volumeBlock := range volumeBlocks { + volumes = append(volumes, ecs.Volume{ + Metadata: volumeBlock.GetMetadata(), + EFSVolumeConfiguration: adaptEFSVolumeConfiguration(volumeBlock), + }) + } + return volumes + } + + return []ecs.Volume{} +} + +func adaptEFSVolumeConfiguration(volumeBlock *terraform.Block) ecs.EFSVolumeConfiguration { + EFSVolumeConfiguration := ecs.EFSVolumeConfiguration{ + Metadata: volumeBlock.GetMetadata(), + TransitEncryptionEnabled: types.BoolDefault(true, volumeBlock.GetMetadata()), + } + + if EFSConfigBlock := volumeBlock.GetBlock("efs_volume_configuration"); EFSConfigBlock.IsNotNil() { + EFSVolumeConfiguration.Metadata = EFSConfigBlock.GetMetadata() + transitEncryptionAttr := EFSConfigBlock.GetAttribute("transit_encryption") + EFSVolumeConfiguration.TransitEncryptionEnabled = types.Bool(transitEncryptionAttr.Equals("ENABLED"), EFSConfigBlock.GetMetadata()) + if transitEncryptionAttr.IsNotNil() { + EFSVolumeConfiguration.TransitEncryptionEnabled = types.Bool(transitEncryptionAttr.Equals("ENABLED"), transitEncryptionAttr.GetMetadata()) + } + } + + return EFSVolumeConfiguration +} diff --git a/internal/adapters/terraform/aws/ecs/adapt_test.go b/internal/adapters/terraform/aws/ecs/adapt_test.go new file mode 100644 index 000000000000..5ed13d970de3 --- /dev/null +++ b/internal/adapters/terraform/aws/ecs/adapt_test.go @@ -0,0 +1,246 @@ +package ecs + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" 
+ + "github.com/aquasecurity/defsec/pkg/providers/aws/ecs" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptClusterSettings(t *testing.T) { + tests := []struct { + name string + terraform string + expected ecs.ClusterSettings + }{ + { + name: "container insights enabled", + terraform: ` + resource "aws_ecs_cluster" "example" { + name = "services-cluster" + + setting { + name = "containerInsights" + value = "enabled" + } + } +`, + expected: ecs.ClusterSettings{ + Metadata: defsecTypes.NewTestMetadata(), + ContainerInsightsEnabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "invalid name", + terraform: ` + resource "aws_ecs_cluster" "example" { + name = "services-cluster" + + setting { + name = "invalidName" + value = "enabled" + } + } +`, + expected: ecs.ClusterSettings{ + Metadata: defsecTypes.NewTestMetadata(), + ContainerInsightsEnabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defaults", + terraform: ` + resource "aws_ecs_cluster" "example" { + } +`, + expected: ecs.ClusterSettings{ + Metadata: defsecTypes.NewTestMetadata(), + ContainerInsightsEnabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptClusterSettings(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptTaskDefinitionResource(t *testing.T) { + tests := []struct { + name string + terraform string + expected ecs.TaskDefinition + }{ + { + name: "configured", + terraform: ` + resource "aws_ecs_task_definition" "example" { + family = "service" + container_definitions = < 0 { + 
orphanage := elb.LoadBalancer{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Type: defsecTypes.StringDefault(elb.TypeApplication, defsecTypes.NewUnmanagedMetadata()), + DropInvalidHeaderFields: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + Internal: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + Listeners: nil, + } + for _, listenerResource := range orphanResources { + orphanage.Listeners = append(orphanage.Listeners, adaptListener(listenerResource, "application")) + } + loadBalancers = append(loadBalancers, orphanage) + } + + return loadBalancers +} + +func (a *adapter) adaptLoadBalancer(resource *terraform.Block, module terraform.Modules) elb.LoadBalancer { + var listeners []elb.Listener + + typeAttr := resource.GetAttribute("load_balancer_type") + typeVal := typeAttr.AsStringValueOrDefault("application", resource) + + dropInvalidHeadersAttr := resource.GetAttribute("drop_invalid_header_fields") + dropInvalidHeadersVal := dropInvalidHeadersAttr.AsBoolValueOrDefault(false, resource) + + internalAttr := resource.GetAttribute("internal") + internalVal := internalAttr.AsBoolValueOrDefault(false, resource) + + listenerBlocks := module.GetReferencingResources(resource, "aws_lb_listener", "load_balancer_arn") + listenerBlocks = append(listenerBlocks, module.GetReferencingResources(resource, "aws_alb_listener", "load_balancer_arn")...) 
+ + for _, listenerBlock := range listenerBlocks { + a.listenerIDs.Resolve(listenerBlock.ID()) + listeners = append(listeners, adaptListener(listenerBlock, typeVal.Value())) + } + return elb.LoadBalancer{ + Metadata: resource.GetMetadata(), + Type: typeVal, + DropInvalidHeaderFields: dropInvalidHeadersVal, + Internal: internalVal, + Listeners: listeners, + } +} + +func (a *adapter) adaptClassicLoadBalancer(resource *terraform.Block, module terraform.Modules) elb.LoadBalancer { + internalAttr := resource.GetAttribute("internal") + internalVal := internalAttr.AsBoolValueOrDefault(false, resource) + + return elb.LoadBalancer{ + Metadata: resource.GetMetadata(), + Type: defsecTypes.String("classic", resource.GetMetadata()), + DropInvalidHeaderFields: defsecTypes.BoolDefault(false, resource.GetMetadata()), + Internal: internalVal, + Listeners: nil, + } +} + +func adaptListener(listenerBlock *terraform.Block, typeVal string) elb.Listener { + listener := elb.Listener{ + Metadata: listenerBlock.GetMetadata(), + Protocol: defsecTypes.StringDefault("", listenerBlock.GetMetadata()), + TLSPolicy: defsecTypes.StringDefault("", listenerBlock.GetMetadata()), + DefaultActions: nil, + } + + protocolAttr := listenerBlock.GetAttribute("protocol") + if typeVal == "application" { + listener.Protocol = protocolAttr.AsStringValueOrDefault("HTTP", listenerBlock) + } + + sslPolicyAttr := listenerBlock.GetAttribute("ssl_policy") + listener.TLSPolicy = sslPolicyAttr.AsStringValueOrDefault("", listenerBlock) + + for _, defaultActionBlock := range listenerBlock.GetBlocks("default_action") { + action := elb.Action{ + Metadata: defaultActionBlock.GetMetadata(), + Type: defaultActionBlock.GetAttribute("type").AsStringValueOrDefault("", defaultActionBlock), + } + listener.DefaultActions = append(listener.DefaultActions, action) + } + + return listener +} diff --git a/internal/adapters/terraform/aws/elb/adapt_test.go b/internal/adapters/terraform/aws/elb/adapt_test.go new file mode 100644 index 
000000000000..fa841476f14a --- /dev/null +++ b/internal/adapters/terraform/aws/elb/adapt_test.go @@ -0,0 +1,161 @@ +package elb + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/elb" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected elb.ELB + }{ + { + name: "configured", + terraform: ` + resource "aws_alb" "example" { + name = "good_alb" + internal = true + load_balancer_type = "application" + + access_logs { + bucket = aws_s3_bucket.lb_logs.bucket + prefix = "test-lb" + enabled = true + } + + drop_invalid_header_fields = true + } + + resource "aws_alb_listener" "example" { + load_balancer_arn = aws_alb.example.arn + protocol = "HTTPS" + ssl_policy = "ELBSecurityPolicy-TLS-1-1-2017-01" + + default_action { + type = "forward" + } + } +`, + expected: elb.ELB{ + LoadBalancers: []elb.LoadBalancer{ + { + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String("application", defsecTypes.NewTestMetadata()), + DropInvalidHeaderFields: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Internal: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Listeners: []elb.Listener{ + { + Metadata: defsecTypes.NewTestMetadata(), + Protocol: defsecTypes.String("HTTPS", defsecTypes.NewTestMetadata()), + TLSPolicy: defsecTypes.String("ELBSecurityPolicy-TLS-1-1-2017-01", defsecTypes.NewTestMetadata()), + DefaultActions: []elb.Action{ + { + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String("forward", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "defaults", + terraform: ` + resource "aws_alb" "example" { + } +`, + 
expected: elb.ELB{ + LoadBalancers: []elb.LoadBalancer{ + { + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String("application", defsecTypes.NewTestMetadata()), + DropInvalidHeaderFields: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Internal: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Listeners: nil, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_alb" "example" { + name = "good_alb" + internal = true + load_balancer_type = "application" + drop_invalid_header_fields = true + + access_logs { + bucket = aws_s3_bucket.lb_logs.bucket + prefix = "test-lb" + enabled = true + } + } + + resource "aws_alb_listener" "example" { + load_balancer_arn = aws_alb.example.arn + protocol = "HTTPS" + ssl_policy = "ELBSecurityPolicy-TLS-1-1-2017-01" + + default_action { + type = "forward" + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.LoadBalancers, 1) + loadBalancer := adapted.LoadBalancers[0] + + assert.Equal(t, 2, loadBalancer.Metadata.Range().GetStartLine()) + assert.Equal(t, 13, loadBalancer.Metadata.Range().GetEndLine()) + + assert.Equal(t, 4, loadBalancer.Internal.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, loadBalancer.Internal.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 5, loadBalancer.Type.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 5, loadBalancer.Type.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 6, loadBalancer.DropInvalidHeaderFields.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, loadBalancer.DropInvalidHeaderFields.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 15, 
loadBalancer.Listeners[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 23, loadBalancer.Listeners[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 17, loadBalancer.Listeners[0].Protocol.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 17, loadBalancer.Listeners[0].Protocol.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 18, loadBalancer.Listeners[0].TLSPolicy.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 18, loadBalancer.Listeners[0].TLSPolicy.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 20, loadBalancer.Listeners[0].DefaultActions[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 22, loadBalancer.Listeners[0].DefaultActions[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 21, loadBalancer.Listeners[0].DefaultActions[0].Type.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 21, loadBalancer.Listeners[0].DefaultActions[0].Type.GetMetadata().Range().GetEndLine()) + +} diff --git a/internal/adapters/terraform/aws/emr/adapt.go b/internal/adapters/terraform/aws/emr/adapt.go new file mode 100644 index 000000000000..0e85a1023ab5 --- /dev/null +++ b/internal/adapters/terraform/aws/emr/adapt.go @@ -0,0 +1,49 @@ +package emr + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/emr" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) emr.EMR { + return emr.EMR{ + Clusters: adaptClusters(modules), + SecurityConfiguration: adaptSecurityConfigurations(modules), + } +} +func adaptClusters(modules terraform.Modules) []emr.Cluster { + var clusters []emr.Cluster + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_emr_cluster") { + clusters = append(clusters, adaptCluster(resource)) + } + } + return clusters +} + +func adaptCluster(resource *terraform.Block) emr.Cluster { + + return emr.Cluster{ + Metadata: resource.GetMetadata(), + } +} + +func adaptSecurityConfigurations(modules terraform.Modules) 
[]emr.SecurityConfiguration { + var securityConfiguration []emr.SecurityConfiguration + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_emr_security_configuration") { + securityConfiguration = append(securityConfiguration, adaptSecurityConfiguration(resource)) + } + } + return securityConfiguration +} + +func adaptSecurityConfiguration(resource *terraform.Block) emr.SecurityConfiguration { + + return emr.SecurityConfiguration{ + Metadata: resource.GetMetadata(), + Name: resource.GetAttribute("name").AsStringValueOrDefault("", resource), + Configuration: resource.GetAttribute("configuration").AsStringValueOrDefault("", resource), + } + +} diff --git a/internal/adapters/terraform/aws/emr/adapt_test.go b/internal/adapters/terraform/aws/emr/adapt_test.go new file mode 100644 index 000000000000..0f1373dc7ecc --- /dev/null +++ b/internal/adapters/terraform/aws/emr/adapt_test.go @@ -0,0 +1,116 @@ +package emr + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/emr" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" +) + +func Test_adaptSecurityConfiguration(t *testing.T) { + tests := []struct { + name string + terraform string + expected emr.SecurityConfiguration + }{ + { + name: "test", + terraform: ` + resource "aws_emr_security_configuration" "foo" { + name = "emrsc_test" + configuration = < 0 { + return &iam.Document{ + Parsed: documents[0].Document, + Metadata: documents[0].Source.GetMetadata(), + IsOffset: true, + }, nil + } + + if attr.IsString() { + + dataBlock, err := modules.GetBlockById(attr.Value().AsString()) + if err != nil { + parsed, err := iamgo.Parse([]byte(unescapeVars(attr.Value().AsString()))) + if err != nil { + return nil, 
err + } + return &iam.Document{ + Parsed: *parsed, + Metadata: attr.GetMetadata(), + IsOffset: false, + HasRefs: len(attr.AllReferences()) > 0, + }, nil + } else if dataBlock.Type() == "data" && dataBlock.TypeLabel() == "aws_iam_policy_document" { + if doc, err := ConvertTerraformDocument(modules, dataBlock); err == nil { + return &iam.Document{ + Metadata: dataBlock.GetMetadata(), + Parsed: doc.Document, + IsOffset: true, + HasRefs: false, + }, nil + } + } + } + + return &iam.Document{ + Metadata: owner.GetMetadata(), + }, nil +} + +func unescapeVars(input string) string { + return strings.ReplaceAll(input, "&{", "${") +} + +// ConvertTerraformDocument converts a terraform data policy into an iamgo policy https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document +func ConvertTerraformDocument(modules terraform.Modules, block *terraform.Block) (*wrappedDocument, error) { + + builder := iamgo.NewPolicyBuilder() + + if sourceAttr := block.GetAttribute("source_json"); sourceAttr.IsString() { + doc, err := iamgo.ParseString(sourceAttr.Value().AsString()) + if err != nil { + return nil, err + } + builder = iamgo.PolicyBuilderFromDocument(*doc) + } + + if sourceDocumentsAttr := block.GetAttribute("source_policy_documents"); sourceDocumentsAttr.IsIterable() { + docs := findAllPolicies(modules, block, sourceDocumentsAttr) + for _, doc := range docs { + statements, _ := doc.Document.Statements() + for _, statement := range statements { + builder.WithStatement(statement) + } + } + } + + if idAttr := block.GetAttribute("policy_id"); idAttr.IsString() { + r := idAttr.GetMetadata().Range() + builder.WithId(idAttr.Value().AsString(), r.GetStartLine(), r.GetEndLine()) + } + + if versionAttr := block.GetAttribute("version"); versionAttr.IsString() { + r := versionAttr.GetMetadata().Range() + builder.WithVersion(versionAttr.Value().AsString(), r.GetStartLine(), r.GetEndLine()) + } + + for _, statementBlock := range 
block.GetBlocks("statement") { + statement := parseStatement(statementBlock) + builder.WithStatement(statement, statement.Range().StartLine, statement.Range().EndLine) + } + + if overrideDocumentsAttr := block.GetAttribute("override_policy_documents"); overrideDocumentsAttr.IsIterable() { + docs := findAllPolicies(modules, block, overrideDocumentsAttr) + for _, doc := range docs { + statements, _ := doc.Document.Statements() + for _, statement := range statements { + builder.WithStatement(statement, statement.Range().StartLine, statement.Range().EndLine) + } + } + } + + return &wrappedDocument{Document: builder.Build(), Source: block}, nil +} + +// nolint +func parseStatement(statementBlock *terraform.Block) iamgo.Statement { + + metadata := statementBlock.GetMetadata() + + builder := iamgo.NewStatementBuilder() + builder.WithRange(metadata.Range().GetStartLine(), metadata.Range().GetEndLine()) + + if sidAttr := statementBlock.GetAttribute("sid"); sidAttr.IsString() { + r := sidAttr.GetMetadata().Range() + builder.WithSid(sidAttr.Value().AsString(), r.GetStartLine(), r.GetEndLine()) + } + if actionsAttr := statementBlock.GetAttribute("actions"); actionsAttr.IsIterable() { + r := actionsAttr.GetMetadata().Range() + values := actionsAttr.AsStringValues().AsStrings() + builder.WithActions(values, r.GetStartLine(), r.GetEndLine()) + } + if notActionsAttr := statementBlock.GetAttribute("not_actions"); notActionsAttr.IsIterable() { + r := notActionsAttr.GetMetadata().Range() + values := notActionsAttr.AsStringValues().AsStrings() + builder.WithNotActions(values, r.GetStartLine(), r.GetEndLine()) + } + if resourcesAttr := statementBlock.GetAttribute("resources"); resourcesAttr.IsIterable() { + r := resourcesAttr.GetMetadata().Range() + values := resourcesAttr.AsStringValues().AsStrings() + builder.WithResources(values, r.GetStartLine(), r.GetEndLine()) + } + if notResourcesAttr := statementBlock.GetAttribute("not_resources"); notResourcesAttr.IsIterable() { + r := 
notResourcesAttr.GetMetadata().Range() + values := notResourcesAttr.AsStringValues().AsStrings() + builder.WithNotResources(values, r.GetStartLine(), r.GetEndLine()) + } + if effectAttr := statementBlock.GetAttribute("effect"); effectAttr.IsString() { + r := effectAttr.GetMetadata().Range() + builder.WithEffect(effectAttr.Value().AsString(), r.GetStartLine(), r.GetEndLine()) + } else { + builder.WithEffect(iamgo.EffectAllow) + } + + for _, principalBlock := range statementBlock.GetBlocks("principals") { + typeAttr := principalBlock.GetAttribute("type") + if !typeAttr.IsString() { + continue + } + identifiersAttr := principalBlock.GetAttribute("identifiers") + if !identifiersAttr.IsIterable() { + continue + } + r := principalBlock.GetMetadata().Range() + switch typeAttr.Value().AsString() { + case "*": + builder.WithAllPrincipals(true, r.GetStartLine(), r.GetEndLine()) + case "AWS": + values := identifiersAttr.AsStringValues().AsStrings() + builder.WithAWSPrincipals(values, r.GetStartLine(), r.GetEndLine()) + case "Federated": + values := identifiersAttr.AsStringValues().AsStrings() + builder.WithFederatedPrincipals(values, r.GetStartLine(), r.GetEndLine()) + case "Service": + values := identifiersAttr.AsStringValues().AsStrings() + builder.WithServicePrincipals(values, r.GetStartLine(), r.GetEndLine()) + case "CanonicalUser": + values := identifiersAttr.AsStringValues().AsStrings() + builder.WithCanonicalUsersPrincipals(values, r.GetStartLine(), r.GetEndLine()) + } + } + + for _, conditionBlock := range statementBlock.GetBlocks("condition") { + testAttr := conditionBlock.GetAttribute("test") + if !testAttr.IsString() { + continue + } + variableAttr := conditionBlock.GetAttribute("variable") + if !variableAttr.IsString() { + continue + } + valuesAttr := conditionBlock.GetAttribute("values") + values := valuesAttr.AsStringValues().AsStrings() + if valuesAttr.IsNil() || len(values) == 0 { + continue + } + + r := conditionBlock.GetMetadata().Range() + + 
builder.WithCondition( + testAttr.Value().AsString(), + variableAttr.Value().AsString(), + values, + r.GetStartLine(), + r.GetEndLine(), + ) + + } + return builder.Build() +} + +func findAllPolicies(modules terraform.Modules, parentBlock *terraform.Block, attr *terraform.Attribute) []wrappedDocument { + var documents []wrappedDocument + for _, ref := range attr.AllReferences() { + for _, b := range modules.GetBlocks() { + if b.Type() != "data" || b.TypeLabel() != "aws_iam_policy_document" { + continue + } + if ref.RefersTo(b.Reference()) { + document, err := ConvertTerraformDocument(modules, b) + if err != nil { + continue + } + documents = append(documents, *document) + continue + } + kref := *ref + kref.SetKey(parentBlock.Reference().RawKey()) + if kref.RefersTo(b.Reference()) { + document, err := ConvertTerraformDocument(modules, b) + if err != nil { + continue + } + documents = append(documents, *document) + } + } + } + return documents +} diff --git a/internal/adapters/terraform/aws/iam/groups.go b/internal/adapters/terraform/aws/iam/groups.go new file mode 100644 index 000000000000..d2b2ec8430e3 --- /dev/null +++ b/internal/adapters/terraform/aws/iam/groups.go @@ -0,0 +1,32 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func adaptGroups(modules terraform.Modules) []iam.Group { + var groups []iam.Group + + for _, groupBlock := range modules.GetResourcesByType("aws_iam_group") { + group := iam.Group{ + Metadata: groupBlock.GetMetadata(), + Name: groupBlock.GetAttribute("name").AsStringValueOrDefault("", groupBlock), + } + + if policy, ok := applyForDependentResource( + modules, groupBlock.ID(), "name", "aws_iam_group_policy", "group", findPolicy(modules), + ); ok && policy != nil { + group.Policies = append(group.Policies, *policy) + } + + if policy, ok := applyForDependentResource( + modules, groupBlock.ID(), "name", "aws_iam_group_policy_attachment", 
"group", findAttachmentPolicy(modules), + ); ok && policy != nil { + group.Policies = append(group.Policies, *policy) + } + + groups = append(groups, group) + } + return groups +} diff --git a/internal/adapters/terraform/aws/iam/groups_test.go b/internal/adapters/terraform/aws/iam/groups_test.go new file mode 100644 index 000000000000..c2179368043e --- /dev/null +++ b/internal/adapters/terraform/aws/iam/groups_test.go @@ -0,0 +1,115 @@ +package iam + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" +) + +func Test_adaptGroups(t *testing.T) { + tests := []struct { + name string + terraform string + expected []iam.Group + }{ + { + name: "policy", + terraform: ` + resource "aws_iam_group_policy" "my_developer_policy" { + name = "my_developer_policy" + group = aws_iam_group.my_developers.name + + policy = < 0 { + orphanage := lambda.Function{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Tracing: lambda.Tracing{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Mode: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + Permissions: nil, + } + for _, permission := range orphanResources { + orphanage.Permissions = append(orphanage.Permissions, a.adaptPermission(permission)) + } + functions = append(functions, orphanage) + } + + return functions +} + +func (a *adapter) adaptFunction(function *terraform.Block, modules terraform.Modules, orphans terraform.ResourceIDResolutions) lambda.Function { + var permissions []lambda.Permission + for _, module := range modules { + for _, p := range module.GetResourcesByType("aws_lambda_permission") { + if referencedBlock, err := module.GetReferencedBlock(p.GetAttribute("function_name"), p); err == nil && referencedBlock == function { + permissions = 
append(permissions, a.adaptPermission(p)) + delete(orphans, p.ID()) + } + } + } + + return lambda.Function{ + Metadata: function.GetMetadata(), + Tracing: a.adaptTracing(function), + Permissions: permissions, + } +} + +func (a *adapter) adaptTracing(function *terraform.Block) lambda.Tracing { + if tracingConfig := function.GetBlock("tracing_config"); tracingConfig.IsNotNil() { + return lambda.Tracing{ + Metadata: tracingConfig.GetMetadata(), + Mode: tracingConfig.GetAttribute("mode").AsStringValueOrDefault("", tracingConfig), + } + } + + return lambda.Tracing{ + Metadata: function.GetMetadata(), + Mode: defsecTypes.StringDefault("", function.GetMetadata()), + } +} + +func (a *adapter) adaptPermission(permission *terraform.Block) lambda.Permission { + sourceARNAttr := permission.GetAttribute("source_arn") + sourceARN := sourceARNAttr.AsStringValueOrDefault("", permission) + + if len(sourceARNAttr.AllReferences()) > 0 { + sourceARN = defsecTypes.String(sourceARNAttr.AllReferences()[0].NameLabel(), sourceARNAttr.GetMetadata()) + } + + return lambda.Permission{ + Metadata: permission.GetMetadata(), + Principal: permission.GetAttribute("principal").AsStringValueOrDefault("", permission), + SourceARN: sourceARN, + } +} diff --git a/internal/adapters/terraform/aws/lambda/adapt_test.go b/internal/adapters/terraform/aws/lambda/adapt_test.go new file mode 100644 index 000000000000..c82457e9695b --- /dev/null +++ b/internal/adapters/terraform/aws/lambda/adapt_test.go @@ -0,0 +1,155 @@ +package lambda + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/lambda" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform 
string + expected lambda.Lambda + }{ + { + name: "reference arn", + terraform: ` + resource "aws_lambda_function" "example" { + filename = "lambda_function_payload.zip" + function_name = "lambda_function_name" + role = aws_iam_role.iam_for_lambda.arn + runtime = "nodejs12.x" + + tracing_config { + mode = "Passthrough" + } + } + + resource "aws_lambda_permission" "example" { + statement_id = "AllowExecutionFromSNS" + action = "lambda:InvokeFunction" + function_name = aws_lambda_function.example.function_name + principal = "sns.amazonaws.com" + source_arn = aws_sns_topic.default.arn + } +`, + expected: lambda.Lambda{ + Functions: []lambda.Function{ + { + Metadata: defsecTypes.NewTestMetadata(), + Tracing: lambda.Tracing{ + Metadata: defsecTypes.NewTestMetadata(), + Mode: defsecTypes.String("Passthrough", defsecTypes.NewTestMetadata()), + }, + Permissions: []lambda.Permission{ + { + Metadata: defsecTypes.NewTestMetadata(), + Principal: defsecTypes.String("sns.amazonaws.com", defsecTypes.NewTestMetadata()), + SourceARN: defsecTypes.String("default", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + { + name: "defaults (with an orphan)", + terraform: ` + resource "aws_lambda_function" "example" { + tracing_config { + } + } + + resource "aws_lambda_permission" "example" { + } +`, + expected: lambda.Lambda{ + Functions: []lambda.Function{ + { + Metadata: defsecTypes.NewTestMetadata(), + Tracing: lambda.Tracing{ + Metadata: defsecTypes.NewTestMetadata(), + Mode: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + { + Metadata: defsecTypes.NewTestMetadata(), + Tracing: lambda.Tracing{ + Metadata: defsecTypes.NewTestMetadata(), + Mode: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + Permissions: []lambda.Permission{ + { + Metadata: defsecTypes.NewTestMetadata(), + Principal: defsecTypes.String("", defsecTypes.NewTestMetadata()), + SourceARN: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + } + + 
for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_lambda_function" "example" { + filename = "lambda_function_payload.zip" + function_name = "lambda_function_name" + role = aws_iam_role.iam_for_lambda.arn + runtime = "nodejs12.x" + + tracing_config { + mode = "Passthrough" + } + } + + resource "aws_lambda_permission" "example" { + statement_id = "AllowExecutionFromSNS" + action = "lambda:InvokeFunction" + function_name = aws_lambda_function.example.function_name + principal = "sns.amazonaws.com" + source_arn = "string arn" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Functions, 1) + function := adapted.Functions[0] + + assert.Equal(t, 2, function.Metadata.Range().GetStartLine()) + assert.Equal(t, 11, function.Metadata.Range().GetEndLine()) + + assert.Equal(t, 8, function.Tracing.Metadata.Range().GetStartLine()) + assert.Equal(t, 10, function.Tracing.Metadata.Range().GetEndLine()) + + assert.Equal(t, 9, function.Tracing.Mode.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 9, function.Tracing.Mode.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 13, function.Permissions[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 19, function.Permissions[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 17, function.Permissions[0].Principal.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 17, function.Permissions[0].Principal.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 18, function.Permissions[0].SourceARN.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 18, function.Permissions[0].SourceARN.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/mq/adapt.go 
b/internal/adapters/terraform/aws/mq/adapt.go new file mode 100644 index 000000000000..c5da698dca8d --- /dev/null +++ b/internal/adapters/terraform/aws/mq/adapt.go @@ -0,0 +1,48 @@ +package mq + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/mq" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) mq.MQ { + return mq.MQ{ + Brokers: adaptBrokers(modules), + } +} + +func adaptBrokers(modules terraform.Modules) []mq.Broker { + var brokers []mq.Broker + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_mq_broker") { + brokers = append(brokers, adaptBroker(resource)) + } + } + return brokers +} + +func adaptBroker(resource *terraform.Block) mq.Broker { + + broker := mq.Broker{ + Metadata: resource.GetMetadata(), + PublicAccess: types.BoolDefault(false, resource.GetMetadata()), + Logging: mq.Logging{ + Metadata: resource.GetMetadata(), + General: types.BoolDefault(false, resource.GetMetadata()), + Audit: types.BoolDefault(false, resource.GetMetadata()), + }, + } + + publicAccessAttr := resource.GetAttribute("publicly_accessible") + broker.PublicAccess = publicAccessAttr.AsBoolValueOrDefault(false, resource) + if logsBlock := resource.GetBlock("logs"); logsBlock.IsNotNil() { + broker.Logging.Metadata = logsBlock.GetMetadata() + auditAttr := logsBlock.GetAttribute("audit") + broker.Logging.Audit = auditAttr.AsBoolValueOrDefault(false, logsBlock) + generalAttr := logsBlock.GetAttribute("general") + broker.Logging.General = generalAttr.AsBoolValueOrDefault(false, logsBlock) + } + + return broker +} diff --git a/internal/adapters/terraform/aws/mq/adapt_test.go b/internal/adapters/terraform/aws/mq/adapt_test.go new file mode 100644 index 000000000000..0a3dd1d1e3cc --- /dev/null +++ b/internal/adapters/terraform/aws/mq/adapt_test.go @@ -0,0 +1,119 @@ +package mq + +import ( + "testing" + + defsecTypes 
"github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/mq" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptBroker(t *testing.T) { + tests := []struct { + name string + terraform string + expected mq.Broker + }{ + { + name: "audit logs", + terraform: ` + resource "aws_mq_broker" "example" { + logs { + audit = true + } + + publicly_accessible = false + } +`, + expected: mq.Broker{ + Metadata: defsecTypes.NewTestMetadata(), + PublicAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Logging: mq.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + General: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Audit: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "general logs", + terraform: ` + resource "aws_mq_broker" "example" { + logs { + general = true + } + + publicly_accessible = true + } +`, + expected: mq.Broker{ + Metadata: defsecTypes.NewTestMetadata(), + PublicAccess: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Logging: mq.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + General: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Audit: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "defaults", + terraform: ` + resource "aws_mq_broker" "example" { + } +`, + expected: mq.Broker{ + Metadata: defsecTypes.NewTestMetadata(), + PublicAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Logging: mq.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + General: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Audit: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + 
modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptBroker(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_mq_broker" "example" { + logs { + general = true + } + + publicly_accessible = true + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Brokers, 1) + broker := adapted.Brokers[0] + + assert.Equal(t, 2, broker.Metadata.Range().GetStartLine()) + assert.Equal(t, 8, broker.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, broker.Logging.Metadata.Range().GetStartLine()) + assert.Equal(t, 5, broker.Logging.Metadata.Range().GetEndLine()) + + assert.Equal(t, 4, broker.Logging.General.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, broker.Logging.General.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 7, broker.PublicAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, broker.PublicAccess.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/msk/adapt.go b/internal/adapters/terraform/aws/msk/adapt.go new file mode 100644 index 000000000000..faf43df2197b --- /dev/null +++ b/internal/adapters/terraform/aws/msk/adapt.go @@ -0,0 +1,97 @@ +package msk + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/msk" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) msk.MSK { + return msk.MSK{ + Clusters: adaptClusters(modules), + } +} + +func adaptClusters(modules terraform.Modules) []msk.Cluster { + var clusters []msk.Cluster + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_msk_cluster") { + clusters = append(clusters, adaptCluster(resource)) + } + } + return clusters +} + +func adaptCluster(resource *terraform.Block) 
msk.Cluster { + cluster := msk.Cluster{ + Metadata: resource.GetMetadata(), + EncryptionInTransit: msk.EncryptionInTransit{ + Metadata: resource.GetMetadata(), + ClientBroker: defsecTypes.StringDefault("TLS_PLAINTEXT", resource.GetMetadata()), + }, + EncryptionAtRest: msk.EncryptionAtRest{ + Metadata: resource.GetMetadata(), + KMSKeyARN: defsecTypes.StringDefault("", resource.GetMetadata()), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + Logging: msk.Logging{ + Metadata: resource.GetMetadata(), + Broker: msk.BrokerLogging{ + Metadata: resource.GetMetadata(), + S3: msk.S3Logging{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + Cloudwatch: msk.CloudwatchLogging{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + Firehose: msk.FirehoseLogging{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + }, + }, + } + + if encryptBlock := resource.GetBlock("encryption_info"); encryptBlock.IsNotNil() { + if encryptionInTransitBlock := encryptBlock.GetBlock("encryption_in_transit"); encryptionInTransitBlock.IsNotNil() { + cluster.EncryptionInTransit.Metadata = encryptionInTransitBlock.GetMetadata() + if clientBrokerAttr := encryptionInTransitBlock.GetAttribute("client_broker"); clientBrokerAttr.IsNotNil() { + cluster.EncryptionInTransit.ClientBroker = clientBrokerAttr.AsStringValueOrDefault("TLS", encryptionInTransitBlock) + } + } + + if encryptionAtRestAttr := encryptBlock.GetAttribute("encryption_at_rest_kms_key_arn"); encryptionAtRestAttr.IsNotNil() { + cluster.EncryptionAtRest.Metadata = encryptionAtRestAttr.GetMetadata() + cluster.EncryptionAtRest.KMSKeyARN = encryptionAtRestAttr.AsStringValueOrDefault("", encryptBlock) + cluster.EncryptionAtRest.Enabled = defsecTypes.Bool(true, encryptionAtRestAttr.GetMetadata()) + } + } + + if logBlock := 
resource.GetBlock("logging_info"); logBlock.IsNotNil() { + cluster.Logging.Metadata = logBlock.GetMetadata() + if brokerLogsBlock := logBlock.GetBlock("broker_logs"); brokerLogsBlock.IsNotNil() { + cluster.Logging.Broker.Metadata = brokerLogsBlock.GetMetadata() + if brokerLogsBlock.HasChild("s3") { + if s3Block := brokerLogsBlock.GetBlock("s3"); s3Block.IsNotNil() { + s3enabledAttr := s3Block.GetAttribute("enabled") + cluster.Logging.Broker.S3.Metadata = s3Block.GetMetadata() + cluster.Logging.Broker.S3.Enabled = s3enabledAttr.AsBoolValueOrDefault(false, s3Block) + } + } + if cloudwatchBlock := brokerLogsBlock.GetBlock("cloudwatch_logs"); cloudwatchBlock.IsNotNil() { + cwEnabledAttr := cloudwatchBlock.GetAttribute("enabled") + cluster.Logging.Broker.Cloudwatch.Metadata = cloudwatchBlock.GetMetadata() + cluster.Logging.Broker.Cloudwatch.Enabled = cwEnabledAttr.AsBoolValueOrDefault(false, cloudwatchBlock) + } + if firehoseBlock := brokerLogsBlock.GetBlock("firehose"); firehoseBlock.IsNotNil() { + firehoseEnabledAttr := firehoseBlock.GetAttribute("enabled") + cluster.Logging.Broker.Firehose.Metadata = firehoseBlock.GetMetadata() + cluster.Logging.Broker.Firehose.Enabled = firehoseEnabledAttr.AsBoolValueOrDefault(false, firehoseBlock) + } + } + } + + return cluster +} diff --git a/internal/adapters/terraform/aws/msk/adapt_test.go b/internal/adapters/terraform/aws/msk/adapt_test.go new file mode 100644 index 000000000000..87a063f4ee72 --- /dev/null +++ b/internal/adapters/terraform/aws/msk/adapt_test.go @@ -0,0 +1,200 @@ +package msk + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/msk" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptCluster(t *testing.T) { + 
// Test_adaptCluster covers a fully configured aws_msk_cluster (TLS in
// transit, KMS at rest, all three broker log sinks) and an empty resource
// that should yield the adapter defaults.
func Test_adaptCluster(t *testing.T) {
	tests := []struct {
		name      string
		terraform string
		expected  msk.Cluster
	}{
		{
			// Every optional block present; all logging sinks enabled.
			name: "configured",
			terraform: `
			resource "aws_msk_cluster" "example" {
				cluster_name = "example"

				encryption_info {
					encryption_in_transit {
						client_broker = "TLS"
						in_cluster = true
					}
					encryption_at_rest_kms_key_arn = "foo-bar-key"
				}

				logging_info {
					broker_logs {
						cloudwatch_logs {
							enabled = true
							log_group = aws_cloudwatch_log_group.test.name
						}
						firehose {
							enabled = true
							delivery_stream = aws_kinesis_firehose_delivery_stream.test_stream.name
						}
						s3 {
							enabled = true
							bucket = aws_s3_bucket.bucket.id
							prefix = "logs/msk-"
						}
					}
				}
			}
`,
			expected: msk.Cluster{
				Metadata: defsecTypes.NewTestMetadata(),
				EncryptionInTransit: msk.EncryptionInTransit{
					Metadata:     defsecTypes.NewTestMetadata(),
					ClientBroker: defsecTypes.String("TLS", defsecTypes.NewTestMetadata()),
				},
				EncryptionAtRest: msk.EncryptionAtRest{
					Metadata:  defsecTypes.NewTestMetadata(),
					KMSKeyARN: defsecTypes.String("foo-bar-key", defsecTypes.NewTestMetadata()),
					Enabled:   defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
				},
				Logging: msk.Logging{
					Metadata: defsecTypes.NewTestMetadata(),
					Broker: msk.BrokerLogging{
						Metadata: defsecTypes.NewTestMetadata(),
						S3: msk.S3Logging{
							Metadata: defsecTypes.NewTestMetadata(),
							Enabled:  defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
						},
						Cloudwatch: msk.CloudwatchLogging{
							Metadata: defsecTypes.NewTestMetadata(),
							Enabled:  defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
						},
						Firehose: msk.FirehoseLogging{
							Metadata: defsecTypes.NewTestMetadata(),
							Enabled:  defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
						},
					},
				},
			},
		},
		{
			// Empty resource: expect TLS_PLAINTEXT in transit and all
			// logging disabled (EncryptionAtRest is left at its zero value
			// here — the equality helper tolerates that).
			name: "defaults",
			terraform: `
			resource "aws_msk_cluster" "example" {
			}
`,
			expected: msk.Cluster{
				Metadata: defsecTypes.NewTestMetadata(),
				EncryptionInTransit: msk.EncryptionInTransit{
					Metadata:     defsecTypes.NewTestMetadata(),
					ClientBroker: defsecTypes.String("TLS_PLAINTEXT", defsecTypes.NewTestMetadata()),
				},
				Logging: msk.Logging{
					Metadata: defsecTypes.NewTestMetadata(),
					Broker: msk.BrokerLogging{
						Metadata: defsecTypes.NewTestMetadata(),
						S3: msk.S3Logging{
							Metadata: defsecTypes.NewTestMetadata(),
							Enabled:  defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
						},
						Cloudwatch: msk.CloudwatchLogging{
							Metadata: defsecTypes.NewTestMetadata(),
							Enabled:  defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
						},
						Firehose: msk.FirehoseLogging{
							Metadata: defsecTypes.NewTestMetadata(),
							Enabled:  defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
						},
					},
				},
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf")
			adapted := adaptCluster(modules.GetBlocks()[0])
			testutil.AssertDefsecEqual(t, test.expected, adapted)
		})
	}
}
+ + assert.Equal(t, 10, cluster.EncryptionAtRest.Metadata.Range().GetStartLine()) + assert.Equal(t, 10, cluster.EncryptionAtRest.Metadata.Range().GetEndLine()) + + assert.Equal(t, 13, cluster.Logging.Metadata.Range().GetStartLine()) + assert.Equal(t, 29, cluster.Logging.Metadata.Range().GetEndLine()) + + assert.Equal(t, 14, cluster.Logging.Broker.Metadata.Range().GetStartLine()) + assert.Equal(t, 28, cluster.Logging.Broker.Metadata.Range().GetEndLine()) + + assert.Equal(t, 15, cluster.Logging.Broker.Cloudwatch.Metadata.Range().GetStartLine()) + assert.Equal(t, 18, cluster.Logging.Broker.Cloudwatch.Metadata.Range().GetEndLine()) + + assert.Equal(t, 16, cluster.Logging.Broker.Cloudwatch.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 16, cluster.Logging.Broker.Cloudwatch.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 19, cluster.Logging.Broker.Firehose.Metadata.Range().GetStartLine()) + assert.Equal(t, 22, cluster.Logging.Broker.Firehose.Metadata.Range().GetEndLine()) + + assert.Equal(t, 20, cluster.Logging.Broker.Firehose.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 20, cluster.Logging.Broker.Firehose.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 23, cluster.Logging.Broker.S3.Metadata.Range().GetStartLine()) + assert.Equal(t, 27, cluster.Logging.Broker.S3.Metadata.Range().GetEndLine()) + + assert.Equal(t, 24, cluster.Logging.Broker.S3.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 24, cluster.Logging.Broker.S3.Enabled.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/neptune/adapt.go b/internal/adapters/terraform/aws/neptune/adapt.go new file mode 100644 index 000000000000..7283624c33d2 --- /dev/null +++ b/internal/adapters/terraform/aws/neptune/adapt.go @@ -0,0 +1,50 @@ +package neptune + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/neptune" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes 
"github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) neptune.Neptune { + return neptune.Neptune{ + Clusters: adaptClusters(modules), + } +} + +func adaptClusters(modules terraform.Modules) []neptune.Cluster { + var clusters []neptune.Cluster + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_neptune_cluster") { + clusters = append(clusters, adaptCluster(resource)) + } + } + return clusters +} + +func adaptCluster(resource *terraform.Block) neptune.Cluster { + cluster := neptune.Cluster{ + Metadata: resource.GetMetadata(), + Logging: neptune.Logging{ + Metadata: resource.GetMetadata(), + Audit: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + StorageEncrypted: defsecTypes.BoolDefault(false, resource.GetMetadata()), + KMSKeyID: defsecTypes.StringDefault("", resource.GetMetadata()), + } + + if enableLogExportsAttr := resource.GetAttribute("enable_cloudwatch_logs_exports"); enableLogExportsAttr.IsNotNil() { + cluster.Logging.Metadata = enableLogExportsAttr.GetMetadata() + if enableLogExportsAttr.Contains("audit") { + cluster.Logging.Audit = defsecTypes.Bool(true, enableLogExportsAttr.GetMetadata()) + } + } + + storageEncryptedAttr := resource.GetAttribute("storage_encrypted") + cluster.StorageEncrypted = storageEncryptedAttr.AsBoolValueOrDefault(false, resource) + + KMSKeyAttr := resource.GetAttribute("kms_key_arn") + cluster.KMSKeyID = KMSKeyAttr.AsStringValueOrDefault("", resource) + + return cluster +} diff --git a/internal/adapters/terraform/aws/neptune/adapt_test.go b/internal/adapters/terraform/aws/neptune/adapt_test.go new file mode 100644 index 000000000000..ce2d5b80c896 --- /dev/null +++ b/internal/adapters/terraform/aws/neptune/adapt_test.go @@ -0,0 +1,97 @@ +package neptune + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/neptune" + + 
"github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptCluster(t *testing.T) { + tests := []struct { + name string + terraform string + expected neptune.Cluster + }{ + { + name: "configured", + terraform: ` + resource "aws_neptune_cluster" "example" { + enable_cloudwatch_logs_exports = ["audit"] + storage_encrypted = true + kms_key_arn = "kms-key" + } +`, + expected: neptune.Cluster{ + Metadata: defsecTypes.NewTestMetadata(), + Logging: neptune.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + Audit: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + StorageEncrypted: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("kms-key", defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defaults", + terraform: ` + resource "aws_neptune_cluster" "example" { + } +`, + expected: neptune.Cluster{ + Metadata: defsecTypes.NewTestMetadata(), + Logging: neptune.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + Audit: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + StorageEncrypted: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptCluster(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_neptune_cluster" "example" { + enable_cloudwatch_logs_exports = ["audit"] + storage_encrypted = true + kms_key_arn = "kms-key" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Clusters, 1) + cluster := 
adapted.Clusters[0] + + assert.Equal(t, 2, cluster.Metadata.Range().GetStartLine()) + assert.Equal(t, 6, cluster.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, cluster.Logging.Metadata.Range().GetStartLine()) + assert.Equal(t, 3, cluster.Logging.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, cluster.Logging.Audit.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, cluster.Logging.Audit.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, cluster.StorageEncrypted.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, cluster.StorageEncrypted.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 5, cluster.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 5, cluster.KMSKeyID.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/provider/adapt.go b/internal/adapters/terraform/aws/provider/adapt.go new file mode 100644 index 000000000000..b34fc8e730c5 --- /dev/null +++ b/internal/adapters/terraform/aws/provider/adapt.go @@ -0,0 +1,166 @@ +package provider + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +const ( + defaultMaxRetires = 25 + defaultSharedConfigFile = "~/.aws/config" + //#nosec G101 -- False positive + defaultSharedCredentialsFile = "~/.aws/credentials" +) + +func Adapt(modules terraform.Modules) []aws.TerraformProvider { + return adaptProviders(modules) +} + +func adaptProviders(modules terraform.Modules) []aws.TerraformProvider { + var providers []aws.TerraformProvider + for _, providerBlock := range modules.GetBlocks().OfType("provider") { + if providerBlock.Label() == "aws" { + providers = append(providers, adaptProvider(providerBlock)) + } + } + + return providers +} + +func adaptProvider(b *terraform.Block) aws.TerraformProvider { + return aws.TerraformProvider{ + Metadata: b.GetMetadata(), + Alias: getStringAttrValue("alias", b), + 
Version: getStringAttrValue("version", b), + AccessKey: getStringAttrValue("access_key", b), + AllowedAccountsIDs: b.GetAttribute("allowed_account_ids").AsStringValueSliceOrEmpty(), + AssumeRole: adaptAssumeRole(b), + AssumeRoleWithWebIdentity: adaptAssumeRoleWithWebIdentity(b), + CustomCABundle: getStringAttrValue("custom_ca_bundle", b), + DefaultTags: adaptDefaultTags(b), + EC2MetadataServiceEndpoint: getStringAttrValue("ec2_metadata_service_endpoint", b), + EC2MetadataServiceEndpointMode: getStringAttrValue("ec2_metadata_service_endpoint_mode", b), + Endpoints: adaptEndpoints(b), + ForbiddenAccountIDs: b.GetAttribute("forbidden_account_ids").AsStringValueSliceOrEmpty(), + HttpProxy: getStringAttrValue("http_proxy", b), + IgnoreTags: adaptIgnoreTags(b), + Insecure: b.GetAttribute("insecure").AsBoolValueOrDefault(false, b), + MaxRetries: b.GetAttribute("max_retries").AsIntValueOrDefault(defaultMaxRetires, b), + Profile: getStringAttrValue("profile", b), + Region: getStringAttrValue("region", b), + RetryMode: getStringAttrValue("retry_mode", b), + S3UsePathStyle: b.GetAttribute("s3_use_path_style").AsBoolValueOrDefault(false, b), + S3USEast1RegionalEndpoint: getStringAttrValue("s3_us_east_1_regional_endpoint", b), + SecretKey: getStringAttrValue("secret_key", b), + SharedConfigFiles: b.GetAttribute("shared_config_files").AsStringValuesOrDefault(b, defaultSharedConfigFile), + SharedCredentialsFiles: b.GetAttribute("shared_credentials_files").AsStringValuesOrDefault(b, defaultSharedCredentialsFile), + SkipCredentialsValidation: b.GetAttribute("skip_credentials_validation").AsBoolValueOrDefault(false, b), + SkipMetadataAPICheck: b.GetAttribute("skip_metadata_api_check").AsBoolValueOrDefault(false, b), + SkipRegionValidation: b.GetAttribute("skip_region_validation").AsBoolValueOrDefault(false, b), + SkipRequestingAccountID: b.GetAttribute("skip_requesting_account_id").AsBoolValueOrDefault(false, b), + STSRegion: getStringAttrValue("sts_region", b), + Token: 
getStringAttrValue("token", b), + UseDualstackEndpoint: b.GetAttribute("use_dualstack_endpoint").AsBoolValueOrDefault(false, b), + UseFIPSEndpoint: b.GetAttribute("use_fips_endpoint").AsBoolValueOrDefault(false, b), + } +} + +func adaptAssumeRole(p *terraform.Block) aws.AssumeRole { + assumeRoleBlock := p.GetBlock("assume_role") + + if assumeRoleBlock.IsNil() { + return aws.AssumeRole{ + Metadata: p.GetMetadata(), + Duration: types.StringDefault("", p.GetMetadata()), + ExternalID: types.StringDefault("", p.GetMetadata()), + Policy: types.StringDefault("", p.GetMetadata()), + RoleARN: types.StringDefault("", p.GetMetadata()), + SessionName: types.StringDefault("", p.GetMetadata()), + SourceIdentity: types.StringDefault("", p.GetMetadata()), + } + } + + return aws.AssumeRole{ + Metadata: assumeRoleBlock.GetMetadata(), + Duration: getStringAttrValue("duration", p), + ExternalID: getStringAttrValue("external_id", p), + Policy: getStringAttrValue("policy", p), + PolicyARNs: p.GetAttribute("policy_arns").AsStringValueSliceOrEmpty(), + RoleARN: getStringAttrValue("role_arn", p), + SessionName: getStringAttrValue("session_name", p), + SourceIdentity: getStringAttrValue("source_identity", p), + Tags: p.GetAttribute("tags").AsMapValue(), + TransitiveTagKeys: p.GetAttribute("transitive_tag_keys").AsStringValueSliceOrEmpty(), + } +} + +func adaptAssumeRoleWithWebIdentity(p *terraform.Block) aws.AssumeRoleWithWebIdentity { + block := p.GetBlock("assume_role_with_web_identity") + if block.IsNil() { + return aws.AssumeRoleWithWebIdentity{ + Metadata: p.GetMetadata(), + Duration: types.StringDefault("", p.GetMetadata()), + Policy: types.StringDefault("", p.GetMetadata()), + RoleARN: types.StringDefault("", p.GetMetadata()), + SessionName: types.StringDefault("", p.GetMetadata()), + WebIdentityToken: types.StringDefault("", p.GetMetadata()), + WebIdentityTokenFile: types.StringDefault("", p.GetMetadata()), + } + } + + return aws.AssumeRoleWithWebIdentity{ + Metadata: 
block.GetMetadata(), + Duration: getStringAttrValue("duration", p), + Policy: getStringAttrValue("policy", p), + PolicyARNs: p.GetAttribute("policy_arns").AsStringValueSliceOrEmpty(), + RoleARN: getStringAttrValue("role_arn", p), + SessionName: getStringAttrValue("session_name", p), + WebIdentityToken: getStringAttrValue("web_identity_token", p), + WebIdentityTokenFile: getStringAttrValue("web_identity_token_file", p), + } +} + +func adaptEndpoints(p *terraform.Block) types.MapValue { + block := p.GetBlock("endpoints") + if block.IsNil() { + return types.MapDefault(make(map[string]string), p.GetMetadata()) + } + + values := make(map[string]string) + + for name, attr := range block.Attributes() { + values[name] = attr.AsStringValueOrDefault("", block).Value() + } + + return types.Map(values, block.GetMetadata()) +} + +func adaptDefaultTags(p *terraform.Block) aws.DefaultTags { + attr, _ := p.GetNestedAttribute("default_tags.tags") + if attr.IsNil() { + return aws.DefaultTags{} + } + + return aws.DefaultTags{ + Metadata: attr.GetMetadata(), + Tags: attr.AsMapValue(), + } +} + +func adaptIgnoreTags(p *terraform.Block) aws.IgnoreTags { + block := p.GetBlock("ignore_tags") + if block.IsNil() { + return aws.IgnoreTags{} + } + + return aws.IgnoreTags{ + Metadata: block.GetMetadata(), + Keys: block.GetAttribute("keys").AsStringValueSliceOrEmpty(), + KeyPrefixes: block.GetAttribute("key_prefixes").AsStringValueSliceOrEmpty(), + } +} + +func getStringAttrValue(name string, parent *terraform.Block) types.StringValue { + return parent.GetAttribute(name).AsStringValueOrDefault("", parent) +} diff --git a/internal/adapters/terraform/aws/provider/adapt_test.go b/internal/adapters/terraform/aws/provider/adapt_test.go new file mode 100644 index 000000000000..ffcd45027627 --- /dev/null +++ b/internal/adapters/terraform/aws/provider/adapt_test.go @@ -0,0 +1,129 @@ +package provider + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/aws" + 
// TestAdapt checks the provider adapter end-to-end: a fully configured
// LocalStack-style provider (including variable resolution for
// s3_use_path_style) and a pair of aliased provider configurations.
func TestAdapt(t *testing.T) {
	tests := []struct {
		name     string
		source   string
		expected []aws.TerraformProvider
	}{
		{
			name: "happy",
			source: `
variable "s3_use_path_style" {
	default = true
}

provider "aws" {
	version = "~> 5.0"
	region  = "us-east-1"
	profile = "localstack"

	access_key                  = "fake"
	secret_key                  = "fake"
	skip_credentials_validation = true
	skip_metadata_api_check     = true
	skip_requesting_account_id  = true
	s3_use_path_style           = var.s3_use_path_style

	endpoints {
		dynamodb = "http://localhost:4566"
		s3       = "http://localhost:4566"
	}

	default_tags {
		tags = {
			Environment = "Local"
			Name        = "LocalStack"
		}
	}
}`,
			expected: []aws.TerraformProvider{
				{
					Version: types.String("~> 5.0", types.NewTestMetadata()),
					Region:  types.String("us-east-1", types.NewTestMetadata()),
					DefaultTags: aws.DefaultTags{
						Metadata: types.NewTestMetadata(),
						Tags: types.Map(map[string]string{
							"Environment": "Local",
							"Name":        "LocalStack",
						}, types.NewTestMetadata()),
					},
					Endpoints: types.Map(map[string]string{
						"dynamodb": "http://localhost:4566",
						"s3":       "http://localhost:4566",
					}, types.NewTestMetadata()),
					Profile:                   types.String("localstack", types.NewTestMetadata()),
					AccessKey:                 types.String("fake", types.NewTestMetadata()),
					SecretKey:                 types.String("fake", types.NewTestMetadata()),
					SkipCredentialsValidation: types.Bool(true, types.NewTestMetadata()),
					SkipMetadataAPICheck:      types.Bool(true, types.NewTestMetadata()),
					SkipRequestingAccountID:   types.Bool(true, types.NewTestMetadata()),
					// Resolved from the s3_use_path_style variable above.
					S3UsePathStyle: types.Bool(true, types.NewTestMetadata()),
					MaxRetries:     types.IntDefault(defaultMaxRetires, types.NewTestMetadata()),
					SharedConfigFiles: types.StringValueList{
						types.StringDefault(defaultSharedConfigFile, types.NewTestMetadata()),
					},
					SharedCredentialsFiles: types.StringValueList{
						types.StringDefault(defaultSharedCredentialsFile, types.NewTestMetadata()),
					},
				},
			},
		},
		{
			// Two provider blocks, the second with an alias: both must be
			// adapted, each with its own region.
			name: "multiply provider configurations",
			source: `

provider "aws" {
	region = "us-east-1"
}

provider "aws" {
	alias  = "west"
	region = "us-west-2"
}
`,
			expected: []aws.TerraformProvider{
				{
					Region:     types.String("us-east-1", types.NewTestMetadata()),
					Endpoints:  types.Map(make(map[string]string), types.NewTestMetadata()),
					MaxRetries: types.IntDefault(defaultMaxRetires, types.NewTestMetadata()),
					SharedConfigFiles: types.StringValueList{
						types.StringDefault(defaultSharedConfigFile, types.NewTestMetadata()),
					},
					SharedCredentialsFiles: types.StringValueList{
						types.StringDefault(defaultSharedCredentialsFile, types.NewTestMetadata()),
					},
				},
				{
					Alias:      types.String("west", types.NewTestMetadata()),
					Region:     types.String("us-west-2", types.NewTestMetadata()),
					Endpoints:  types.Map(make(map[string]string), types.NewTestMetadata()),
					MaxRetries: types.IntDefault(defaultMaxRetires, types.NewTestMetadata()),
					SharedConfigFiles: types.StringValueList{
						types.StringDefault(defaultSharedConfigFile, types.NewTestMetadata()),
					},
					SharedCredentialsFiles: types.StringValueList{
						types.StringDefault(defaultSharedCredentialsFile, types.NewTestMetadata()),
					},
				},
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			modules := tftestutil.CreateModulesFromSource(t, test.source, ".tf")
			testutil.AssertDefsecEqual(t, test.expected, Adapt(modules))
		})
	}
}
"github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) rds.RDS { + return rds.RDS{ + Instances: getInstances(modules), + Clusters: getClusters(modules), + Classic: getClassic(modules), + Snapshots: getSnapshots(modules), + ParameterGroups: getParameterGroups(modules), + } +} + +func getInstances(modules terraform.Modules) (instances []rds.Instance) { + for _, resource := range modules.GetResourcesByType("aws_db_instance") { + instances = append(instances, adaptInstance(resource, modules)) + } + + return instances +} + +func getParameterGroups(modules terraform.Modules) (parametergroups []rds.ParameterGroups) { + for _, resource := range modules.GetResourcesByType("aws_db_parameter_group") { + parametergroups = append(parametergroups, adaptDBParameterGroups(resource, modules)) + } + + return parametergroups +} + +func getSnapshots(modules terraform.Modules) (snapshots []rds.Snapshots) { + for _, resource := range modules.GetResourcesByType("aws_db_snapshot") { + snapshots = append(snapshots, adaptDBSnapshots(resource, modules)) + } + + return snapshots +} + +func getClusters(modules terraform.Modules) (clusters []rds.Cluster) { + + rdsInstanceMaps := modules.GetChildResourceIDMapByType("aws_rds_cluster_instance") + for _, resource := range modules.GetResourcesByType("aws_rds_cluster") { + cluster, instanceIDs := adaptCluster(resource, modules) + for _, id := range instanceIDs { + rdsInstanceMaps.Resolve(id) + } + clusters = append(clusters, cluster) + } + + orphanResources := modules.GetResourceByIDs(rdsInstanceMaps.Orphans()...) 
+ + if len(orphanResources) > 0 { + orphanage := rds.Cluster{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + BackupRetentionPeriodDays: defsecTypes.IntDefault(1, defsecTypes.NewUnmanagedMetadata()), + ReplicationSourceARN: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + PerformanceInsights: rds.PerformanceInsights{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Enabled: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + KMSKeyID: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + Instances: nil, + Encryption: rds.Encryption{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EncryptStorage: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + KMSKeyID: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + PublicAccess: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + Engine: defsecTypes.StringUnresolvable(defsecTypes.NewUnmanagedMetadata()), + LatestRestorableTime: defsecTypes.TimeUnresolvable(defsecTypes.NewUnmanagedMetadata()), + DeletionProtection: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + } + for _, orphan := range orphanResources { + orphanage.Instances = append(orphanage.Instances, adaptClusterInstance(orphan, modules)) + } + clusters = append(clusters, orphanage) + } + + return clusters +} + +func getClassic(modules terraform.Modules) rds.Classic { + classic := rds.Classic{ + DBSecurityGroups: nil, + } + for _, resource := range modules.GetResourcesByType("aws_db_security_group", "aws_redshift_security_group", "aws_elasticache_security_group") { + classic.DBSecurityGroups = append(classic.DBSecurityGroups, adaptClassicDBSecurityGroup(resource)) + } + return classic +} + +func adaptClusterInstance(resource *terraform.Block, modules terraform.Modules) rds.ClusterInstance { + clusterIdAttr := resource.GetAttribute("cluster_identifier") + clusterId := clusterIdAttr.AsStringValueOrDefault("", resource) + + if 
clusterIdAttr.IsResourceBlockReference("aws_rds_cluster") { + if referenced, err := modules.GetReferencedBlock(clusterIdAttr, resource); err == nil { + clusterId = defsecTypes.String(referenced.FullName(), referenced.GetMetadata()) + } + } + + return rds.ClusterInstance{ + ClusterIdentifier: clusterId, + Instance: adaptInstance(resource, modules), + } +} + +func adaptClassicDBSecurityGroup(resource *terraform.Block) rds.DBSecurityGroup { + return rds.DBSecurityGroup{ + Metadata: resource.GetMetadata(), + } +} + +func adaptInstance(resource *terraform.Block, modules terraform.Modules) rds.Instance { + + var ReadReplicaDBInstanceIdentifiers []defsecTypes.StringValue + rrdiAttr := resource.GetAttribute("replicate_source_db") + for _, rrdi := range rrdiAttr.AsStringValues() { + ReadReplicaDBInstanceIdentifiers = append(ReadReplicaDBInstanceIdentifiers, rrdi) + } + + var TagList []rds.TagList + tagres := resource.GetBlocks("tags") + for _, tagres := range tagres { + + TagList = append(TagList, rds.TagList{ + Metadata: tagres.GetMetadata(), + }) + } + + var EnabledCloudwatchLogsExports []defsecTypes.StringValue + ecweAttr := resource.GetAttribute("enabled_cloudwatch_logs_exports") + for _, ecwe := range ecweAttr.AsStringValues() { + EnabledCloudwatchLogsExports = append(EnabledCloudwatchLogsExports, ecwe) + } + + replicaSource := resource.GetAttribute("replicate_source_db") + replicaSourceValue := "" + if replicaSource.IsNotNil() { + if referenced, err := modules.GetReferencedBlock(replicaSource, resource); err == nil { + replicaSourceValue = referenced.ID() + } + } + return rds.Instance{ + Metadata: resource.GetMetadata(), + BackupRetentionPeriodDays: resource.GetAttribute("backup_retention_period").AsIntValueOrDefault(0, resource), + ReplicationSourceARN: defsecTypes.StringExplicit(replicaSourceValue, resource.GetMetadata()), + PerformanceInsights: adaptPerformanceInsights(resource), + Encryption: adaptEncryption(resource), + PublicAccess: 
resource.GetAttribute("publicly_accessible").AsBoolValueOrDefault(false, resource), + Engine: resource.GetAttribute("engine").AsStringValueOrDefault(rds.EngineAurora, resource), + IAMAuthEnabled: resource.GetAttribute("iam_database_authentication_enabled").AsBoolValueOrDefault(false, resource), + DeletionProtection: resource.GetAttribute("deletion_protection").AsBoolValueOrDefault(false, resource), + DBInstanceArn: resource.GetAttribute("arn").AsStringValueOrDefault("", resource), + StorageEncrypted: resource.GetAttribute("storage_encrypted").AsBoolValueOrDefault(true, resource), + DBInstanceIdentifier: resource.GetAttribute("identifier").AsStringValueOrDefault("", resource), + EngineVersion: resource.GetAttribute("engine_version").AsStringValueOrDefault("", resource), + AutoMinorVersionUpgrade: resource.GetAttribute("auto_minor_version_upgrade").AsBoolValueOrDefault(false, resource), + MultiAZ: resource.GetAttribute("multi_az").AsBoolValueOrDefault(false, resource), + PubliclyAccessible: resource.GetAttribute("publicly_accessible").AsBoolValueOrDefault(false, resource), + LatestRestorableTime: defsecTypes.TimeUnresolvable(resource.GetMetadata()), + ReadReplicaDBInstanceIdentifiers: ReadReplicaDBInstanceIdentifiers, + TagList: TagList, + EnabledCloudwatchLogsExports: EnabledCloudwatchLogsExports, + } +} + +func adaptDBParameterGroups(resource *terraform.Block, modules terraform.Modules) rds.ParameterGroups { + + var Parameters []rds.Parameters + paramres := resource.GetBlocks("parameter") + for _, paramres := range paramres { + + Parameters = append(Parameters, rds.Parameters{ + Metadata: paramres.GetMetadata(), + ParameterName: defsecTypes.StringDefault("", paramres.GetMetadata()), + ParameterValue: defsecTypes.StringDefault("", paramres.GetMetadata()), + }) + } + + return rds.ParameterGroups{ + Metadata: resource.GetMetadata(), + DBParameterGroupName: resource.GetAttribute("name").AsStringValueOrDefault("", resource), + DBParameterGroupFamily: 
resource.GetAttribute("family").AsStringValueOrDefault("", resource), + Parameters: Parameters, + } +} + +func adaptDBSnapshots(resource *terraform.Block, modules terraform.Modules) rds.Snapshots { + + return rds.Snapshots{ + Metadata: resource.GetMetadata(), + DBSnapshotIdentifier: resource.GetAttribute("db_snapshot_identifier").AsStringValueOrDefault("", resource), + DBSnapshotArn: resource.GetAttribute("db_snapshot_arn").AsStringValueOrDefault("", resource), + Encrypted: resource.GetAttribute("encrypted").AsBoolValueOrDefault(true, resource), + KmsKeyId: resource.GetAttribute("kms_key_id").AsStringValueOrDefault("", resource), + SnapshotAttributes: nil, + } +} + +func adaptCluster(resource *terraform.Block, modules terraform.Modules) (rds.Cluster, []string) { + + clusterInstances, ids := getClusterInstances(resource, modules) + + var public bool + for _, instance := range clusterInstances { + if instance.PublicAccess.IsTrue() { + public = true + break + } + } + + return rds.Cluster{ + Metadata: resource.GetMetadata(), + BackupRetentionPeriodDays: resource.GetAttribute("backup_retention_period").AsIntValueOrDefault(1, resource), + ReplicationSourceARN: resource.GetAttribute("replication_source_identifier").AsStringValueOrDefault("", resource), + PerformanceInsights: adaptPerformanceInsights(resource), + Instances: clusterInstances, + Encryption: adaptEncryption(resource), + PublicAccess: defsecTypes.Bool(public, resource.GetMetadata()), + Engine: resource.GetAttribute("engine").AsStringValueOrDefault(rds.EngineAurora, resource), + LatestRestorableTime: defsecTypes.TimeUnresolvable(resource.GetMetadata()), + AvailabilityZones: resource.GetAttribute("availability_zones").AsStringValueSliceOrEmpty(), + DeletionProtection: resource.GetAttribute("deletion_protection").AsBoolValueOrDefault(false, resource), + }, ids +} + +func getClusterInstances(resource *terraform.Block, modules terraform.Modules) (clusterInstances []rds.ClusterInstance, instanceIDs []string) { + 
clusterInstanceResources := modules.GetReferencingResources(resource, "aws_rds_cluster_instance", "cluster_identifier") + + for _, ciResource := range clusterInstanceResources { + instanceIDs = append(instanceIDs, ciResource.ID()) + clusterInstances = append(clusterInstances, adaptClusterInstance(ciResource, modules)) + } + return clusterInstances, instanceIDs +} + +func adaptPerformanceInsights(resource *terraform.Block) rds.PerformanceInsights { + return rds.PerformanceInsights{ + Metadata: resource.GetMetadata(), + Enabled: resource.GetAttribute("performance_insights_enabled").AsBoolValueOrDefault(false, resource), + KMSKeyID: resource.GetAttribute("performance_insights_kms_key_id").AsStringValueOrDefault("", resource), + } +} + +func adaptEncryption(resource *terraform.Block) rds.Encryption { + return rds.Encryption{ + Metadata: resource.GetMetadata(), + EncryptStorage: resource.GetAttribute("storage_encrypted").AsBoolValueOrDefault(false, resource), + KMSKeyID: resource.GetAttribute("kms_key_id").AsStringValueOrDefault("", resource), + } +} diff --git a/internal/adapters/terraform/aws/rds/adapt_test.go b/internal/adapters/terraform/aws/rds/adapt_test.go new file mode 100644 index 000000000000..3636d0589479 --- /dev/null +++ b/internal/adapters/terraform/aws/rds/adapt_test.go @@ -0,0 +1,332 @@ +package rds + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/rds" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected rds.RDS + }{ + { + name: "defined", + terraform: ` + + resource "aws_rds_cluster" "example" { + engine = "aurora-mysql" + availability_zones = ["us-west-2a", 
 "us-west-2b", "us-west-2c"]
		backup_retention_period = 7
		kms_key_id  = "kms_key_1"
		storage_encrypted = true
		replication_source_identifier = "arn-of-a-source-db-cluster"
		deletion_protection = true
	}

	resource "aws_rds_cluster_instance" "example" {
		cluster_identifier = aws_rds_cluster.example.id
		name = "bar"
		performance_insights_enabled = true
		performance_insights_kms_key_id = "performance_key_0"
		kms_key_id  = "kms_key_0"
		storage_encrypted = true
	}

	resource "aws_db_security_group" "example" {
		# ...
	}

	resource "aws_db_instance" "example" {
		publicly_accessible = false
		backup_retention_period = 5
		skip_final_snapshot  = true
		performance_insights_enabled = true
		performance_insights_kms_key_id = "performance_key_1"
		storage_encrypted = true
		kms_key_id = "kms_key_2"
	}
`,
			expected: rds.RDS{
				// Standalone aws_db_instance resources.
				Instances: []rds.Instance{
					{
						Metadata:                  defsecTypes.NewTestMetadata(),
						BackupRetentionPeriodDays: defsecTypes.Int(5, defsecTypes.NewTestMetadata()),
						ReplicationSourceARN:      defsecTypes.String("", defsecTypes.NewTestMetadata()),
						PerformanceInsights: rds.PerformanceInsights{
							Metadata: defsecTypes.NewTestMetadata(),
							Enabled:  defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
							KMSKeyID: defsecTypes.String("performance_key_1", defsecTypes.NewTestMetadata()),
						},
						Encryption: rds.Encryption{
							Metadata:       defsecTypes.NewTestMetadata(),
							EncryptStorage: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
							KMSKeyID:       defsecTypes.String("kms_key_2", defsecTypes.NewTestMetadata()),
						},
						PublicAccess:     defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
						Engine:           defsecTypes.String(rds.EngineAurora, defsecTypes.NewTestMetadata()),
						StorageEncrypted: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
					},
				},
				// The cluster plus its referencing cluster instance.
				Clusters: []rds.Cluster{
					{
						Metadata:                  defsecTypes.NewTestMetadata(),
						BackupRetentionPeriodDays: defsecTypes.Int(7, defsecTypes.NewTestMetadata()),
						ReplicationSourceARN:      defsecTypes.String("arn-of-a-source-db-cluster", defsecTypes.NewTestMetadata()),
						PerformanceInsights: rds.PerformanceInsights{
							Metadata: defsecTypes.NewTestMetadata(),
							Enabled:  defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
							KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()),
						},
						Encryption: rds.Encryption{
							Metadata:       defsecTypes.NewTestMetadata(),
							EncryptStorage: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
							KMSKeyID:       defsecTypes.String("kms_key_1", defsecTypes.NewTestMetadata()),
						},
						Instances: []rds.ClusterInstance{
							{
								Instance: rds.Instance{
									Metadata:                  defsecTypes.NewTestMetadata(),
									BackupRetentionPeriodDays: defsecTypes.Int(0, defsecTypes.NewTestMetadata()),
									ReplicationSourceARN:      defsecTypes.String("", defsecTypes.NewTestMetadata()),
									PerformanceInsights: rds.PerformanceInsights{
										Metadata: defsecTypes.NewTestMetadata(),
										Enabled:  defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
										KMSKeyID: defsecTypes.String("performance_key_0", defsecTypes.NewTestMetadata()),
									},
									Encryption: rds.Encryption{
										Metadata:       defsecTypes.NewTestMetadata(),
										EncryptStorage: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
										KMSKeyID:       defsecTypes.String("kms_key_0", defsecTypes.NewTestMetadata()),
									},
									PublicAccess:     defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
									Engine:           defsecTypes.String(rds.EngineAurora, defsecTypes.NewTestMetadata()),
									StorageEncrypted: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
								},
								// Resolved from the aws_rds_cluster.example.id reference.
								ClusterIdentifier: defsecTypes.String("aws_rds_cluster.example", defsecTypes.NewTestMetadata()),
							},
						},
						PublicAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
						Engine:       defsecTypes.String(rds.EngineAuroraMysql, defsecTypes.NewTestMetadata()),
						AvailabilityZones: defsecTypes.StringValueList{
							defsecTypes.String("us-west-2a", defsecTypes.NewTestMetadata()),
							defsecTypes.String("us-west-2b", defsecTypes.NewTestMetadata()),
							defsecTypes.String("us-west-2c", defsecTypes.NewTestMetadata()),
						},
						DeletionProtection: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
					},
				},
				Classic: rds.Classic{
					DBSecurityGroups: []rds.DBSecurityGroup{
						{
							Metadata: defsecTypes.NewTestMetadata(),
						},
					},
				},
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf")
			adapted := Adapt(modules)
			testutil.AssertDefsecEqual(t, test.expected, adapted)
		})
	}
}

// Test_adaptInstance checks the defaults applied to an empty aws_db_instance.
func Test_adaptInstance(t *testing.T) {
	tests := []struct {
		name      string
		terraform string
		expected  rds.Instance
	}{
		{
			name: "instance defaults",
			terraform: `
	resource "aws_db_instance" "example" {
	}
`,
			expected: rds.Instance{
				Metadata:                  defsecTypes.NewTestMetadata(),
				BackupRetentionPeriodDays: defsecTypes.Int(0, defsecTypes.NewTestMetadata()),
				ReplicationSourceARN:      defsecTypes.String("", defsecTypes.NewTestMetadata()),
				PerformanceInsights: rds.PerformanceInsights{
					Metadata: defsecTypes.NewTestMetadata(),
					Enabled:  defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
					KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()),
				},
				Encryption: rds.Encryption{
					Metadata:       defsecTypes.NewTestMetadata(),
					EncryptStorage: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
					KMSKeyID:       defsecTypes.String("", defsecTypes.NewTestMetadata()),
				},
				PublicAccess:     defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
				Engine:           defsecTypes.String(rds.EngineAurora, defsecTypes.NewTestMetadata()),
				StorageEncrypted: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
				IAMAuthEnabled:   defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf")
			adapted := adaptInstance(modules.GetBlocks()[0], modules)
			testutil.AssertDefsecEqual(t, test.expected, adapted)
		})
	}
}

// Test_adaptCluster checks the defaults applied to an empty aws_rds_cluster.
func Test_adaptCluster(t *testing.T) {
	tests := []struct {
		name      string
		terraform string
		expected  rds.Cluster
	}{
		{
			name: "cluster defaults",
			terraform: `
	resource "aws_rds_cluster" "example" {
	}
`,
			expected: rds.Cluster{
				Metadata:                  defsecTypes.NewTestMetadata(),
				BackupRetentionPeriodDays: defsecTypes.Int(1, defsecTypes.NewTestMetadata()),
				ReplicationSourceARN:      defsecTypes.String("", defsecTypes.NewTestMetadata()),
				PerformanceInsights: rds.PerformanceInsights{
					Metadata: defsecTypes.NewTestMetadata(),
					Enabled:  defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
					KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()),
				},
				Encryption: rds.Encryption{
					Metadata:       defsecTypes.NewTestMetadata(),
					EncryptStorage: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
					KMSKeyID:       defsecTypes.String("", defsecTypes.NewTestMetadata()),
				},
				PublicAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
				Engine:       defsecTypes.String(rds.EngineAurora, defsecTypes.NewTestMetadata()),
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf")
			adapted, _ := adaptCluster(modules.GetBlocks()[0], modules)
			testutil.AssertDefsecEqual(t, test.expected, adapted)
		})
	}
}

// TestLines asserts the source ranges recorded in the adapted metadata.
// The expected numbers below correspond 1:1 to the line numbers of the raw
// fixture string, so the fixture's exact layout must not be changed.
func TestLines(t *testing.T) {
	src := `
	resource "aws_rds_cluster" "example" {
		backup_retention_period = 7
		kms_key_id  = "kms_key_1"
		storage_encrypted = true
		replication_source_identifier = "arn-of-a-source-db-cluster"
	}

	resource "aws_rds_cluster_instance" "example" {
		cluster_identifier = aws_rds_cluster.example.id
		backup_retention_period = 7
		performance_insights_enabled = true
		performance_insights_kms_key_id = "performance_key"
		storage_encrypted = true
		kms_key_id  = "kms_key_0"
	}

	resource "aws_db_security_group" "example" {
	}

	resource "aws_db_instance" "example" {
		publicly_accessible = false
		backup_retention_period = 7
		performance_insights_enabled = true
		performance_insights_kms_key_id = "performance_key"
		storage_encrypted = true
		kms_key_id  = "kms_key_0"
	}
`

	modules := tftestutil.CreateModulesFromSource(t, src, ".tf")
	adapted := Adapt(modules)

	require.Len(t, adapted.Clusters, 1)
	require.Len(t, adapted.Instances, 1)

	cluster := adapted.Clusters[0]
	instance := adapted.Instances[0]
	classic := adapted.Classic

	assert.Equal(t, 2, cluster.Metadata.Range().GetStartLine())
	assert.Equal(t, 7, cluster.Metadata.Range().GetEndLine())

	assert.Equal(t, 3, cluster.BackupRetentionPeriodDays.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 3, cluster.BackupRetentionPeriodDays.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 4, cluster.Encryption.KMSKeyID.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 4, cluster.Encryption.KMSKeyID.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 5, cluster.Encryption.EncryptStorage.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 5, cluster.Encryption.EncryptStorage.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 6, cluster.ReplicationSourceARN.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 6, cluster.ReplicationSourceARN.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 9, cluster.Instances[0].Instance.Metadata.Range().GetStartLine())
	assert.Equal(t, 16, cluster.Instances[0].Instance.Metadata.Range().GetEndLine())

	// The cluster identifier resolves to the referenced cluster block,
	// so its range is the cluster's range (lines 2-7), not the attribute's.
	assert.Equal(t, 2, cluster.Instances[0].ClusterIdentifier.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 7, cluster.Instances[0].ClusterIdentifier.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 11, cluster.Instances[0].Instance.BackupRetentionPeriodDays.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 11, cluster.Instances[0].Instance.BackupRetentionPeriodDays.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 12, cluster.Instances[0].Instance.PerformanceInsights.Enabled.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 12, cluster.Instances[0].Instance.PerformanceInsights.Enabled.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 13, cluster.Instances[0].Instance.PerformanceInsights.KMSKeyID.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 13, cluster.Instances[0].Instance.PerformanceInsights.KMSKeyID.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 14, cluster.Instances[0].Instance.Encryption.EncryptStorage.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 14, cluster.Instances[0].Instance.Encryption.EncryptStorage.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 15, cluster.Instances[0].Instance.Encryption.KMSKeyID.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 15, cluster.Instances[0].Instance.Encryption.KMSKeyID.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 18, classic.DBSecurityGroups[0].Metadata.Range().GetStartLine())
	assert.Equal(t, 19, classic.DBSecurityGroups[0].Metadata.Range().GetEndLine())

	assert.Equal(t, 21, instance.Metadata.Range().GetStartLine())
	assert.Equal(t, 28, instance.Metadata.Range().GetEndLine())

	assert.Equal(t, 22, instance.PublicAccess.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 22, instance.PublicAccess.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 23, instance.BackupRetentionPeriodDays.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 23, instance.BackupRetentionPeriodDays.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 24, instance.PerformanceInsights.Enabled.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 24, instance.PerformanceInsights.Enabled.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 25, instance.PerformanceInsights.KMSKeyID.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 25, instance.PerformanceInsights.KMSKeyID.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 26,
instance.Encryption.EncryptStorage.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 26, instance.Encryption.EncryptStorage.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 27, instance.Encryption.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 27, instance.Encryption.KMSKeyID.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/redshift/adapt.go b/internal/adapters/terraform/aws/redshift/adapt.go new file mode 100644 index 000000000000..2875b6649c46 --- /dev/null +++ b/internal/adapters/terraform/aws/redshift/adapt.go @@ -0,0 +1,117 @@ +package redshift + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/redshift" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) redshift.Redshift { + return redshift.Redshift{ + Clusters: adaptClusters(modules), + SecurityGroups: adaptSecurityGroups(modules), + ClusterParameters: adaptParameters(modules), + ReservedNodes: nil, + } +} + +func adaptClusters(modules terraform.Modules) []redshift.Cluster { + var clusters []redshift.Cluster + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_redshift_cluster") { + clusters = append(clusters, adaptCluster(resource, module)) + } + } + return clusters +} + +func adaptSecurityGroups(modules terraform.Modules) []redshift.SecurityGroup { + var securityGroups []redshift.SecurityGroup + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_redshift_security_group") { + securityGroups = append(securityGroups, adaptSecurityGroup(resource)) + } + } + return securityGroups +} + +func adaptParameters(modules terraform.Modules) []redshift.ClusterParameter { + var Parameters []redshift.ClusterParameter + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_redshift_parameter_group") { + for _, 
r := range resource.GetBlocks("parameter") { + Parameters = append(Parameters, adaptParameter(r)) + } + } + } + return Parameters +} + +func adaptCluster(resource *terraform.Block, module *terraform.Module) redshift.Cluster { + cluster := redshift.Cluster{ + Metadata: resource.GetMetadata(), + ClusterIdentifier: resource.GetAttribute("cluster_identifier").AsStringValueOrDefault("", resource), + NodeType: resource.GetAttribute("node_type").AsStringValueOrDefault("", resource), + MasterUsername: resource.GetAttribute("master_username").AsStringValueOrDefault("", resource), + NumberOfNodes: resource.GetAttribute("number_of_nodes").AsIntValueOrDefault(1, resource), + PubliclyAccessible: resource.GetAttribute("publicly_accessible").AsBoolValueOrDefault(true, resource), + LoggingEnabled: defsecTypes.Bool(false, resource.GetMetadata()), + AutomatedSnapshotRetentionPeriod: defsecTypes.Int(0, resource.GetMetadata()), + AllowVersionUpgrade: resource.GetAttribute("allow_version_upgrade").AsBoolValueOrDefault(true, resource), + VpcId: defsecTypes.String("", resource.GetMetadata()), + Encryption: redshift.Encryption{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + KMSKeyID: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + EndPoint: redshift.EndPoint{ + Metadata: resource.GetMetadata(), + Port: resource.GetAttribute("port").AsIntValueOrDefault(5439, resource), + }, + SubnetGroupName: defsecTypes.StringDefault("", resource.GetMetadata()), + } + + encryptedAttr := resource.GetAttribute("encrypted") + cluster.Encryption.Enabled = encryptedAttr.AsBoolValueOrDefault(false, resource) + + if logBlock := resource.GetBlock("logging"); logBlock.IsNotNil() { + cluster.LoggingEnabled = logBlock.GetAttribute("enable").AsBoolValueOrDefault(false, logBlock) + } + + if snapBlock := resource.GetBlock("snapshot_copy"); snapBlock.IsNotNil() { + snapAttr := snapBlock.GetAttribute("retention_period") + 
cluster.AutomatedSnapshotRetentionPeriod = snapAttr.AsIntValueOrDefault(7, snapBlock) + } + + KMSKeyIDAttr := resource.GetAttribute("kms_key_id") + cluster.Encryption.KMSKeyID = KMSKeyIDAttr.AsStringValueOrDefault("", resource) + if KMSKeyIDAttr.IsResourceBlockReference("aws_kms_key") { + if kmsKeyBlock, err := module.GetReferencedBlock(KMSKeyIDAttr, resource); err == nil { + cluster.Encryption.KMSKeyID = defsecTypes.String(kmsKeyBlock.FullName(), kmsKeyBlock.GetMetadata()) + } + } + + subnetGroupNameAttr := resource.GetAttribute("cluster_subnet_group_name") + cluster.SubnetGroupName = subnetGroupNameAttr.AsStringValueOrDefault("", resource) + + return cluster +} + +func adaptSecurityGroup(resource *terraform.Block) redshift.SecurityGroup { + descriptionAttr := resource.GetAttribute("description") + descriptionVal := descriptionAttr.AsStringValueOrDefault("Managed by Terraform", resource) + + return redshift.SecurityGroup{ + Metadata: resource.GetMetadata(), + Description: descriptionVal, + } +} + +func adaptParameter(resource *terraform.Block) redshift.ClusterParameter { + + return redshift.ClusterParameter{ + Metadata: resource.GetMetadata(), + ParameterName: resource.GetAttribute("name").AsStringValueOrDefault("", resource), + ParameterValue: resource.GetAttribute("value").AsStringValueOrDefault("", resource), + } +} diff --git a/internal/adapters/terraform/aws/redshift/adapt_test.go b/internal/adapters/terraform/aws/redshift/adapt_test.go new file mode 100644 index 000000000000..443cc3bcbea9 --- /dev/null +++ b/internal/adapters/terraform/aws/redshift/adapt_test.go @@ -0,0 +1,230 @@ +package redshift + +import ( + "fmt" + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/redshift" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" + 
	"github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil"
)

// Test_Adapt checks end-to-end adaptation of Redshift resources, including
// resolution of a kms_key_id that references an aws_kms_key block.
func Test_Adapt(t *testing.T) {
	tests := []struct {
		name      string
		terraform string
		expected  redshift.Redshift
	}{
		{
			name: "reference key id",
			terraform: `
	resource "aws_kms_key" "redshift" {
		enable_key_rotation = true
	}

	resource "aws_redshift_cluster" "example" {
		cluster_identifier = "tf-redshift-cluster"
		publicly_accessible = false
		number_of_nodes = 1
		allow_version_upgrade = false
		port = 5440
		encrypted = true
		kms_key_id = aws_kms_key.redshift.key_id
		cluster_subnet_group_name = "redshift_subnet"
	}

	resource "aws_redshift_security_group" "default" {
		name = "redshift-sg"
		description = "some description"
	}
`,
			expected: redshift.Redshift{
				Clusters: []redshift.Cluster{
					{
						Metadata:            defsecTypes.NewTestMetadata(),
						ClusterIdentifier:   defsecTypes.String("tf-redshift-cluster", defsecTypes.NewTestMetadata()),
						PubliclyAccessible:  defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
						NumberOfNodes:       defsecTypes.Int(1, defsecTypes.NewTestMetadata()),
						AllowVersionUpgrade: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
						EndPoint: redshift.EndPoint{
							Metadata: defsecTypes.NewTestMetadata(),
							Port:     defsecTypes.Int(5440, defsecTypes.NewTestMetadata()),
						},
						Encryption: redshift.Encryption{
							Metadata: defsecTypes.NewTestMetadata(),
							Enabled:  defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
							// The key reference resolves to the block's full name.
							KMSKeyID: defsecTypes.String("aws_kms_key.redshift", defsecTypes.NewTestMetadata()),
						},
						SubnetGroupName: defsecTypes.String("redshift_subnet", defsecTypes.NewTestMetadata()),
					},
				},
				SecurityGroups: []redshift.SecurityGroup{
					{
						Metadata:    defsecTypes.NewTestMetadata(),
						Description: defsecTypes.String("some description", defsecTypes.NewTestMetadata()),
					},
				},
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf")
			adapted := Adapt(modules)
			// NOTE(review): leftover debug print; removing it would also
			// require dropping the "fmt" import above.
			fmt.Println(adapted.SecurityGroups[0].Description.Value())
			testutil.AssertDefsecEqual(t, test.expected, adapted)
		})
	}
}

// Test_adaptCluster checks a literal KMS key id and the cluster defaults.
func Test_adaptCluster(t *testing.T) {
	tests := []struct {
		name      string
		terraform string
		expected  redshift.Cluster
	}{
		{
			name: "key as string",
			terraform: `
	resource "aws_redshift_cluster" "example" {
		cluster_identifier = "tf-redshift-cluster"
		publicly_accessible = false
		number_of_nodes = 1
		allow_version_upgrade = false
		port = 5440
		encrypted = true
		kms_key_id = "key-id"
		cluster_subnet_group_name = "redshift_subnet"
	}
`,
			expected: redshift.Cluster{
				Metadata:            defsecTypes.NewTestMetadata(),
				ClusterIdentifier:   defsecTypes.String("tf-redshift-cluster", defsecTypes.NewTestMetadata()),
				PubliclyAccessible:  defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
				NumberOfNodes:       defsecTypes.Int(1, defsecTypes.NewTestMetadata()),
				AllowVersionUpgrade: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
				EndPoint: redshift.EndPoint{
					Metadata: defsecTypes.NewTestMetadata(),
					Port:     defsecTypes.Int(5440, defsecTypes.NewTestMetadata()),
				},
				Encryption: redshift.Encryption{
					Metadata: defsecTypes.NewTestMetadata(),
					Enabled:  defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
					KMSKeyID: defsecTypes.String("key-id", defsecTypes.NewTestMetadata()),
				},
				SubnetGroupName: defsecTypes.String("redshift_subnet", defsecTypes.NewTestMetadata()),
			},
		},
		{
			name: "defaults",
			terraform: `
	resource "aws_redshift_cluster" "example" {
	}
`,
			expected: redshift.Cluster{
				Metadata:            defsecTypes.NewTestMetadata(),
				ClusterIdentifier:   defsecTypes.String("", defsecTypes.NewTestMetadata()),
				PubliclyAccessible:  defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
				NumberOfNodes:       defsecTypes.Int(1, defsecTypes.NewTestMetadata()),
				AllowVersionUpgrade: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
				EndPoint: redshift.EndPoint{
					Metadata: defsecTypes.NewTestMetadata(),
					Port:     defsecTypes.Int(5439, defsecTypes.NewTestMetadata()),
				},
				Encryption: redshift.Encryption{
					Metadata: defsecTypes.NewTestMetadata(),
					Enabled:  defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
					KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()),
				},
				SubnetGroupName: defsecTypes.String("", defsecTypes.NewTestMetadata()),
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf")
			adapted := adaptCluster(modules.GetBlocks()[0], modules[0])
			testutil.AssertDefsecEqual(t, test.expected, adapted)
		})
	}
}

// Test_adaptSecurityGroup checks the provider-applied default description.
// NOTE(review): the fixture uses an empty resource type (`resource "" ...`);
// presumably intentional to exercise defaults without a typed resource —
// confirm against tftestutil behaviour.
func Test_adaptSecurityGroup(t *testing.T) {
	tests := []struct {
		name      string
		terraform string
		expected  redshift.SecurityGroup
	}{
		{
			name: "defaults",
			terraform: `
resource "" "example" {
}
`,
			expected: redshift.SecurityGroup{
				Metadata:    defsecTypes.NewTestMetadata(),
				Description: defsecTypes.String("Managed by Terraform", defsecTypes.NewTestMetadata()),
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf")
			adapted := adaptSecurityGroup(modules.GetBlocks()[0])
			testutil.AssertDefsecEqual(t, test.expected, adapted)
		})
	}
}

// TestLines asserts source ranges recorded in the adapted metadata. The
// expected numbers correspond 1:1 to line numbers of the fixture string, so
// its exact layout must not be changed.
func TestLines(t *testing.T) {
	src := `
	resource "aws_kms_key" "redshift" {
		enable_key_rotation = true
	}

	resource "aws_redshift_cluster" "example" {
		cluster_identifier = "tf-redshift-cluster"
		encrypted = true
		kms_key_id = aws_kms_key.redshift.key_id
		cluster_subnet_group_name = "subnet name"
	}

	resource "aws_redshift_security_group" "default" {
		name = "redshift-sg"
		description = "some description"
	}`

	modules := tftestutil.CreateModulesFromSource(t, src, ".tf")
	adapted := Adapt(modules)

	require.Len(t, adapted.Clusters, 1)
	require.Len(t, adapted.SecurityGroups, 1)
	cluster := adapted.Clusters[0]
	securityGroup := adapted.SecurityGroups[0]

	assert.Equal(t, 6, cluster.Metadata.Range().GetStartLine())
	assert.Equal(t, 11, cluster.Metadata.Range().GetEndLine())

	assert.Equal(t, 8, cluster.Encryption.Enabled.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 8, cluster.Encryption.Enabled.GetMetadata().Range().GetEndLine())

	// The KMS key resolves to the referenced aws_kms_key block (lines 2-4).
	assert.Equal(t, 2, cluster.Encryption.KMSKeyID.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 4, cluster.Encryption.KMSKeyID.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 10, cluster.SubnetGroupName.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 10, cluster.SubnetGroupName.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 13, securityGroup.Metadata.Range().GetStartLine())
	assert.Equal(t, 16, securityGroup.Metadata.Range().GetEndLine())

	assert.Equal(t, 15, securityGroup.Description.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 15, securityGroup.Description.GetMetadata().Range().GetEndLine())
}
diff --git a/internal/adapters/terraform/aws/s3/adapt.go b/internal/adapters/terraform/aws/s3/adapt.go
new file mode 100644
index 000000000000..56e61a8763f9
--- /dev/null
+++ b/internal/adapters/terraform/aws/s3/adapt.go
@@ -0,0 +1,18 @@
package s3

import (
	"github.com/aquasecurity/defsec/pkg/providers/aws/s3"
	"github.com/aquasecurity/defsec/pkg/terraform"
)

// Adapt converts the S3-related resources of the given Terraform modules
// into the defsec s3.S3 model, delegating bucket adaptation to a stateful
// adapter (defined elsewhere in this package) keyed by bucket ID.
func Adapt(modules terraform.Modules) s3.S3 {

	a := &adapter{
		modules:   modules,
		bucketMap: make(map[string]*s3.Bucket),
	}

	return s3.S3{
		Buckets: a.adaptBuckets(),
	}
}
diff --git a/internal/adapters/terraform/aws/s3/adapt_test.go b/internal/adapters/terraform/aws/s3/adapt_test.go
new file mode 100644
index 000000000000..5b6ebe4df1ec
--- /dev/null
+++ b/internal/adapters/terraform/aws/s3/adapt_test.go
@@ -0,0 +1,385 @@
package s3

import (
	"testing"

	defsecTypes "github.com/aquasecurity/defsec/pkg/types"

	"github.com/aquasecurity/defsec/pkg/providers/aws/iam"
"github.com/aquasecurity/defsec/pkg/providers/aws/s3" + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/liamg/iamgo" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_PublicAccessBlock(t *testing.T) { + testCases := []struct { + desc string + source string + expectedBuckets int + hasPublicAccess bool + }{ + { + desc: "public access block is found when using the bucket name as the lookup", + source: ` +resource "aws_s3_bucket" "example" { + bucket = "bucketname" +} + +resource "aws_s3_bucket_public_access_block" "example_access_block"{ + bucket = "bucketname" +} +`, + expectedBuckets: 1, + hasPublicAccess: true, + }, + { + desc: "public access block is found when using the bucket name as the lookup", + source: ` +resource "aws_s3_bucket" "example" { + bucket = "bucketname" +} + +resource "aws_s3_bucket_public_access_block" "example_access_block"{ + bucket = aws_s3_bucket.example.id +} +`, + expectedBuckets: 1, + hasPublicAccess: true, + }, + } + for _, tC := range testCases { + t.Run(tC.desc, func(t *testing.T) { + + modules := tftestutil.CreateModulesFromSource(t, tC.source, ".tf") + s3Ctx := Adapt(modules) + + assert.Equal(t, tC.expectedBuckets, len(s3Ctx.Buckets)) + + for _, bucket := range s3Ctx.Buckets { + if tC.hasPublicAccess { + assert.NotNil(t, bucket.PublicAccessBlock) + } else { + assert.Nil(t, bucket.PublicAccessBlock) + } + } + + bucket := s3Ctx.Buckets[0] + assert.NotNil(t, bucket.PublicAccessBlock) + + }) + } + +} + +func Test_PublicAccessDoesNotReference(t *testing.T) { + testCases := []struct { + desc string + source string + }{ + { + desc: "just a bucket, no public access block", + source: ` +resource "aws_s3_bucket" "example" { + bucket = "bucketname" +} + `, + }, + { + desc: "bucket with unrelated public access block", + source: ` +resource "aws_s3_bucket" "example" { 
+ bucket = "bucketname" +} + +resource "aws_s3_bucket_public_access_block" "example_access_block"{ + bucket = aws_s3_bucket.other.id +} + `, + }, + { + desc: "bucket with unrelated public access block via name", + source: ` +resource "aws_s3_bucket" "example" { + bucket = "bucketname" +} + +resource "aws_s3_bucket_public_access_block" "example_access_block"{ + bucket = "something" +} + `, + }, + } + for _, tC := range testCases { + t.Run(tC.desc, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, tC.source, ".tf") + s3Ctx := Adapt(modules) + require.Len(t, s3Ctx.Buckets, 1) + assert.Nil(t, s3Ctx.Buckets[0].PublicAccessBlock) + + }) + } +} + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected s3.S3 + }{ + { + name: "basic", + terraform: ` + resource "aws_s3_bucket" "example" { + bucket = "bucket" + } + + resource "aws_s3_bucket_public_access_block" "example" { + bucket = aws_s3_bucket.example.id + + restrict_public_buckets = true + block_public_acls = true + block_public_policy = true + ignore_public_acls = true + + } + + resource "aws_s3_bucket_acl" "example" { + bucket = aws_s3_bucket.example.id + acl = "private" + } + + resource "aws_s3_bucket_server_side_encryption_configuration" "example" { + bucket = aws_s3_bucket.example.bucket + + rule { + apply_server_side_encryption_by_default { + kms_master_key_id = "string-key" + sse_algorithm = "aws:kms" + } + } + } + + resource "aws_s3_bucket_logging" "example" { + bucket = aws_s3_bucket.example.id + + target_bucket = aws_s3_bucket.example.id + target_prefix = "log/" + } + + resource "aws_s3_bucket_versioning" "versioning_example" { + bucket = aws_s3_bucket.example.id + versioning_configuration { + status = "Enabled" + mfa_delete = "Enabled" + } + } + + resource "aws_s3_bucket_policy" "allow_access_from_another_account" { + bucket = aws_s3_bucket.example.bucket + policy = data.aws_iam_policy_document.allow_access_from_another_account.json + } + + data 
"aws_iam_policy_document" "allow_access_from_another_account" { + statement { + + actions = [ + "s3:GetObject", + "s3:ListBucket", + ] + + resources = [ + "arn:aws:s3:::*", + ] + } + } + `, + expected: s3.S3{ + Buckets: []s3.Bucket{ + { + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("bucket", defsecTypes.NewTestMetadata()), + PublicAccessBlock: &s3.PublicAccessBlock{ + Metadata: defsecTypes.NewTestMetadata(), + BlockPublicACLs: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + BlockPublicPolicy: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + IgnorePublicACLs: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + RestrictPublicBuckets: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + BucketPolicies: []iam.Policy{ + { + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Document: func() iam.Document { + + builder := iamgo.NewPolicyBuilder() + + sb := iamgo.NewStatementBuilder() + sb.WithEffect(iamgo.EffectAllow) + sb.WithActions([]string{"s3:GetObject", "s3:ListBucket"}) + sb.WithResources([]string{"arn:aws:s3:::*"}) + + builder.WithStatement(sb.Build()) + + return iam.Document{ + Parsed: builder.Build(), + Metadata: defsecTypes.NewTestMetadata(), + IsOffset: true, + HasRefs: false, + } + }(), + Builtin: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + Encryption: s3.Encryption{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Algorithm: defsecTypes.String("aws:kms", defsecTypes.NewTestMetadata()), + KMSKeyId: defsecTypes.String("string-key", defsecTypes.NewTestMetadata()), + }, + Versioning: s3.Versioning{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + MFADelete: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + Logging: s3.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: 
defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + TargetBucket: defsecTypes.String("aws_s3_bucket.example", defsecTypes.NewTestMetadata()), + }, + ACL: defsecTypes.String("private", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_s3_bucket" "example" { + bucket = "bucket" + } + + resource "aws_s3_bucket_public_access_block" "example" { + bucket = aws_s3_bucket.example.id + + restrict_public_buckets = true + block_public_acls = true + block_public_policy = true + ignore_public_acls = true + } + + resource "aws_s3_bucket_acl" "example" { + bucket = aws_s3_bucket.example.id + acl = "private" + } + + resource "aws_s3_bucket_server_side_encryption_configuration" "example" { + bucket = aws_s3_bucket.example.bucket + + rule { + apply_server_side_encryption_by_default { + kms_master_key_id = "string-key" + sse_algorithm = "aws:kms" + } + } + } + + resource "aws_s3_bucket_logging" "example" { + bucket = aws_s3_bucket.example.id + + target_bucket = aws_s3_bucket.example.id + target_prefix = "log/" + } + + resource "aws_s3_bucket_versioning" "versioning_example" { + bucket = aws_s3_bucket.example.id + versioning_configuration { + status = "Enabled" + } + } + + resource "aws_s3_bucket_policy" "allow_access_from_another_account" { + bucket = aws_s3_bucket.example.bucket + policy = data.aws_iam_policy_document.allow_access_from_another_account.json + } + + data "aws_iam_policy_document" "allow_access_from_another_account" { + statement { + + actions = [ + "s3:GetObject", + "s3:ListBucket", + ] + + resources = [ + "arn:aws:s3:::*", + ] + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, 
adapted.Buckets, 1) + bucket := adapted.Buckets[0] + + assert.Equal(t, 2, bucket.Metadata.Range().GetStartLine()) + assert.Equal(t, 4, bucket.Metadata.Range().GetEndLine()) + + assert.Equal(t, 6, bucket.PublicAccessBlock.Metadata.Range().GetStartLine()) + assert.Equal(t, 13, bucket.PublicAccessBlock.Metadata.Range().GetEndLine()) + + assert.Equal(t, 9, bucket.PublicAccessBlock.RestrictPublicBuckets.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 9, bucket.PublicAccessBlock.RestrictPublicBuckets.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 10, bucket.PublicAccessBlock.BlockPublicACLs.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, bucket.PublicAccessBlock.BlockPublicACLs.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, bucket.PublicAccessBlock.BlockPublicPolicy.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 11, bucket.PublicAccessBlock.BlockPublicPolicy.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 12, bucket.PublicAccessBlock.IgnorePublicACLs.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 12, bucket.PublicAccessBlock.IgnorePublicACLs.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 17, bucket.ACL.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 17, bucket.ACL.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 20, bucket.Encryption.Metadata.Range().GetStartLine()) + assert.Equal(t, 29, bucket.Encryption.Metadata.Range().GetEndLine()) + + assert.Equal(t, 25, bucket.Encryption.KMSKeyId.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 25, bucket.Encryption.KMSKeyId.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 26, bucket.Encryption.Algorithm.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 26, bucket.Encryption.Algorithm.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 31, bucket.Logging.Metadata.Range().GetStartLine()) + assert.Equal(t, 36, bucket.Logging.Metadata.Range().GetEndLine()) + + assert.Equal(t, 34, 
bucket.Logging.TargetBucket.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 34, bucket.Logging.TargetBucket.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 38, bucket.Versioning.Metadata.Range().GetStartLine()) + assert.Equal(t, 43, bucket.Versioning.Metadata.Range().GetEndLine()) + + assert.Equal(t, 41, bucket.Versioning.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 41, bucket.Versioning.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 47, bucket.BucketPolicies[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 47, bucket.BucketPolicies[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 50, bucket.BucketPolicies[0].Document.Metadata.Range().GetStartLine()) + assert.Equal(t, 62, bucket.BucketPolicies[0].Document.Metadata.Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/s3/bucket.go b/internal/adapters/terraform/aws/s3/bucket.go new file mode 100644 index 000000000000..b254e5d56a5b --- /dev/null +++ b/internal/adapters/terraform/aws/s3/bucket.go @@ -0,0 +1,283 @@ +package s3 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/s3" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +type adapter struct { + modules terraform.Modules + bucketMap map[string]*s3.Bucket +} + +func (a *adapter) adaptBuckets() []s3.Bucket { + for _, block := range a.modules.GetResourcesByType("aws_s3_bucket") { + bucket := &s3.Bucket{ + Metadata: block.GetMetadata(), + Name: block.GetAttribute("bucket").AsStringValueOrDefault("", block), + PublicAccessBlock: nil, + BucketPolicies: nil, + Encryption: getEncryption(block, a), + Versioning: getVersioning(block, a), + Logging: getLogging(block, a), + ACL: getBucketAcl(block, a), + AccelerateConfigurationStatus: getAccelerateStatus(block, a), + BucketLocation: block.GetAttribute("region").AsStringValueOrDefault("", block), + LifecycleConfiguration: getLifecycle(block, a), + 
Website: getWebsite(block, a), + Objects: getObject(block, a), + } + a.bucketMap[block.ID()] = bucket + } + + a.adaptBucketPolicies() + a.adaptPublicAccessBlocks() + + var buckets []s3.Bucket + for _, bucket := range a.bucketMap { + buckets = append(buckets, *bucket) + } + + return buckets +} + +func getEncryption(block *terraform.Block, a *adapter) s3.Encryption { + if sseConfgihuration := block.GetBlock("server_side_encryption_configuration"); sseConfgihuration != nil { + return newS3Encryption(block, sseConfgihuration) + } + if val, ok := applyForBucketRelatedResource(a, block, "aws_s3_bucket_server_side_encryption_configuration", func(resource *terraform.Block) s3.Encryption { + return newS3Encryption(resource, resource) + }); ok { + return val + } + return s3.Encryption{ + Metadata: block.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, block.GetMetadata()), + KMSKeyId: defsecTypes.StringDefault("", block.GetMetadata()), + Algorithm: defsecTypes.StringDefault("", block.GetMetadata()), + } +} + +func newS3Encryption(root *terraform.Block, sseConfgihuration *terraform.Block) s3.Encryption { + return s3.Encryption{ + Metadata: root.GetMetadata(), + Enabled: isEncrypted(sseConfgihuration), + Algorithm: terraform.MapNestedAttribute( + sseConfgihuration, + "rule.apply_server_side_encryption_by_default.sse_algorithm", + func(attr *terraform.Attribute, parent *terraform.Block) defsecTypes.StringValue { + return attr.AsStringValueOrDefault("", parent) + }, + ), + KMSKeyId: terraform.MapNestedAttribute( + sseConfgihuration, + "rule.apply_server_side_encryption_by_default.kms_master_key_id", + func(attr *terraform.Attribute, parent *terraform.Block) defsecTypes.StringValue { + return attr.AsStringValueOrDefault("", parent) + }, + ), + } +} + +func getVersioning(block *terraform.Block, a *adapter) s3.Versioning { + versioning := s3.Versioning{ + Metadata: block.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, block.GetMetadata()), + MFADelete: 
defsecTypes.BoolDefault(false, block.GetMetadata()), + } + if lockBlock := block.GetBlock("object_lock_configuration"); lockBlock != nil { + if enabled := isObjeckLockEnabled(lockBlock); enabled != nil { + versioning.Enabled = *enabled + } + } + if vBlock := block.GetBlock("versioning"); vBlock != nil { + versioning.Enabled = vBlock.GetAttribute("enabled").AsBoolValueOrDefault(true, vBlock) + versioning.MFADelete = vBlock.GetAttribute("mfa_delete").AsBoolValueOrDefault(false, vBlock) + } + + if enabled, ok := applyForBucketRelatedResource(a, block, "aws_s3_bucket_object_lock_configuration", func(resource *terraform.Block) *defsecTypes.BoolValue { + if block.GetAttribute("object_lock_enabled").IsTrue() { + return isObjeckLockEnabled(resource) + } + return nil + }); ok && enabled != nil { + versioning.Enabled = *enabled + } + + if val, ok := applyForBucketRelatedResource(a, block, "aws_s3_bucket_versioning", getVersioningFromResource); ok { + return val + } + return versioning +} + +func isObjeckLockEnabled(resource *terraform.Block) *defsecTypes.BoolValue { + var val defsecTypes.BoolValue + attr := resource.GetAttribute("object_lock_enabled") + switch { + case attr.IsNil(): // enabled by default + val = defsecTypes.BoolDefault(true, resource.GetMetadata()) + case attr.Equals("Enabled"): + val = defsecTypes.Bool(true, attr.GetMetadata()) + } + return &val +} + +// from aws_s3_bucket_versioning +func getVersioningFromResource(block *terraform.Block) s3.Versioning { + versioning := s3.Versioning{ + Metadata: block.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, block.GetMetadata()), + MFADelete: defsecTypes.BoolDefault(false, block.GetMetadata()), + } + if config := block.GetBlock("versioning_configuration"); config != nil { + if status := config.GetAttribute("status"); status.IsNotNil() { + versioning.Enabled = defsecTypes.Bool(status.Equals("Enabled", terraform.IgnoreCase), status.GetMetadata()) + } + if mfa := config.GetAttribute("mfa_delete"); 
mfa.IsNotNil() { + versioning.MFADelete = defsecTypes.Bool(mfa.Equals("Enabled", terraform.IgnoreCase), mfa.GetMetadata()) + } + } + return versioning +} + +func getLogging(block *terraform.Block, a *adapter) s3.Logging { + if loggingBlock := block.GetBlock("logging"); loggingBlock.IsNotNil() { + targetBucket := loggingBlock.GetAttribute("target_bucket").AsStringValueOrDefault("", loggingBlock) + if referencedBlock, err := a.modules.GetReferencedBlock(loggingBlock.GetAttribute("target_bucket"), loggingBlock); err == nil { + targetBucket = defsecTypes.String(referencedBlock.FullName(), loggingBlock.GetAttribute("target_bucket").GetMetadata()) + } + return s3.Logging{ + Metadata: loggingBlock.GetMetadata(), + Enabled: defsecTypes.Bool(true, loggingBlock.GetMetadata()), + TargetBucket: targetBucket, + } + } + + if val, ok := applyForBucketRelatedResource(a, block, "aws_s3_bucket_logging", func(resource *terraform.Block) s3.Logging { + targetBucket := resource.GetAttribute("target_bucket").AsStringValueOrDefault("", resource) + if referencedBlock, err := a.modules.GetReferencedBlock(resource.GetAttribute("target_bucket"), resource); err == nil { + targetBucket = defsecTypes.String(referencedBlock.FullName(), resource.GetAttribute("target_bucket").GetMetadata()) + } + return s3.Logging{ + Metadata: resource.GetMetadata(), + Enabled: hasLogging(resource), + TargetBucket: targetBucket, + } + }); ok { + return val + } + + return s3.Logging{ + Metadata: block.GetMetadata(), + Enabled: defsecTypes.Bool(false, block.GetMetadata()), + TargetBucket: defsecTypes.StringDefault("", block.GetMetadata()), + } +} + +func getBucketAcl(block *terraform.Block, a *adapter) defsecTypes.StringValue { + aclAttr := block.GetAttribute("acl") + if aclAttr.IsString() { + return aclAttr.AsStringValueOrDefault("private", block) + } + + if val, ok := applyForBucketRelatedResource(a, block, "aws_s3_bucket_acl", func(resource *terraform.Block) defsecTypes.StringValue { + return 
resource.GetAttribute("acl").AsStringValueOrDefault("private", resource) + }); ok { + return val + } + return defsecTypes.StringDefault("private", block.GetMetadata()) +} + +func isEncrypted(sseConfgihuration *terraform.Block) defsecTypes.BoolValue { + return terraform.MapNestedAttribute( + sseConfgihuration, + "rule.apply_server_side_encryption_by_default.sse_algorithm", + func(attr *terraform.Attribute, parent *terraform.Block) defsecTypes.BoolValue { + if attr.IsNil() { + return defsecTypes.BoolDefault(false, parent.GetMetadata()) + } + return defsecTypes.Bool( + true, + attr.GetMetadata(), + ) + }, + ) +} + +func hasLogging(b *terraform.Block) defsecTypes.BoolValue { + if loggingBlock := b.GetBlock("logging"); loggingBlock.IsNotNil() { + if targetAttr := loggingBlock.GetAttribute("target_bucket"); targetAttr.IsNotNil() && targetAttr.IsNotEmpty() { + return defsecTypes.Bool(true, targetAttr.GetMetadata()) + } + return defsecTypes.BoolDefault(false, loggingBlock.GetMetadata()) + } + if targetBucket := b.GetAttribute("target_bucket"); targetBucket.IsNotNil() { + return defsecTypes.Bool(true, targetBucket.GetMetadata()) + } + return defsecTypes.BoolDefault(false, b.GetMetadata()) +} + +func getLifecycle(b *terraform.Block, a *adapter) []s3.Rules { + + var rules []s3.Rules + for _, r := range a.modules.GetReferencingResources(b, "aws_s3_bucket_lifecycle_configuration", "bucket") { + ruleblock := r.GetBlocks("rule") + for _, rule := range ruleblock { + rules = append(rules, s3.Rules{ + Metadata: rule.GetMetadata(), + Status: rule.GetAttribute("status").AsStringValueOrDefault("Enabled", rule), + }) + } + } + return rules +} + +func getWebsite(b *terraform.Block, a *adapter) (website *s3.Website) { + for _, r := range a.modules.GetReferencingResources(b, "aws_s3_bucket_website_configuration", "bucket") { + website = &s3.Website{ + Metadata: r.GetMetadata(), + } + } + return website +} + +func getObject(b *terraform.Block, a *adapter) []s3.Contents { + var object 
[]s3.Contents + for _, r := range a.modules.GetReferencingResources(b, "aws_s3_object", "bucket") { + object = append(object, s3.Contents{ + Metadata: r.GetMetadata(), + }) + } + return object +} + +func getAccelerateStatus(b *terraform.Block, a *adapter) defsecTypes.StringValue { + var status defsecTypes.StringValue + for _, r := range a.modules.GetReferencingResources(b, "aws_s3_bucket_accelerate_configuration", "bucket") { + status = r.GetAttribute("status").AsStringValueOrDefault("Enabled", r) + } + return status +} + +func applyForBucketRelatedResource[T any](a *adapter, block *terraform.Block, resType string, fn func(resource *terraform.Block) T) (T, bool) { + for _, resource := range a.modules.GetResourcesByType(resType) { + bucketAttr := resource.GetAttribute("bucket") + if bucketAttr.IsNotNil() { + if bucketAttr.IsString() { + actualBucketName := block.GetAttribute("bucket").AsStringValueOrDefault("", block).Value() + if bucketAttr.Equals(block.ID()) || bucketAttr.Equals(actualBucketName) { + return fn(resource), true + } + } + if referencedBlock, err := a.modules.GetReferencedBlock(bucketAttr, resource); err == nil { + if referencedBlock.ID() == block.ID() { + return fn(resource), true + } + } + } + + } + var res T + return res, false +} diff --git a/internal/adapters/terraform/aws/s3/bucket_test.go b/internal/adapters/terraform/aws/s3/bucket_test.go new file mode 100644 index 000000000000..4fcdc2e50ef9 --- /dev/null +++ b/internal/adapters/terraform/aws/s3/bucket_test.go @@ -0,0 +1,331 @@ +package s3 + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/stretchr/testify/assert" +) + +func Test_GetBuckets(t *testing.T) { + + source := ` +resource "aws_s3_bucket" "bucket1" { + + +} +` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + +} + +func 
Test_BucketGetACL(t *testing.T) { + + source := ` +resource "aws_s3_bucket" "example" { + bucket = "yournamehere" + acl = "authenticated-read" + + # ... other configuration ... +}` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.Equal(t, "authenticated-read", s3.Buckets[0].ACL.Value()) + +} + +func Test_V4BucketGetACL(t *testing.T) { + + source := ` +resource "aws_s3_bucket" "example" { + bucket = "yournamehere" +} + +resource "aws_s3_bucket_acl" "example" { + bucket = aws_s3_bucket.example.id + acl = "authenticated-read" +}` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.Equal(t, "authenticated-read", s3.Buckets[0].ACL.Value()) + +} + +func Test_BucketGetLogging(t *testing.T) { + + source := ` +resource "aws_s3_bucket" "example" { + bucket = "yournamehere" + + # ... other configuration ... + logging { + target_bucket = aws_s3_bucket.log_bucket.id + target_prefix = "log/" + } +} +` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.True(t, s3.Buckets[0].Logging.Enabled.Value()) + +} + +func Test_V4BucketGetLogging(t *testing.T) { + + source := ` +resource "aws_s3_bucket" "log_bucket" { + bucket = "example-log-bucket" + + # ... other configuration ... +} + +resource "aws_s3_bucket" "example" { + bucket = "yournamehere" + + # ... other configuration ... 
+} + +resource "aws_s3_bucket_logging" "example" { + bucket = aws_s3_bucket.example.id + target_bucket = aws_s3_bucket.log_bucket.id + target_prefix = "log/" +} +` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 2, len(s3.Buckets)) + for _, bucket := range s3.Buckets { + switch bucket.Name.Value() { + case "yournamehere": + assert.True(t, bucket.Logging.Enabled.Value()) + case "example-log-bucket": + assert.False(t, bucket.Logging.Enabled.Value()) + } + } +} + +func Test_BucketGetVersioning(t *testing.T) { + source := ` +resource "aws_s3_bucket" "example" { + bucket = "yournamehere" + + # ... other configuration ... + versioning { + enabled = true + } +}` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.True(t, s3.Buckets[0].Versioning.Enabled.Value()) +} + +func Test_V4BucketGetVersioning(t *testing.T) { + source := ` +resource "aws_s3_bucket" "example" { + bucket = "yournamehere" + + # ... other configuration ... 
+} + +resource "aws_s3_bucket_versioning" "example" { + bucket = aws_s3_bucket.example.id + versioning_configuration { + status = "Enabled" + } +}` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.True(t, s3.Buckets[0].Versioning.Enabled.Value()) +} + +func Test_BucketGetVersioningWithLockDeprecated(t *testing.T) { + source := ` +resource "aws_s3_bucket" "example" { + bucket = "mybucket" + object_lock_configuration { + object_lock_enabled = "Enabled" + } +} +` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.True(t, s3.Buckets[0].Versioning.Enabled.Value()) + +} + +func Test_BucketGetVersioningWithLockForNewBucket(t *testing.T) { + source := ` +resource "aws_s3_bucket" "example" { + bucket = "mybucket" + object_lock_enabled = true +} + +resource "aws_s3_bucket_object_lock_configuration" "example" { + bucket = aws_s3_bucket.example.id +} +` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.True(t, s3.Buckets[0].Versioning.Enabled.Value()) + +} + +func Test_BucketGetVersioningWhenLockDisabledButVersioningEnabled(t *testing.T) { + source := ` +resource "aws_s3_bucket" "example" { + bucket = "mybucket" +} + +resource "aws_s3_bucket_object_lock_configuration" "example" { + bucket = aws_s3_bucket.example.id +} + +resource "aws_s3_bucket_versioning" "example" { + bucket = aws_s3_bucket.example.id + versioning_configuration { + status = "Enabled" + } +} +` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.True(t, s3.Buckets[0].Versioning.Enabled.Value()) + +} + +func Test_BucketGetEncryption(t *testing.T) { + + source := ` + resource "aws_s3_bucket" "example" { + bucket = "yournamehere" + + # ... 
other configuration ... + server_side_encryption_configuration { + rule { + apply_server_side_encryption_by_default { + kms_master_key_id = aws_kms_key.mykey.arn + sse_algorithm = "aws:kms" + } + } + } +}` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.True(t, s3.Buckets[0].Encryption.Enabled.Value()) +} + +func Test_V4BucketGetEncryption(t *testing.T) { + + source := ` +resource "aws_s3_bucket" "example" { + bucket = "yournamehere" + + # ... other configuration ... +} + +resource "aws_s3_bucket_server_side_encryption_configuration" "example" { + bucket = aws_s3_bucket.example.id + + rule { + apply_server_side_encryption_by_default { + kms_master_key_id = aws_kms_key.mykey.arn + sse_algorithm = "aws:kms" + } + } +} +` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.True(t, s3.Buckets[0].Encryption.Enabled.Value()) +} + +func Test_BucketWithPolicy(t *testing.T) { + + source := ` +resource "aws_s3_bucket" "bucket1" { + bucket = "lol" +} + +resource "aws_s3_bucket_policy" "allow_access_from_another_account" { + bucket = aws_s3_bucket.bucket1.id + policy = data.aws_iam_policy_document.allow_access_from_another_account.json +} + +data "aws_iam_policy_document" "allow_access_from_another_account" { + statement { + principals { + type = "AWS" + identifiers = ["123456789012"] + } + + actions = [ + "s3:GetObject", + "s3:ListBucket", + ] + + resources = [ + aws_s3_bucket.bucket1.arn, + ] + } +} + +` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + require.Equal(t, 1, len(s3.Buckets)) + require.Equal(t, 1, len(s3.Buckets[0].BucketPolicies)) + + policy := s3.Buckets[0].BucketPolicies[0] + + statements, _ := policy.Document.Parsed.Statements() + require.Equal(t, 1, len(statements)) + + principals, _ := statements[0].Principals() + actions, _ := 
statements[0].Actions() + + awsPrincipals, _ := principals.AWS() + require.Equal(t, 1, len(awsPrincipals)) + require.Equal(t, 2, len(actions)) + +} diff --git a/internal/adapters/terraform/aws/s3/policies.go b/internal/adapters/terraform/aws/s3/policies.go new file mode 100644 index 000000000000..dc3f39294b27 --- /dev/null +++ b/internal/adapters/terraform/aws/s3/policies.go @@ -0,0 +1,53 @@ +package s3 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + iamAdapter "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/iam" +) + +func (a *adapter) adaptBucketPolicies() { + + for _, b := range a.modules.GetResourcesByType("aws_s3_bucket_policy") { + + policyAttr := b.GetAttribute("policy") + if policyAttr.IsNil() { + continue + } + doc, err := iamAdapter.ParsePolicyFromAttr(policyAttr, b, a.modules) + if err != nil { + continue + } + + policy := iam.Policy{ + Metadata: policyAttr.GetMetadata(), + Name: defsecTypes.StringDefault("", b.GetMetadata()), + Document: *doc, + Builtin: defsecTypes.Bool(false, b.GetMetadata()), + } + + var bucketName string + bucketAttr := b.GetAttribute("bucket") + + if bucketAttr.IsNotNil() { + if referencedBlock, err := a.modules.GetReferencedBlock(bucketAttr, b); err == nil { + if bucket, ok := a.bucketMap[referencedBlock.ID()]; ok { + bucket.BucketPolicies = append(bucket.BucketPolicies, policy) + a.bucketMap[referencedBlock.ID()] = bucket + continue + } + } + } + + if bucketAttr.IsString() { + bucketName = bucketAttr.Value().AsString() + for id, bucket := range a.bucketMap { + if bucket.Name.EqualTo(bucketName) { + bucket.BucketPolicies = append(bucket.BucketPolicies, policy) + a.bucketMap[id] = bucket + break + } + } + } + } +} diff --git a/internal/adapters/terraform/aws/s3/public_access_block.go b/internal/adapters/terraform/aws/s3/public_access_block.go new file mode 100644 index 000000000000..0c9455485dda --- /dev/null +++ 
b/internal/adapters/terraform/aws/s3/public_access_block.go @@ -0,0 +1,41 @@ +package s3 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/s3" +) + +func (a *adapter) adaptPublicAccessBlocks() { + + for _, b := range a.modules.GetResourcesByType("aws_s3_bucket_public_access_block") { + + pba := s3.PublicAccessBlock{ + Metadata: b.GetMetadata(), + BlockPublicACLs: b.GetAttribute("block_public_acls").AsBoolValueOrDefault(false, b), + BlockPublicPolicy: b.GetAttribute("block_public_policy").AsBoolValueOrDefault(false, b), + IgnorePublicACLs: b.GetAttribute("ignore_public_acls").AsBoolValueOrDefault(false, b), + RestrictPublicBuckets: b.GetAttribute("restrict_public_buckets").AsBoolValueOrDefault(false, b), + } + + var bucketName string + bucketAttr := b.GetAttribute("bucket") + if bucketAttr.IsNotNil() { + if referencedBlock, err := a.modules.GetReferencedBlock(bucketAttr, b); err == nil { + if bucket, ok := a.bucketMap[referencedBlock.ID()]; ok { + bucket.PublicAccessBlock = &pba + a.bucketMap[referencedBlock.ID()] = bucket + continue + } + } + } + if bucketAttr.IsString() { + bucketName = bucketAttr.Value().AsString() + for id, bucket := range a.bucketMap { + if bucketAttr.Equals(id) || bucket.Name.EqualTo(bucketName) { + bucket.PublicAccessBlock = &pba + a.bucketMap[id] = bucket + continue + } + } + } + } +} diff --git a/internal/adapters/terraform/aws/sns/adapt.go b/internal/adapters/terraform/aws/sns/adapt.go new file mode 100644 index 000000000000..c746dc9520f3 --- /dev/null +++ b/internal/adapters/terraform/aws/sns/adapt.go @@ -0,0 +1,38 @@ +package sns + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/sns" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) sns.SNS { + return sns.SNS{ + Topics: adaptTopics(modules), + } +} + +func adaptTopics(modules terraform.Modules) []sns.Topic { + var topics []sns.Topic + for _, 
module := range modules { + for _, resource := range module.GetResourcesByType("aws_sns_topic") { + topics = append(topics, adaptTopic(resource)) + } + } + return topics +} + +func adaptTopic(resourceBlock *terraform.Block) sns.Topic { + return sns.Topic{ + Metadata: resourceBlock.GetMetadata(), + ARN: types.StringDefault("", resourceBlock.GetMetadata()), + Encryption: adaptEncryption(resourceBlock), + } +} + +func adaptEncryption(resourceBlock *terraform.Block) sns.Encryption { + return sns.Encryption{ + Metadata: resourceBlock.GetMetadata(), + KMSKeyID: resourceBlock.GetAttribute("kms_master_key_id").AsStringValueOrDefault("", resourceBlock), + } +} diff --git a/internal/adapters/terraform/aws/sns/adapt_test.go b/internal/adapters/terraform/aws/sns/adapt_test.go new file mode 100644 index 000000000000..c02dd9ea9c59 --- /dev/null +++ b/internal/adapters/terraform/aws/sns/adapt_test.go @@ -0,0 +1,82 @@ +package sns + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/sns" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptTopic(t *testing.T) { + tests := []struct { + name string + terraform string + expected sns.Topic + }{ + { + name: "defined", + terraform: ` + resource "aws_sns_topic" "good_example" { + kms_master_key_id = "/blah" + } +`, + expected: sns.Topic{ + Metadata: defsecTypes.NewTestMetadata(), + ARN: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Encryption: sns.Encryption{ + Metadata: defsecTypes.NewTestMetadata(), + KMSKeyID: defsecTypes.String("/blah", defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "default", + terraform: ` + resource "aws_sns_topic" "good_example" { + } +`, + expected: sns.Topic{ + Metadata: 
defsecTypes.NewTestMetadata(), + ARN: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Encryption: sns.Encryption{ + Metadata: defsecTypes.NewTestMetadata(), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptTopic(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_sns_topic" "good_example" { + kms_master_key_id = "/blah" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Topics, 1) + topic := adapted.Topics[0] + + assert.Equal(t, 2, topic.Metadata.Range().GetStartLine()) + assert.Equal(t, 4, topic.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, topic.Encryption.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, topic.Encryption.KMSKeyID.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/sqs/adapt.go b/internal/adapters/terraform/aws/sqs/adapt.go new file mode 100644 index 000000000000..04bca31e7101 --- /dev/null +++ b/internal/adapters/terraform/aws/sqs/adapt.go @@ -0,0 +1,167 @@ +package sqs + +import ( + iamp "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/iam" + "github.com/liamg/iamgo" + + "github.com/google/uuid" +) + +func Adapt(modules terraform.Modules) sqs.SQS { + return sqs.SQS{ + Queues: (&adapter{ + modules: modules, + queues: make(map[string]sqs.Queue), + }).adaptQueues(), + } +} + +type adapter struct { + modules terraform.Modules + 
queues map[string]sqs.Queue +} + +func (a *adapter) adaptQueues() []sqs.Queue { + for _, resource := range a.modules.GetResourcesByType("aws_sqs_queue") { + a.adaptQueue(resource) + } + + for _, policyBlock := range a.modules.GetResourcesByType("aws_sqs_queue_policy") { + + policy := iamp.Policy{ + Metadata: policyBlock.GetMetadata(), + Name: defsecTypes.StringDefault("", policyBlock.GetMetadata()), + Document: iamp.Document{ + Metadata: policyBlock.GetMetadata(), + }, + Builtin: defsecTypes.Bool(false, policyBlock.GetMetadata()), + } + if attr := policyBlock.GetAttribute("policy"); attr.IsString() { + dataBlock, err := a.modules.GetBlockById(attr.Value().AsString()) + if err != nil { + parsed, err := iamgo.ParseString(attr.Value().AsString()) + if err != nil { + continue + } + policy.Document.Parsed = *parsed + policy.Document.Metadata = attr.GetMetadata() + } else if dataBlock.Type() == "data" && dataBlock.TypeLabel() == "aws_iam_policy_document" { + if doc, err := iam.ConvertTerraformDocument(a.modules, dataBlock); err == nil { + policy.Document.Parsed = doc.Document + policy.Document.Metadata = doc.Source.GetMetadata() + policy.Document.IsOffset = true + } + } + } else if refBlock, err := a.modules.GetReferencedBlock(attr, policyBlock); err == nil { + if refBlock.Type() == "data" && refBlock.TypeLabel() == "aws_iam_policy_document" { + if doc, err := iam.ConvertTerraformDocument(a.modules, refBlock); err == nil { + policy.Document.Parsed = doc.Document + policy.Document.Metadata = doc.Source.GetMetadata() + } + } + } + + if urlAttr := policyBlock.GetAttribute("queue_url"); urlAttr.IsNotNil() { + if refBlock, err := a.modules.GetReferencedBlock(urlAttr, policyBlock); err == nil { + if queue, ok := a.queues[refBlock.ID()]; ok { + queue.Policies = append(queue.Policies, policy) + a.queues[refBlock.ID()] = queue + continue + } + } + } + + a.queues[uuid.NewString()] = sqs.Queue{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + QueueURL: 
defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + Encryption: sqs.Encryption{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + ManagedEncryption: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + KMSKeyID: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + Policies: []iamp.Policy{policy}, + } + } + + var queues []sqs.Queue + for _, queue := range a.queues { + queues = append(queues, queue) + } + return queues +} + +func (a *adapter) adaptQueue(resource *terraform.Block) { + + kmsKeyIdAttr := resource.GetAttribute("kms_master_key_id") + kmsKeyIdVal := kmsKeyIdAttr.AsStringValueOrDefault("", resource) + managedEncryption := resource.GetAttribute("sqs_managed_sse_enabled") + + var policies []iamp.Policy + if attr := resource.GetAttribute("policy"); attr.IsString() { + + dataBlock, err := a.modules.GetBlockById(attr.Value().AsString()) + if err != nil { + policy := iamp.Policy{ + Metadata: attr.GetMetadata(), + Name: defsecTypes.StringDefault("", attr.GetMetadata()), + Document: iamp.Document{ + Metadata: attr.GetMetadata(), + }, + Builtin: defsecTypes.Bool(false, attr.GetMetadata()), + } + parsed, err := iamgo.ParseString(attr.Value().AsString()) + if err == nil { + policy.Document.Parsed = *parsed + policy.Document.Metadata = attr.GetMetadata() + policy.Metadata = attr.GetMetadata() + policies = append(policies, policy) + } + } else if dataBlock.Type() == "data" && dataBlock.TypeLabel() == "aws_iam_policy_document" { + if doc, err := iam.ConvertTerraformDocument(a.modules, dataBlock); err == nil { + policy := iamp.Policy{ + Metadata: attr.GetMetadata(), + Name: defsecTypes.StringDefault("", attr.GetMetadata()), + Document: iamp.Document{ + Metadata: doc.Source.GetMetadata(), + Parsed: doc.Document, + IsOffset: true, + HasRefs: false, + }, + Builtin: defsecTypes.Bool(false, attr.GetMetadata()), + } + policies = append(policies, policy) + } + } + + } else if refBlock, err := 
a.modules.GetReferencedBlock(attr, resource); err == nil { + if refBlock.Type() == "data" && refBlock.TypeLabel() == "aws_iam_policy_document" { + if doc, err := iam.ConvertTerraformDocument(a.modules, refBlock); err == nil { + policy := iamp.Policy{ + Metadata: doc.Source.GetMetadata(), + Name: defsecTypes.StringDefault("", doc.Source.GetMetadata()), + Document: iamp.Document{ + Metadata: doc.Source.GetMetadata(), + Parsed: doc.Document, + }, + Builtin: defsecTypes.Bool(false, refBlock.GetMetadata()), + } + policies = append(policies, policy) + } + } + } + + a.queues[resource.ID()] = sqs.Queue{ + Metadata: resource.GetMetadata(), + QueueURL: defsecTypes.StringDefault("", resource.GetMetadata()), + Encryption: sqs.Encryption{ + Metadata: resource.GetMetadata(), + ManagedEncryption: managedEncryption.AsBoolValueOrDefault(false, resource), + KMSKeyID: kmsKeyIdVal, + }, + Policies: policies, + } +} diff --git a/internal/adapters/terraform/aws/sqs/adapt_test.go b/internal/adapters/terraform/aws/sqs/adapt_test.go new file mode 100644 index 000000000000..736d09d1b0e5 --- /dev/null +++ b/internal/adapters/terraform/aws/sqs/adapt_test.go @@ -0,0 +1,140 @@ +package sqs + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/liamg/iamgo" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected sqs.SQS + }{ + { + name: "np kms key", + terraform: ` + resource "aws_sqs_queue" "good_example" { + + policy = <= azurerm 2.97.0 + if omsAgentBlock := resource.GetBlock("oms_agent"); omsAgentBlock.IsNotNil() { + 
cluster.AddonProfile.OMSAgent.Metadata = omsAgentBlock.GetMetadata() + cluster.AddonProfile.OMSAgent.Enabled = defsecTypes.Bool(true, omsAgentBlock.GetMetadata()) + } + + // azurerm < 2.99.0 + if resource.HasChild("role_based_access_control") { + roleBasedAccessControlBlock := resource.GetBlock("role_based_access_control") + rbEnabledAttr := roleBasedAccessControlBlock.GetAttribute("enabled") + cluster.RoleBasedAccessControl.Metadata = roleBasedAccessControlBlock.GetMetadata() + cluster.RoleBasedAccessControl.Enabled = rbEnabledAttr.AsBoolValueOrDefault(false, roleBasedAccessControlBlock) + } + if resource.HasChild("role_based_access_control_enabled") { + // azurerm >= 2.99.0 + roleBasedAccessControlEnabledAttr := resource.GetAttribute("role_based_access_control_enabled") + cluster.RoleBasedAccessControl.Metadata = roleBasedAccessControlEnabledAttr.GetMetadata() + cluster.RoleBasedAccessControl.Enabled = roleBasedAccessControlEnabledAttr.AsBoolValueOrDefault(false, resource) + } + + if resource.HasChild("azure_active_directory_role_based_access_control") { + azureRoleBasedAccessControl := resource.GetBlock("azure_active_directory_role_based_access_control") + if azureRoleBasedAccessControl.IsNotNil() { + enabledAttr := azureRoleBasedAccessControl.GetAttribute("azure_rbac_enabled") + if !cluster.RoleBasedAccessControl.Enabled.IsTrue() { + cluster.RoleBasedAccessControl.Metadata = azureRoleBasedAccessControl.GetMetadata() + cluster.RoleBasedAccessControl.Enabled = enabledAttr.AsBoolValueOrDefault(false, azureRoleBasedAccessControl) + } + } + } + return cluster +} diff --git a/internal/adapters/terraform/azure/container/adapt_test.go b/internal/adapters/terraform/azure/container/adapt_test.go new file mode 100644 index 000000000000..8c5c450153f8 --- /dev/null +++ b/internal/adapters/terraform/azure/container/adapt_test.go @@ -0,0 +1,262 @@ +package container + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + 
"github.com/aquasecurity/defsec/pkg/providers/azure/container" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptCluster(t *testing.T) { + tests := []struct { + name string + terraform string + expected container.KubernetesCluster + }{ + { + name: "defined", + terraform: ` + resource "azurerm_kubernetes_cluster" "example" { + private_cluster_enabled = true + + network_profile { + network_policy = "calico" + } + + api_server_access_profile { + + authorized_ip_ranges = [ + "1.2.3.4/32" + ] + + } + + addon_profile { + oms_agent { + enabled = true + } + } + + role_based_access_control { + enabled = true + } + } +`, + expected: container.KubernetesCluster{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkProfile: container.NetworkProfile{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkPolicy: defsecTypes.String("calico", defsecTypes.NewTestMetadata()), + }, + EnablePrivateCluster: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + APIServerAuthorizedIPRanges: []defsecTypes.StringValue{ + defsecTypes.String("1.2.3.4/32", defsecTypes.NewTestMetadata()), + }, + AddonProfile: container.AddonProfile{ + Metadata: defsecTypes.NewTestMetadata(), + OMSAgent: container.OMSAgent{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + RoleBasedAccessControl: container.RoleBasedAccessControl{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "rbac with a new syntax", + terraform: ` + resource "azurerm_kubernetes_cluster" "example" { + role_based_access_control_enabled = true + } +`, + expected: container.KubernetesCluster{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkProfile: 
container.NetworkProfile{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkPolicy: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + EnablePrivateCluster: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + AddonProfile: container.AddonProfile{ + Metadata: defsecTypes.NewTestMetadata(), + OMSAgent: container.OMSAgent{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + RoleBasedAccessControl: container.RoleBasedAccessControl{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "defaults", + terraform: ` + resource "azurerm_kubernetes_cluster" "example" { + } +`, + expected: container.KubernetesCluster{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkProfile: container.NetworkProfile{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkPolicy: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + EnablePrivateCluster: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + AddonProfile: container.AddonProfile{ + Metadata: defsecTypes.NewTestMetadata(), + OMSAgent: container.OMSAgent{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + RoleBasedAccessControl: container.RoleBasedAccessControl{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "rbac off with k8s rbac on", + terraform: ` +resource "azurerm_kubernetes_cluster" "misreporting_example" { + role_based_access_control_enabled = true # Enable k8s RBAC + azure_active_directory_role_based_access_control { + managed = true # Enable AKS-managed Azure AAD integration + azure_rbac_enabled = false # Explicitly disable Azure RBAC for Kubernetes Authorization + } + } +`, + expected: container.KubernetesCluster{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkProfile: 
container.NetworkProfile{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkPolicy: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + EnablePrivateCluster: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + AddonProfile: container.AddonProfile{ + Metadata: defsecTypes.NewTestMetadata(), + OMSAgent: container.OMSAgent{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + RoleBasedAccessControl: container.RoleBasedAccessControl{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptCluster(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_kubernetes_cluster" "example" { + private_cluster_enabled = true + + network_profile { + network_policy = "calico" + } + + api_server_access_profile { + + authorized_ip_ranges = [ + "1.2.3.4/32" + ] + + } + + addon_profile { + oms_agent { + enabled = true + } + } + + role_based_access_control { + enabled = true + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.KubernetesClusters, 1) + cluster := adapted.KubernetesClusters[0] + + assert.Equal(t, 3, cluster.EnablePrivateCluster.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, cluster.EnablePrivateCluster.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 5, cluster.NetworkProfile.Metadata.Range().GetStartLine()) + assert.Equal(t, 7, cluster.NetworkProfile.Metadata.Range().GetEndLine()) + + assert.Equal(t, 6, cluster.NetworkProfile.NetworkPolicy.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, 
cluster.NetworkProfile.NetworkPolicy.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, cluster.APIServerAuthorizedIPRanges[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 13, cluster.APIServerAuthorizedIPRanges[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 17, cluster.AddonProfile.Metadata.Range().GetStartLine()) + assert.Equal(t, 21, cluster.AddonProfile.Metadata.Range().GetEndLine()) + + assert.Equal(t, 18, cluster.AddonProfile.OMSAgent.Metadata.Range().GetStartLine()) + assert.Equal(t, 20, cluster.AddonProfile.OMSAgent.Metadata.Range().GetEndLine()) + + assert.Equal(t, 19, cluster.AddonProfile.OMSAgent.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 19, cluster.AddonProfile.OMSAgent.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 23, cluster.RoleBasedAccessControl.Metadata.Range().GetStartLine()) + assert.Equal(t, 25, cluster.RoleBasedAccessControl.Metadata.Range().GetEndLine()) + + assert.Equal(t, 24, cluster.RoleBasedAccessControl.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 24, cluster.RoleBasedAccessControl.Enabled.GetMetadata().Range().GetEndLine()) +} + +func TestWithLocals(t *testing.T) { + src := ` + variable "ip_whitelist" { + description = "IP Ranges with allowed access." 
+ type = list(string) + default = ["1.2.3.4"] +} + +locals { + ip_whitelist = concat(var.ip_whitelist, split(",", data.azurerm_public_ip.build_agents.ip_address)) +} + +resource "azurerm_kubernetes_cluster" "aks" { + # not working + api_server_access_profile { + authorized_ip_ranges = local.ip_whitelist + } + # working + api_server_access_profile { + authorized_ip_ranges = concat(var.ip_whitelist, split(",", data.azurerm_public_ip.example.ip_address)) + } +}` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.KubernetesClusters, 1) + cluster := adapted.KubernetesClusters[0] + require.Len(t, cluster.APIServerAuthorizedIPRanges, 1) + assert.False(t, cluster.APIServerAuthorizedIPRanges[0].GetMetadata().IsResolvable()) +} diff --git a/internal/adapters/terraform/azure/database/adapt.go b/internal/adapters/terraform/azure/database/adapt.go new file mode 100644 index 000000000000..4ec4027ab718 --- /dev/null +++ b/internal/adapters/terraform/azure/database/adapt.go @@ -0,0 +1,439 @@ +package database + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/database" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) database.Database { + + mssqlAdapter := mssqlAdapter{ + alertPolicyIDs: modules.GetChildResourceIDMapByType("azurerm_mssql_server_security_alert_policy"), + auditingPolicyIDs: modules.GetChildResourceIDMapByType("azurerm_mssql_server_extended_auditing_policy", "azurerm_mssql_database_extended_auditing_policy"), + firewallIDs: modules.GetChildResourceIDMapByType("azurerm_sql_firewall_rule", "azurerm_mssql_firewall_rule"), + } + + mysqlAdapter := mysqlAdapter{ + firewallIDs: modules.GetChildResourceIDMapByType("azurerm_mysql_firewall_rule"), + } + + mariaDBAdapter := mariaDBAdapter{ + firewallIDs: 
modules.GetChildResourceIDMapByType("azurerm_mariadb_firewall_rule"), + } + + postgresqlAdapter := postgresqlAdapter{ + firewallIDs: modules.GetChildResourceIDMapByType("azurerm_postgresql_firewall_rule"), + } + + return database.Database{ + MSSQLServers: mssqlAdapter.adaptMSSQLServers(modules), + MariaDBServers: mariaDBAdapter.adaptMariaDBServers(modules), + MySQLServers: mysqlAdapter.adaptMySQLServers(modules), + PostgreSQLServers: postgresqlAdapter.adaptPostgreSQLServers(modules), + } +} + +type mssqlAdapter struct { + alertPolicyIDs terraform.ResourceIDResolutions + auditingPolicyIDs terraform.ResourceIDResolutions + firewallIDs terraform.ResourceIDResolutions +} + +type mysqlAdapter struct { + firewallIDs terraform.ResourceIDResolutions +} + +type mariaDBAdapter struct { + firewallIDs terraform.ResourceIDResolutions +} + +type postgresqlAdapter struct { + firewallIDs terraform.ResourceIDResolutions +} + +func (a *mssqlAdapter) adaptMSSQLServers(modules terraform.Modules) []database.MSSQLServer { + var mssqlServers []database.MSSQLServer + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_sql_server") { + mssqlServers = append(mssqlServers, a.adaptMSSQLServer(resource, module)) + } + for _, resource := range module.GetResourcesByType("azurerm_mssql_server") { + mssqlServers = append(mssqlServers, a.adaptMSSQLServer(resource, module)) + } + } + + orphanResources := modules.GetResourceByIDs(a.alertPolicyIDs.Orphans()...) 
+ + if len(orphanResources) > 0 { + orphanage := database.MSSQLServer{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnableSSLEnforcement: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + MinimumTLSVersion: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + EnablePublicNetworkAccess: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + FirewallRules: nil, + }, + ExtendedAuditingPolicies: nil, + SecurityAlertPolicies: nil, + } + for _, policy := range orphanResources { + orphanage.SecurityAlertPolicies = append(orphanage.SecurityAlertPolicies, adaptMSSQLSecurityAlertPolicy(policy)) + } + mssqlServers = append(mssqlServers, orphanage) + + } + + orphanResources = modules.GetResourceByIDs(a.auditingPolicyIDs.Orphans()...) + + if len(orphanResources) > 0 { + orphanage := database.MSSQLServer{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnableSSLEnforcement: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + MinimumTLSVersion: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + EnablePublicNetworkAccess: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + FirewallRules: nil, + }, + } + for _, policy := range orphanResources { + orphanage.ExtendedAuditingPolicies = append(orphanage.ExtendedAuditingPolicies, adaptMSSQLExtendedAuditingPolicy(policy)) + } + mssqlServers = append(mssqlServers, orphanage) + + } + + orphanResources = modules.GetResourceByIDs(a.firewallIDs.Orphans()...) 
+ + if len(orphanResources) > 0 { + orphanage := database.MSSQLServer{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + } + for _, policy := range orphanResources { + orphanage.FirewallRules = append(orphanage.FirewallRules, adaptFirewallRule(policy)) + } + mssqlServers = append(mssqlServers, orphanage) + + } + + return mssqlServers +} +func (a *mysqlAdapter) adaptMySQLServers(modules terraform.Modules) []database.MySQLServer { + var mySQLServers []database.MySQLServer + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_mysql_server") { + mySQLServers = append(mySQLServers, a.adaptMySQLServer(resource, module)) + } + } + + orphanResources := modules.GetResourceByIDs(a.firewallIDs.Orphans()...) + + if len(orphanResources) > 0 { + orphanage := database.MySQLServer{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnableSSLEnforcement: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + MinimumTLSVersion: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + EnablePublicNetworkAccess: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + FirewallRules: nil, + }, + } + for _, policy := range orphanResources { + orphanage.FirewallRules = append(orphanage.FirewallRules, adaptFirewallRule(policy)) + } + mySQLServers = append(mySQLServers, orphanage) + + } + + return mySQLServers +} + +func (a *mariaDBAdapter) adaptMariaDBServers(modules terraform.Modules) []database.MariaDBServer { + var mariaDBServers []database.MariaDBServer + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_mariadb_server") { + mariaDBServers = append(mariaDBServers, a.adaptMariaDBServer(resource, module)) + } + } + + orphanResources := modules.GetResourceByIDs(a.firewallIDs.Orphans()...) 
+ + if len(orphanResources) > 0 { + orphanage := database.MariaDBServer{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnableSSLEnforcement: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + MinimumTLSVersion: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + EnablePublicNetworkAccess: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + FirewallRules: nil, + }, + } + for _, policy := range orphanResources { + orphanage.FirewallRules = append(orphanage.FirewallRules, adaptFirewallRule(policy)) + } + mariaDBServers = append(mariaDBServers, orphanage) + + } + + return mariaDBServers +} + +func (a *postgresqlAdapter) adaptPostgreSQLServers(modules terraform.Modules) []database.PostgreSQLServer { + var postgreSQLServers []database.PostgreSQLServer + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_postgresql_server") { + postgreSQLServers = append(postgreSQLServers, a.adaptPostgreSQLServer(resource, module)) + } + } + + orphanResources := modules.GetResourceByIDs(a.firewallIDs.Orphans()...) 
+ + if len(orphanResources) > 0 { + orphanage := database.PostgreSQLServer{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnableSSLEnforcement: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + MinimumTLSVersion: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + EnablePublicNetworkAccess: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + FirewallRules: nil, + }, + Config: database.PostgresSQLConfig{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + LogCheckpoints: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + ConnectionThrottling: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + LogConnections: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + } + for _, policy := range orphanResources { + orphanage.FirewallRules = append(orphanage.FirewallRules, adaptFirewallRule(policy)) + } + postgreSQLServers = append(postgreSQLServers, orphanage) + + } + + return postgreSQLServers +} + +func (a *mssqlAdapter) adaptMSSQLServer(resource *terraform.Block, module *terraform.Module) database.MSSQLServer { + minTLSVersionVal := defsecTypes.StringDefault("", resource.GetMetadata()) + publicAccessVal := defsecTypes.BoolDefault(true, resource.GetMetadata()) + enableSSLEnforcementVal := defsecTypes.BoolDefault(false, resource.GetMetadata()) + + var auditingPolicies []database.ExtendedAuditingPolicy + var alertPolicies []database.SecurityAlertPolicy + var firewallRules []database.FirewallRule + + if resource.TypeLabel() == "azurerm_mssql_server" { + minTLSVersionAttr := resource.GetAttribute("minimum_tls_version") + minTLSVersionVal = minTLSVersionAttr.AsStringValueOrDefault("", resource) + + publicAccessAttr := resource.GetAttribute("public_network_access_enabled") + publicAccessVal = publicAccessAttr.AsBoolValueOrDefault(true, resource) + + } + + alertPolicyBlocks := 
module.GetReferencingResources(resource, "azurerm_mssql_server_security_alert_policy", "server_name") + for _, alertBlock := range alertPolicyBlocks { + a.alertPolicyIDs.Resolve(alertBlock.ID()) + alertPolicies = append(alertPolicies, adaptMSSQLSecurityAlertPolicy(alertBlock)) + } + + auditingPoliciesBlocks := module.GetReferencingResources(resource, "azurerm_mssql_server_extended_auditing_policy", "server_id") + if resource.HasChild("extended_auditing_policy") { + auditingPoliciesBlocks = append(auditingPoliciesBlocks, resource.GetBlocks("extended_auditing_policy")...) + } + + databasesRes := module.GetReferencingResources(resource, "azurerm_mssql_database", "server_id") + for _, databaseRes := range databasesRes { + dbAuditingBlocks := module.GetReferencingResources(databaseRes, "azurerm_mssql_database_extended_auditing_policy", "database_id") + auditingPoliciesBlocks = append(auditingPoliciesBlocks, dbAuditingBlocks...) + } + + for _, auditBlock := range auditingPoliciesBlocks { + a.auditingPolicyIDs.Resolve(auditBlock.ID()) + auditingPolicies = append(auditingPolicies, adaptMSSQLExtendedAuditingPolicy(auditBlock)) + } + + firewallRuleBlocks := module.GetReferencingResources(resource, "azurerm_sql_firewall_rule", "server_name") + firewallRuleBlocks = append(firewallRuleBlocks, module.GetReferencingResources(resource, "azurerm_mssql_firewall_rule", "server_id")...) 
+ for _, firewallBlock := range firewallRuleBlocks { + a.firewallIDs.Resolve(firewallBlock.ID()) + firewallRules = append(firewallRules, adaptFirewallRule(firewallBlock)) + } + + return database.MSSQLServer{ + Metadata: resource.GetMetadata(), + Server: database.Server{ + Metadata: resource.GetMetadata(), + EnableSSLEnforcement: enableSSLEnforcementVal, + MinimumTLSVersion: minTLSVersionVal, + EnablePublicNetworkAccess: publicAccessVal, + FirewallRules: firewallRules, + }, + ExtendedAuditingPolicies: auditingPolicies, + SecurityAlertPolicies: alertPolicies, + } +} + +func (a *mysqlAdapter) adaptMySQLServer(resource *terraform.Block, module *terraform.Module) database.MySQLServer { + var firewallRules []database.FirewallRule + + enableSSLEnforcementAttr := resource.GetAttribute("ssl_enforcement_enabled") + enableSSLEnforcementVal := enableSSLEnforcementAttr.AsBoolValueOrDefault(false, resource) + + minTLSVersionAttr := resource.GetAttribute("ssl_minimal_tls_version_enforced") + minTLSVersionVal := minTLSVersionAttr.AsStringValueOrDefault("TLSEnforcementDisabled", resource) + + publicAccessAttr := resource.GetAttribute("public_network_access_enabled") + publicAccessVal := publicAccessAttr.AsBoolValueOrDefault(true, resource) + + firewallRuleBlocks := module.GetReferencingResources(resource, "azurerm_mysql_firewall_rule", "server_name") + for _, firewallBlock := range firewallRuleBlocks { + a.firewallIDs.Resolve(firewallBlock.ID()) + firewallRules = append(firewallRules, adaptFirewallRule(firewallBlock)) + } + + return database.MySQLServer{ + Metadata: resource.GetMetadata(), + Server: database.Server{ + Metadata: resource.GetMetadata(), + EnableSSLEnforcement: enableSSLEnforcementVal, + MinimumTLSVersion: minTLSVersionVal, + EnablePublicNetworkAccess: publicAccessVal, + FirewallRules: firewallRules, + }, + } +} + +func (a *mariaDBAdapter) adaptMariaDBServer(resource *terraform.Block, module *terraform.Module) database.MariaDBServer { + var firewallRules 
[]database.FirewallRule + + enableSSLEnforcementAttr := resource.GetAttribute("ssl_enforcement_enabled") + enableSSLEnforcementVal := enableSSLEnforcementAttr.AsBoolValueOrDefault(false, resource) + + publicAccessAttr := resource.GetAttribute("public_network_access_enabled") + publicAccessVal := publicAccessAttr.AsBoolValueOrDefault(true, resource) + + firewallRuleBlocks := module.GetReferencingResources(resource, "azurerm_mariadb_firewall_rule", "server_name") + for _, firewallBlock := range firewallRuleBlocks { + a.firewallIDs.Resolve(firewallBlock.ID()) + firewallRules = append(firewallRules, adaptFirewallRule(firewallBlock)) + } + + return database.MariaDBServer{ + Metadata: resource.GetMetadata(), + Server: database.Server{ + Metadata: resource.GetMetadata(), + EnableSSLEnforcement: enableSSLEnforcementVal, + MinimumTLSVersion: defsecTypes.StringDefault("", resource.GetMetadata()), + EnablePublicNetworkAccess: publicAccessVal, + FirewallRules: firewallRules, + }, + } +} + +func (a *postgresqlAdapter) adaptPostgreSQLServer(resource *terraform.Block, module *terraform.Module) database.PostgreSQLServer { + var firewallRules []database.FirewallRule + + enableSSLEnforcementAttr := resource.GetAttribute("ssl_enforcement_enabled") + enableSSLEnforcementVal := enableSSLEnforcementAttr.AsBoolValueOrDefault(false, resource) + + minTLSVersionAttr := resource.GetAttribute("ssl_minimal_tls_version_enforced") + minTLSVersionVal := minTLSVersionAttr.AsStringValueOrDefault("TLSEnforcementDisabled", resource) + + publicAccessAttr := resource.GetAttribute("public_network_access_enabled") + publicAccessVal := publicAccessAttr.AsBoolValueOrDefault(true, resource) + + firewallRuleBlocks := module.GetReferencingResources(resource, "azurerm_postgresql_firewall_rule", "server_name") + for _, firewallBlock := range firewallRuleBlocks { + a.firewallIDs.Resolve(firewallBlock.ID()) + firewallRules = append(firewallRules, adaptFirewallRule(firewallBlock)) + } + + configBlocks := 
module.GetReferencingResources(resource, "azurerm_postgresql_configuration", "server_name") + config := adaptPostgreSQLConfig(resource, configBlocks) + + return database.PostgreSQLServer{ + Metadata: resource.GetMetadata(), + Server: database.Server{ + Metadata: resource.GetMetadata(), + EnableSSLEnforcement: enableSSLEnforcementVal, + MinimumTLSVersion: minTLSVersionVal, + EnablePublicNetworkAccess: publicAccessVal, + FirewallRules: firewallRules, + }, + Config: config, + } +} + +func adaptPostgreSQLConfig(resource *terraform.Block, configBlocks []*terraform.Block) database.PostgresSQLConfig { + config := database.PostgresSQLConfig{ + Metadata: resource.GetMetadata(), + LogCheckpoints: defsecTypes.BoolDefault(false, resource.GetMetadata()), + ConnectionThrottling: defsecTypes.BoolDefault(false, resource.GetMetadata()), + LogConnections: defsecTypes.BoolDefault(false, resource.GetMetadata()), + } + + for _, configBlock := range configBlocks { + + nameAttr := configBlock.GetAttribute("name") + valAttr := configBlock.GetAttribute("value") + + if nameAttr.Equals("log_checkpoints") { + config.LogCheckpoints = defsecTypes.Bool(valAttr.Equals("on"), valAttr.GetMetadata()) + } + if nameAttr.Equals("connection_throttling") { + config.ConnectionThrottling = defsecTypes.Bool(valAttr.Equals("on"), valAttr.GetMetadata()) + } + if nameAttr.Equals("log_connections") { + config.LogConnections = defsecTypes.Bool(valAttr.Equals("on"), valAttr.GetMetadata()) + } + } + + return config +} + +func adaptMSSQLSecurityAlertPolicy(resource *terraform.Block) database.SecurityAlertPolicy { + + emailAddressesAttr := resource.GetAttribute("email_addresses") + disabledAlertsAttr := resource.GetAttribute("disabled_alerts") + + emailAccountAdminsAttr := resource.GetAttribute("email_account_admins") + emailAccountAdminsVal := emailAccountAdminsAttr.AsBoolValueOrDefault(false, resource) + + return database.SecurityAlertPolicy{ + Metadata: resource.GetMetadata(), + EmailAddresses: 
emailAddressesAttr.AsStringValues(), + DisabledAlerts: disabledAlertsAttr.AsStringValues(), + EmailAccountAdmins: emailAccountAdminsVal, + } +} + +func adaptFirewallRule(resource *terraform.Block) database.FirewallRule { + startIPAttr := resource.GetAttribute("start_ip_address") + startIPVal := startIPAttr.AsStringValueOrDefault("", resource) + + endIPAttr := resource.GetAttribute("end_ip_address") + endIPVal := endIPAttr.AsStringValueOrDefault("", resource) + + return database.FirewallRule{ + Metadata: resource.GetMetadata(), + StartIP: startIPVal, + EndIP: endIPVal, + } +} + +func adaptMSSQLExtendedAuditingPolicy(resource *terraform.Block) database.ExtendedAuditingPolicy { + retentionInDaysAttr := resource.GetAttribute("retention_in_days") + retentionInDaysVal := retentionInDaysAttr.AsIntValueOrDefault(0, resource) + + return database.ExtendedAuditingPolicy{ + Metadata: resource.GetMetadata(), + RetentionInDays: retentionInDaysVal, + } +} diff --git a/internal/adapters/terraform/azure/database/adapt_test.go b/internal/adapters/terraform/azure/database/adapt_test.go new file mode 100644 index 000000000000..eb449b08b35b --- /dev/null +++ b/internal/adapters/terraform/azure/database/adapt_test.go @@ -0,0 +1,454 @@ +package database + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/database" + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected database.Database + }{ + { + name: "postgresql", + terraform: ` + resource "azurerm_postgresql_server" "example" { + name = "example" + + public_network_access_enabled = true + ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced 
= "TLS1_2" + } + + resource "azurerm_postgresql_configuration" "example" { + name = "log_connections" + resource_group_name = azurerm_resource_group.example.name + server_name = azurerm_postgresql_server.example.name + value = "on" + } + + resource "azurerm_postgresql_configuration" "example" { + name = "log_checkpoints" + resource_group_name = azurerm_resource_group.example.name + server_name = azurerm_postgresql_server.example.name + value = "on" + } + + resource "azurerm_postgresql_configuration" "example" { + name = "connection_throttling" + resource_group_name = azurerm_resource_group.example.name + server_name = azurerm_postgresql_server.example.name + value = "on" + } + + resource "azurerm_postgresql_firewall_rule" "example" { + name = "office" + resource_group_name = azurerm_resource_group.example.name + server_name = azurerm_postgresql_server.example.name + start_ip_address = "40.112.8.12" + end_ip_address = "40.112.8.12" + } +`, + expected: database.Database{ + PostgreSQLServers: []database.PostgreSQLServer{ + { + Metadata: defsecTypes.NewTestMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewTestMetadata(), + EnableSSLEnforcement: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + MinimumTLSVersion: defsecTypes.String("TLS1_2", defsecTypes.NewTestMetadata()), + EnablePublicNetworkAccess: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + FirewallRules: []database.FirewallRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + StartIP: defsecTypes.String("40.112.8.12", defsecTypes.NewTestMetadata()), + EndIP: defsecTypes.String("40.112.8.12", defsecTypes.NewTestMetadata()), + }, + }, + }, + Config: database.PostgresSQLConfig{ + Metadata: defsecTypes.NewTestMetadata(), + LogConnections: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + LogCheckpoints: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + ConnectionThrottling: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + { + name: 
"mariadb", + terraform: ` + resource "azurerm_mariadb_server" "example" { + name = "example-mariadb-server" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + + public_network_access_enabled = false + ssl_enforcement_enabled = true + } + + resource "azurerm_mariadb_firewall_rule" "example" { + name = "test-rule" + server_name = azurerm_mariadb_server.example.name + start_ip_address = "40.112.0.0" + end_ip_address = "40.112.255.255" + } +`, + expected: database.Database{ + MariaDBServers: []database.MariaDBServer{ + { + Metadata: defsecTypes.NewTestMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewTestMetadata(), + EnableSSLEnforcement: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + MinimumTLSVersion: defsecTypes.String("", defsecTypes.NewTestMetadata()), + EnablePublicNetworkAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + FirewallRules: []database.FirewallRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + StartIP: defsecTypes.String("40.112.0.0", defsecTypes.NewTestMetadata()), + EndIP: defsecTypes.String("40.112.255.255", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + }, + { + name: "mysql", + terraform: ` + resource "azurerm_mysql_server" "example" { + public_network_access_enabled = true + ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced = "TLS1_2" + } + + resource "azurerm_mysql_firewall_rule" "example" { + server_name = azurerm_mysql_server.example.name + start_ip_address = "40.112.8.12" + end_ip_address = "40.112.8.12" + } + `, + expected: database.Database{ + MySQLServers: []database.MySQLServer{ + { + Metadata: defsecTypes.NewTestMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewTestMetadata(), + EnableSSLEnforcement: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + MinimumTLSVersion: defsecTypes.String("TLS1_2", defsecTypes.NewTestMetadata()), + EnablePublicNetworkAccess: 
defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + FirewallRules: []database.FirewallRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + StartIP: defsecTypes.String("40.112.8.12", defsecTypes.NewTestMetadata()), + EndIP: defsecTypes.String("40.112.8.12", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + }, + { + name: "ms sql", + terraform: ` + resource "azurerm_mssql_server" "example" { + name = "mssqlserver" + minimum_tls_version = "1.2" + public_network_access_enabled = false + } + + resource "azurerm_mssql_firewall_rule" "example" { + name = "FirewallRule1" + server_id = azurerm_mssql_server.example.id + start_ip_address = "10.0.17.62" + end_ip_address = "10.0.17.62" + } + + resource "azurerm_mssql_server_security_alert_policy" "example" { + resource_group_name = azurerm_resource_group.example.name + server_name = azurerm_mssql_server.example.name + disabled_alerts = [ + "Sql_Injection", + "Data_Exfiltration" + ] + email_account_admins = true + email_addresses = [ + "example@example.com" + ] + } + + resource "azurerm_mssql_server_extended_auditing_policy" "example" { + server_id = azurerm_mssql_server.example.id + retention_in_days = 6 + } + `, + expected: database.Database{ + MSSQLServers: []database.MSSQLServer{ + { + Metadata: defsecTypes.NewTestMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewTestMetadata(), + MinimumTLSVersion: defsecTypes.String("1.2", defsecTypes.NewTestMetadata()), + EnablePublicNetworkAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + EnableSSLEnforcement: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + FirewallRules: []database.FirewallRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + StartIP: defsecTypes.String("10.0.17.62", defsecTypes.NewTestMetadata()), + EndIP: defsecTypes.String("10.0.17.62", defsecTypes.NewTestMetadata()), + }, + }, + }, + ExtendedAuditingPolicies: []database.ExtendedAuditingPolicy{ + { + Metadata: defsecTypes.NewTestMetadata(), + 
RetentionInDays: defsecTypes.Int(6, defsecTypes.NewTestMetadata()), + }, + }, + SecurityAlertPolicies: []database.SecurityAlertPolicy{ + { + Metadata: defsecTypes.NewTestMetadata(), + EmailAddresses: []defsecTypes.StringValue{ + defsecTypes.String("example@example.com", defsecTypes.NewTestMetadata()), + }, + DisabledAlerts: []defsecTypes.StringValue{ + defsecTypes.String("Sql_Injection", defsecTypes.NewTestMetadata()), + defsecTypes.String("Data_Exfiltration", defsecTypes.NewTestMetadata()), + }, + EmailAccountAdmins: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_postgresql_server" "example" { + public_network_access_enabled = true + ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced = "TLS1_2" + } + + resource "azurerm_postgresql_configuration" "example" { + name = "log_connections" + server_name = azurerm_postgresql_server.example.name + value = "on" + } + + resource "azurerm_postgresql_configuration" "example" { + name = "log_checkpoints" + server_name = azurerm_postgresql_server.example.name + value = "on" + } + + resource "azurerm_postgresql_configuration" "example" { + name = "connection_throttling" + server_name = azurerm_postgresql_server.example.name + value = "on" + } + + resource "azurerm_postgresql_firewall_rule" "example" { + name = "office" + server_name = azurerm_postgresql_server.example.name + start_ip_address = "40.112.8.12" + end_ip_address = "40.112.8.12" + } + + resource "azurerm_mariadb_server" "example" { + public_network_access_enabled = false + ssl_enforcement_enabled = true + } + + resource "azurerm_mariadb_firewall_rule" "example" { + name = "test-rule" + server_name = 
azurerm_mariadb_server.example.name + start_ip_address = "40.112.0.0" + end_ip_address = "40.112.255.255" + } + + resource "azurerm_mysql_server" "example" { + public_network_access_enabled = true + ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced = "TLS1_2" + } + + resource "azurerm_mysql_firewall_rule" "example" { + server_name = azurerm_mysql_server.example.name + start_ip_address = "40.112.8.12" + end_ip_address = "40.112.8.12" + } + + resource "azurerm_mssql_server" "example" { + name = "mssqlserver" + public_network_access_enabled = false + minimum_tls_version = "1.2" + } + + resource "azurerm_mssql_firewall_rule" "example" { + name = "FirewallRule1" + server_id = azurerm_mssql_server.example.id + start_ip_address = "10.0.17.62" + end_ip_address = "10.0.17.62" + } + + resource "azurerm_mssql_server_security_alert_policy" "example" { + server_name = azurerm_mssql_server.example.name + disabled_alerts = [ + "Sql_Injection", + "Data_Exfiltration" + ] + email_account_admins = true + email_addresses = [ + "example@example.com" + ] + } + + resource "azurerm_mssql_server_extended_auditing_policy" "example" { + server_id = azurerm_mssql_server.example.id + retention_in_days = 6 + } + ` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.PostgreSQLServers, 1) + require.Len(t, adapted.MariaDBServers, 1) + require.Len(t, adapted.MySQLServers, 1) + require.Len(t, adapted.MSSQLServers, 1) + + postgres := adapted.PostgreSQLServers[0] + mariadb := adapted.MariaDBServers[0] + mysql := adapted.MySQLServers[0] + mssql := adapted.MSSQLServers[0] + + assert.Equal(t, 2, postgres.Metadata.Range().GetStartLine()) + assert.Equal(t, 6, postgres.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, postgres.EnablePublicNetworkAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, postgres.EnablePublicNetworkAccess.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, 
postgres.EnableSSLEnforcement.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, postgres.EnableSSLEnforcement.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 5, postgres.MinimumTLSVersion.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 5, postgres.MinimumTLSVersion.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, postgres.Config.LogConnections.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 11, postgres.Config.LogConnections.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 17, postgres.Config.LogCheckpoints.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 17, postgres.Config.LogCheckpoints.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 23, postgres.Config.ConnectionThrottling.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 23, postgres.Config.ConnectionThrottling.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 26, postgres.FirewallRules[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 31, postgres.FirewallRules[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 29, postgres.FirewallRules[0].StartIP.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 29, postgres.FirewallRules[0].StartIP.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 30, postgres.FirewallRules[0].EndIP.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 30, postgres.FirewallRules[0].EndIP.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 33, mariadb.Metadata.Range().GetStartLine()) + assert.Equal(t, 36, mariadb.Metadata.Range().GetEndLine()) + + assert.Equal(t, 34, mariadb.EnablePublicNetworkAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 34, mariadb.EnablePublicNetworkAccess.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 35, mariadb.EnableSSLEnforcement.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 35, mariadb.EnableSSLEnforcement.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 38, mariadb.FirewallRules[0].Metadata.Range().GetStartLine()) + 
assert.Equal(t, 43, mariadb.FirewallRules[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 41, mariadb.FirewallRules[0].StartIP.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 41, mariadb.FirewallRules[0].StartIP.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 42, mariadb.FirewallRules[0].EndIP.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 42, mariadb.FirewallRules[0].EndIP.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 45, mysql.Metadata.Range().GetStartLine()) + assert.Equal(t, 49, mysql.Metadata.Range().GetEndLine()) + + assert.Equal(t, 46, mysql.EnablePublicNetworkAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 46, mysql.EnablePublicNetworkAccess.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 47, mysql.EnableSSLEnforcement.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 47, mysql.EnableSSLEnforcement.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 48, mysql.MinimumTLSVersion.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 48, mysql.MinimumTLSVersion.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 51, mysql.FirewallRules[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 55, mysql.FirewallRules[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 53, mysql.FirewallRules[0].StartIP.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 53, mysql.FirewallRules[0].StartIP.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 54, mysql.FirewallRules[0].EndIP.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 54, mysql.FirewallRules[0].EndIP.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 57, mssql.Metadata.Range().GetStartLine()) + assert.Equal(t, 61, mssql.Metadata.Range().GetEndLine()) + + assert.Equal(t, 59, mssql.EnablePublicNetworkAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 59, mssql.EnablePublicNetworkAccess.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 60, mssql.MinimumTLSVersion.GetMetadata().Range().GetStartLine()) 
+ assert.Equal(t, 60, mssql.MinimumTLSVersion.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 63, mssql.FirewallRules[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 68, mssql.FirewallRules[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 66, mssql.FirewallRules[0].StartIP.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 66, mssql.FirewallRules[0].StartIP.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 67, mssql.FirewallRules[0].EndIP.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 67, mssql.FirewallRules[0].EndIP.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 70, mssql.SecurityAlertPolicies[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 80, mssql.SecurityAlertPolicies[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 72, mssql.SecurityAlertPolicies[0].DisabledAlerts[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 75, mssql.SecurityAlertPolicies[0].DisabledAlerts[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 76, mssql.SecurityAlertPolicies[0].EmailAccountAdmins.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 76, mssql.SecurityAlertPolicies[0].EmailAccountAdmins.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 77, mssql.SecurityAlertPolicies[0].EmailAddresses[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 79, mssql.SecurityAlertPolicies[0].EmailAddresses[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 82, mssql.ExtendedAuditingPolicies[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 85, mssql.ExtendedAuditingPolicies[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 84, mssql.ExtendedAuditingPolicies[0].RetentionInDays.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 84, mssql.ExtendedAuditingPolicies[0].RetentionInDays.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/azure/datafactory/adapt.go b/internal/adapters/terraform/azure/datafactory/adapt.go new file mode 100644 index 
000000000000..7fd12d1e4218 --- /dev/null +++ b/internal/adapters/terraform/azure/datafactory/adapt.go @@ -0,0 +1,33 @@ +package datafactory + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/datafactory" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) datafactory.DataFactory { + return datafactory.DataFactory{ + DataFactories: adaptFactories(modules), + } +} + +func adaptFactories(modules terraform.Modules) []datafactory.Factory { + var factories []datafactory.Factory + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_data_factory") { + factories = append(factories, adaptFactory(resource)) + } + } + return factories +} + +func adaptFactory(resource *terraform.Block) datafactory.Factory { + enablePublicNetworkAttr := resource.GetAttribute("public_network_enabled") + enablePublicNetworkVal := enablePublicNetworkAttr.AsBoolValueOrDefault(true, resource) + + return datafactory.Factory{ + Metadata: resource.GetMetadata(), + EnablePublicNetwork: enablePublicNetworkVal, + } +} diff --git a/internal/adapters/terraform/azure/datafactory/adapt_test.go b/internal/adapters/terraform/azure/datafactory/adapt_test.go new file mode 100644 index 000000000000..29bc62b09a0b --- /dev/null +++ b/internal/adapters/terraform/azure/datafactory/adapt_test.go @@ -0,0 +1,79 @@ +package datafactory + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/datafactory" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptFactory(t *testing.T) { + tests := []struct { + name string + terraform string + expected datafactory.Factory + }{ + { + name: "defined", + terraform: ` + 
resource "azurerm_data_factory" "example" { + name = "example" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + public_network_enabled = false + } +`, + expected: datafactory.Factory{ + Metadata: defsecTypes.NewTestMetadata(), + EnablePublicNetwork: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "default", + terraform: ` + resource "azurerm_data_factory" "example" { + name = "example" + } +`, + expected: datafactory.Factory{ + Metadata: defsecTypes.NewTestMetadata(), + EnablePublicNetwork: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptFactory(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_data_factory" "example" { + name = "example" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + public_network_enabled = false + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.DataFactories, 1) + dataFactory := adapted.DataFactories[0] + + assert.Equal(t, 6, dataFactory.EnablePublicNetwork.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, dataFactory.EnablePublicNetwork.GetMetadata().Range().GetEndLine()) + +} diff --git a/internal/adapters/terraform/azure/datalake/adapt.go b/internal/adapters/terraform/azure/datalake/adapt.go new file mode 100644 index 000000000000..b55bf7a2e581 --- /dev/null +++ b/internal/adapters/terraform/azure/datalake/adapt.go @@ -0,0 +1,38 @@ +package datalake + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/datalake" + "github.com/aquasecurity/defsec/pkg/terraform" + 
"github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) datalake.DataLake { + return datalake.DataLake{ + Stores: adaptStores(modules), + } +} + +func adaptStores(modules terraform.Modules) []datalake.Store { + var stores []datalake.Store + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_data_lake_store") { + stores = append(stores, adaptStore(resource)) + } + } + return stores +} + +func adaptStore(resource *terraform.Block) datalake.Store { + store := datalake.Store{ + Metadata: resource.GetMetadata(), + EnableEncryption: types.BoolDefault(true, resource.GetMetadata()), + } + encryptionStateAttr := resource.GetAttribute("encryption_state") + if encryptionStateAttr.Equals("Disabled") { + store.EnableEncryption = types.Bool(false, encryptionStateAttr.GetMetadata()) + } else if encryptionStateAttr.Equals("Enabled") { + store.EnableEncryption = types.Bool(true, encryptionStateAttr.GetMetadata()) + } + return store +} diff --git a/internal/adapters/terraform/azure/datalake/adapt_test.go b/internal/adapters/terraform/azure/datalake/adapt_test.go new file mode 100644 index 000000000000..f0c13133ceeb --- /dev/null +++ b/internal/adapters/terraform/azure/datalake/adapt_test.go @@ -0,0 +1,83 @@ +package datalake + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/datalake" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptStore(t *testing.T) { + tests := []struct { + name string + terraform string + expected datalake.Store + }{ + { + name: "enabled", + terraform: ` + resource "azurerm_data_lake_store" "good_example" { + encryption_state = "Enabled" + } +`, + expected: 
datalake.Store{ + Metadata: defsecTypes.NewTestMetadata(), + EnableEncryption: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "disabled", + terraform: ` + resource "azurerm_data_lake_store" "good_example" { + encryption_state = "Disabled" + } +`, + expected: datalake.Store{ + Metadata: defsecTypes.NewTestMetadata(), + EnableEncryption: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "enabled by default", + terraform: ` + resource "azurerm_data_lake_store" "good_example" { + } +`, + expected: datalake.Store{ + Metadata: defsecTypes.NewTestMetadata(), + EnableEncryption: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptStore(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_data_lake_store" "good_example" { + encryption_state = "Disabled" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Stores, 1) + store := adapted.Stores[0] + + assert.Equal(t, 3, store.EnableEncryption.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, store.EnableEncryption.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/azure/keyvault/adapt.go b/internal/adapters/terraform/azure/keyvault/adapt.go new file mode 100644 index 000000000000..c78d39115bff --- /dev/null +++ b/internal/adapters/terraform/azure/keyvault/adapt.go @@ -0,0 +1,159 @@ +package keyvault + +import ( + "time" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/terraform" + + "github.com/aquasecurity/defsec/pkg/providers/azure/keyvault" +) + +func Adapt(modules terraform.Modules) keyvault.KeyVault { + 
adapter := adapter{ + vaultSecretIDs: modules.GetChildResourceIDMapByType("azurerm_key_vault_secret"), + vaultKeyIDs: modules.GetChildResourceIDMapByType("azurerm_key_vault_key"), + } + + return keyvault.KeyVault{ + Vaults: adapter.adaptVaults(modules), + } +} + +type adapter struct { + vaultSecretIDs terraform.ResourceIDResolutions + vaultKeyIDs terraform.ResourceIDResolutions +} + +func (a *adapter) adaptVaults(modules terraform.Modules) []keyvault.Vault { + + var vaults []keyvault.Vault + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_key_vault") { + vaults = append(vaults, a.adaptVault(resource, module)) + + } + } + + orphanResources := modules.GetResourceByIDs(a.vaultSecretIDs.Orphans()...) + + if len(orphanResources) > 0 { + orphanage := keyvault.Vault{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Secrets: nil, + Keys: nil, + EnablePurgeProtection: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + SoftDeleteRetentionDays: defsecTypes.IntDefault(0, defsecTypes.NewUnmanagedMetadata()), + NetworkACLs: keyvault.NetworkACLs{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + DefaultAction: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + } + for _, secretResource := range orphanResources { + orphanage.Secrets = append(orphanage.Secrets, adaptSecret(secretResource)) + } + vaults = append(vaults, orphanage) + } + + orphanResources = modules.GetResourceByIDs(a.vaultKeyIDs.Orphans()...) 
+ + if len(orphanResources) > 0 { + orphanage := keyvault.Vault{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Secrets: nil, + Keys: nil, + EnablePurgeProtection: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + SoftDeleteRetentionDays: defsecTypes.IntDefault(0, defsecTypes.NewUnmanagedMetadata()), + NetworkACLs: keyvault.NetworkACLs{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + DefaultAction: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + } + for _, secretResource := range orphanResources { + orphanage.Keys = append(orphanage.Keys, adaptKey(secretResource)) + } + vaults = append(vaults, orphanage) + } + + return vaults +} + +func (a *adapter) adaptVault(resource *terraform.Block, module *terraform.Module) keyvault.Vault { + var keys []keyvault.Key + var secrets []keyvault.Secret + + defaultActionVal := defsecTypes.StringDefault("", resource.GetMetadata()) + + secretBlocks := module.GetReferencingResources(resource, "azurerm_key_vault_secret", "key_vault_id") + for _, secretBlock := range secretBlocks { + a.vaultSecretIDs.Resolve(secretBlock.ID()) + secrets = append(secrets, adaptSecret(secretBlock)) + } + + keyBlocks := module.GetReferencingResources(resource, "azurerm_key_vault_key", "key_vault_id") + for _, keyBlock := range keyBlocks { + a.vaultKeyIDs.Resolve(keyBlock.ID()) + keys = append(keys, adaptKey(keyBlock)) + } + + purgeProtectionAttr := resource.GetAttribute("purge_protection_enabled") + purgeProtectionVal := purgeProtectionAttr.AsBoolValueOrDefault(false, resource) + + softDeleteRetentionDaysAttr := resource.GetAttribute("soft_delete_retention_days") + softDeleteRetentionDaysVal := softDeleteRetentionDaysAttr.AsIntValueOrDefault(0, resource) + + aclMetadata := defsecTypes.NewUnmanagedMetadata() + if aclBlock := resource.GetBlock("network_acls"); aclBlock.IsNotNil() { + aclMetadata = aclBlock.GetMetadata() + defaultActionAttr := aclBlock.GetAttribute("default_action") + defaultActionVal = 
defaultActionAttr.AsStringValueOrDefault("", resource.GetBlock("network_acls")) + } + + return keyvault.Vault{ + Metadata: resource.GetMetadata(), + Secrets: secrets, + Keys: keys, + EnablePurgeProtection: purgeProtectionVal, + SoftDeleteRetentionDays: softDeleteRetentionDaysVal, + NetworkACLs: keyvault.NetworkACLs{ + Metadata: aclMetadata, + DefaultAction: defaultActionVal, + }, + } +} + +func adaptSecret(resource *terraform.Block) keyvault.Secret { + contentTypeAttr := resource.GetAttribute("content_type") + contentTypeVal := contentTypeAttr.AsStringValueOrDefault("", resource) + + return keyvault.Secret{ + Metadata: resource.GetMetadata(), + ContentType: contentTypeVal, + ExpiryDate: resolveExpiryDate(resource), + } +} + +func adaptKey(resource *terraform.Block) keyvault.Key { + + return keyvault.Key{ + Metadata: resource.GetMetadata(), + ExpiryDate: resolveExpiryDate(resource), + } +} + +func resolveExpiryDate(resource *terraform.Block) defsecTypes.TimeValue { + expiryDateAttr := resource.GetAttribute("expiration_date") + expiryDateVal := defsecTypes.TimeDefault(time.Time{}, resource.GetMetadata()) + + if expiryDateAttr.IsString() { + expiryDateString := expiryDateAttr.Value().AsString() + if expiryDate, err := time.Parse(time.RFC3339, expiryDateString); err == nil { + expiryDateVal = defsecTypes.Time(expiryDate, expiryDateAttr.GetMetadata()) + } + } else if expiryDateAttr.IsNotNil() { + expiryDateVal = defsecTypes.TimeUnresolvable(expiryDateAttr.GetMetadata()) + } + + return expiryDateVal +} diff --git a/internal/adapters/terraform/azure/keyvault/adapt_test.go b/internal/adapters/terraform/azure/keyvault/adapt_test.go new file mode 100644 index 000000000000..6665f4ce4a2f --- /dev/null +++ b/internal/adapters/terraform/azure/keyvault/adapt_test.go @@ -0,0 +1,271 @@ +package keyvault + +import ( + "testing" + "time" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/keyvault" + + 
"github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected keyvault.KeyVault + }{ + { + name: "defined", + terraform: ` + resource "azurerm_key_vault" "example" { + name = "examplekeyvault" + enabled_for_disk_encryption = true + soft_delete_retention_days = 7 + purge_protection_enabled = true + + network_acls { + bypass = "AzureServices" + default_action = "Deny" + } + } +`, + expected: keyvault.KeyVault{ + Vaults: []keyvault.Vault{ + { + Metadata: defsecTypes.NewTestMetadata(), + EnablePurgeProtection: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + SoftDeleteRetentionDays: defsecTypes.Int(7, defsecTypes.NewTestMetadata()), + NetworkACLs: keyvault.NetworkACLs{ + Metadata: defsecTypes.NewTestMetadata(), + DefaultAction: defsecTypes.String("Deny", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + { + name: "defaults", + terraform: ` + resource "azurerm_key_vault" "example" { + } +`, + expected: keyvault.KeyVault{ + Vaults: []keyvault.Vault{ + { + Metadata: defsecTypes.NewTestMetadata(), + EnablePurgeProtection: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + SoftDeleteRetentionDays: defsecTypes.Int(0, defsecTypes.NewTestMetadata()), + NetworkACLs: keyvault.NetworkACLs{ + Metadata: defsecTypes.NewTestMetadata(), + DefaultAction: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptSecret(t *testing.T) { + tests := []struct { + name string + terraform string + expected 
keyvault.Secret + }{ + { + name: "defaults", + terraform: ` + resource "azurerm_key_vault_secret" "example" { + } +`, + expected: keyvault.Secret{ + Metadata: defsecTypes.NewTestMetadata(), + ContentType: defsecTypes.String("", defsecTypes.NewTestMetadata()), + ExpiryDate: defsecTypes.Time(time.Time{}, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defined", + terraform: ` + resource "azurerm_key_vault_secret" "example" { + content_type = "password" + expiration_date = "1982-12-31T00:00:00Z" + } +`, + expected: keyvault.Secret{ + Metadata: defsecTypes.NewTestMetadata(), + ContentType: defsecTypes.String("password", defsecTypes.NewTestMetadata()), + ExpiryDate: defsecTypes.Time(func(timeVal string) time.Time { + parsed, _ := time.Parse(time.RFC3339, timeVal) + return parsed + }("1982-12-31T00:00:00Z"), defsecTypes.NewTestMetadata())}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptSecret(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptKey(t *testing.T) { + tests := []struct { + name string + terraform string + expected keyvault.Key + }{ + { + name: "defined", + terraform: ` + resource "azurerm_key_vault_key" "example" { + name = "generated-certificate" + expiration_date = "1982-12-31T00:00:00Z" + } +`, + expected: keyvault.Key{ + Metadata: defsecTypes.NewTestMetadata(), + ExpiryDate: defsecTypes.Time(func(timeVal string) time.Time { + parsed, _ := time.Parse(time.RFC3339, timeVal) + return parsed + }("1982-12-31T00:00:00Z"), defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defaults", + terraform: ` + resource "azurerm_key_vault_key" "example" { + } +`, + expected: keyvault.Key{ + Metadata: defsecTypes.NewTestMetadata(), + ExpiryDate: defsecTypes.Time(time.Time{}, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "expiration date refers to the resource", + 
terraform: ` +terraform { + required_version = ">=1.3.0" + required_providers { + azurerm = { + source = "hashicorp/azurerm" + version = ">=3.0.0" + } + time = { + source = "hashicorp/time" + version = ">=0.9.0" + } + } +} + +resource "azurerm_key_vault" "this" { + name = "keyvault" + location = "us-west" + resource_group_name = "resource-group" + tenant_id = "tenant-id" + sku_name = "Standard" +} + +resource "time_offset" "expiry" { + offset_years = 1 + base_rfc3339 = "YYYY-MM-DDTHH:MM:SSZ" +} + +resource "azurerm_key_vault_key" "this" { + name = "key" + key_vault_id = azurerm_key_vault.this.id + key_type = "RSA" + key_size = 2048 + key_opts = ["decrypt", "encrypt", "sign", "unwrapKey", "verify", "wrapKey"] + expiration_date = time_offset.expiry.rfc3339 +} +`, + expected: keyvault.Key{ + Metadata: defsecTypes.NewTestMetadata(), + ExpiryDate: defsecTypes.TimeUnresolvable(defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptKey(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_key_vault" "example" { + name = "examplekeyvault" + enabled_for_disk_encryption = true + soft_delete_retention_days = 7 + purge_protection_enabled = true + + network_acls { + bypass = "AzureServices" + default_action = "Deny" + } + } + + resource "azurerm_key_vault_key" "example" { + key_vault_id = azurerm_key_vault.example.id + name = "generated-certificate" + expiration_date = "1982-12-31T00:00:00Z" + } + + resource "azurerm_key_vault_secret" "example" { + key_vault_id = azurerm_key_vault.example.id + content_type = "password" + expiration_date = "1982-12-31T00:00:00Z" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Vaults, 1) + require.Len(t, 
adapted.Vaults[0].Keys, 1) + require.Len(t, adapted.Vaults[0].Secrets, 1) + + vault := adapted.Vaults[0] + key := vault.Keys[0] + secret := vault.Secrets[0] + + assert.Equal(t, 5, vault.SoftDeleteRetentionDays.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 5, vault.SoftDeleteRetentionDays.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 6, vault.EnablePurgeProtection.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, vault.EnablePurgeProtection.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 10, vault.NetworkACLs.DefaultAction.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, vault.NetworkACLs.DefaultAction.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 17, key.ExpiryDate.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 17, key.ExpiryDate.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 22, secret.ContentType.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 22, secret.ContentType.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 23, secret.ExpiryDate.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 23, secret.ExpiryDate.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/azure/monitor/adapt.go b/internal/adapters/terraform/azure/monitor/adapt.go new file mode 100644 index 000000000000..f70648d92038 --- /dev/null +++ b/internal/adapters/terraform/azure/monitor/adapt.go @@ -0,0 +1,56 @@ +package monitor + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/monitor" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) monitor.Monitor { + return monitor.Monitor{ + LogProfiles: adaptLogProfiles(modules), + } +} + +func adaptLogProfiles(modules terraform.Modules) []monitor.LogProfile { + var logProfiles []monitor.LogProfile + + for _, module := range modules { + for _, resource := range 
module.GetResourcesByType("azurerm_monitor_log_profile") { + logProfiles = append(logProfiles, adaptLogProfile(resource)) + } + } + return logProfiles +} + +func adaptLogProfile(resource *terraform.Block) monitor.LogProfile { + + logProfile := monitor.LogProfile{ + Metadata: resource.GetMetadata(), + RetentionPolicy: monitor.RetentionPolicy{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + Days: defsecTypes.IntDefault(0, resource.GetMetadata()), + }, + Categories: nil, + Locations: nil, + } + + if retentionPolicyBlock := resource.GetBlock("retention_policy"); retentionPolicyBlock.IsNotNil() { + logProfile.RetentionPolicy.Metadata = retentionPolicyBlock.GetMetadata() + enabledAttr := retentionPolicyBlock.GetAttribute("enabled") + logProfile.RetentionPolicy.Enabled = enabledAttr.AsBoolValueOrDefault(false, resource) + daysAttr := retentionPolicyBlock.GetAttribute("days") + logProfile.RetentionPolicy.Days = daysAttr.AsIntValueOrDefault(0, resource) + } + + if categoriesAttr := resource.GetAttribute("categories"); categoriesAttr.IsNotNil() { + logProfile.Categories = categoriesAttr.AsStringValues() + } + + if locationsAttr := resource.GetAttribute("locations"); locationsAttr.IsNotNil() { + logProfile.Locations = locationsAttr.AsStringValues() + } + + return logProfile +} diff --git a/internal/adapters/terraform/azure/monitor/adapt_test.go b/internal/adapters/terraform/azure/monitor/adapt_test.go new file mode 100644 index 000000000000..509c6eeb002b --- /dev/null +++ b/internal/adapters/terraform/azure/monitor/adapt_test.go @@ -0,0 +1,128 @@ +package monitor + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/monitor" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + 
"github.com/stretchr/testify/require" +) + +func Test_adaptLogProfile(t *testing.T) { + tests := []struct { + name string + terraform string + expected monitor.LogProfile + }{ + { + name: "defined", + terraform: ` + resource "azurerm_monitor_log_profile" "example" { + categories = [ + "Action", + "Delete", + "Write", + ] + + retention_policy { + enabled = true + days = 365 + } + + locations = [ + "eastus", + "eastus2", + "southcentralus" + ] + } +`, + expected: monitor.LogProfile{ + Metadata: defsecTypes.NewTestMetadata(), + Categories: []defsecTypes.StringValue{ + defsecTypes.String("Action", defsecTypes.NewTestMetadata()), + defsecTypes.String("Delete", defsecTypes.NewTestMetadata()), + defsecTypes.String("Write", defsecTypes.NewTestMetadata()), + }, + RetentionPolicy: monitor.RetentionPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Days: defsecTypes.Int(365, defsecTypes.NewTestMetadata()), + }, + Locations: []defsecTypes.StringValue{ + defsecTypes.String("eastus", defsecTypes.NewTestMetadata()), + defsecTypes.String("eastus2", defsecTypes.NewTestMetadata()), + defsecTypes.String("southcentralus", defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "default", + terraform: ` + resource "azurerm_monitor_log_profile" "example" { + } +`, + expected: monitor.LogProfile{ + Metadata: defsecTypes.NewTestMetadata(), + RetentionPolicy: monitor.RetentionPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Days: defsecTypes.Int(0, defsecTypes.NewTestMetadata()), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptLogProfile(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource 
"azurerm_monitor_log_profile" "example" { + categories = [ + "Action", + "Delete", + "Write", + ] + + retention_policy { + enabled = true + days = 365 + } + + locations = [ + "eastus", + "eastus2", + "southcentralus" + ] + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.LogProfiles, 1) + logProfile := adapted.LogProfiles[0] + + assert.Equal(t, 3, logProfile.Categories[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, logProfile.Categories[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 10, logProfile.RetentionPolicy.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, logProfile.RetentionPolicy.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, logProfile.RetentionPolicy.Days.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 11, logProfile.RetentionPolicy.Days.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 14, logProfile.Locations[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 18, logProfile.Locations[0].GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/azure/network/adapt.go b/internal/adapters/terraform/azure/network/adapt.go new file mode 100644 index 000000000000..899c0fe767d5 --- /dev/null +++ b/internal/adapters/terraform/azure/network/adapt.go @@ -0,0 +1,220 @@ +package network + +import ( + "strconv" + "strings" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/terraform" + + "github.com/aquasecurity/defsec/pkg/providers/azure/network" + + "github.com/google/uuid" +) + +func Adapt(modules terraform.Modules) network.Network { + return network.Network{ + SecurityGroups: (&adapter{ + modules: modules, + groups: make(map[string]network.SecurityGroup), + }).adaptSecurityGroups(), + NetworkWatcherFlowLogs: adaptWatcherLogs(modules), + } +} + +type adapter struct { + modules terraform.Modules + groups 
map[string]network.SecurityGroup +} + +func (a *adapter) adaptSecurityGroups() []network.SecurityGroup { + + for _, module := range a.modules { + for _, resource := range module.GetResourcesByType("azurerm_network_security_group") { + a.adaptSecurityGroup(resource) + } + } + + for _, ruleBlock := range a.modules.GetResourcesByType("azurerm_network_security_rule") { + rule := a.adaptSGRule(ruleBlock) + + groupAttr := ruleBlock.GetAttribute("network_security_group_name") + if groupAttr.IsNotNil() { + if referencedBlock, err := a.modules.GetReferencedBlock(groupAttr, ruleBlock); err == nil { + if group, ok := a.groups[referencedBlock.ID()]; ok { + group.Rules = append(group.Rules, rule) + a.groups[referencedBlock.ID()] = group + continue + } + } + + } + + a.groups[uuid.NewString()] = network.SecurityGroup{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Rules: []network.SecurityGroupRule{rule}, + } + } + + var securityGroups []network.SecurityGroup + for _, group := range a.groups { + securityGroups = append(securityGroups, group) + } + + return securityGroups +} + +func adaptWatcherLogs(modules terraform.Modules) []network.NetworkWatcherFlowLog { + var watcherLogs []network.NetworkWatcherFlowLog + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_network_watcher_flow_log") { + watcherLogs = append(watcherLogs, adaptWatcherLog(resource)) + } + } + return watcherLogs +} + +func (a *adapter) adaptSecurityGroup(resource *terraform.Block) { + var rules []network.SecurityGroupRule + for _, ruleBlock := range resource.GetBlocks("security_rule") { + rules = append(rules, a.adaptSGRule(ruleBlock)) + } + a.groups[resource.ID()] = network.SecurityGroup{ + Metadata: resource.GetMetadata(), + Rules: rules, + } +} + +func (a *adapter) adaptSGRule(ruleBlock *terraform.Block) network.SecurityGroupRule { + + rule := network.SecurityGroupRule{ + Metadata: ruleBlock.GetMetadata(), + Outbound: defsecTypes.BoolDefault(false, 
ruleBlock.GetMetadata()), + Allow: defsecTypes.BoolDefault(true, ruleBlock.GetMetadata()), + SourceAddresses: nil, + SourcePorts: nil, + DestinationAddresses: nil, + DestinationPorts: nil, + Protocol: ruleBlock.GetAttribute("protocol").AsStringValueOrDefault("", ruleBlock), + } + + accessAttr := ruleBlock.GetAttribute("access") + if accessAttr.Equals("Allow") { + rule.Allow = defsecTypes.Bool(true, accessAttr.GetMetadata()) + } else if accessAttr.Equals("Deny") { + rule.Allow = defsecTypes.Bool(false, accessAttr.GetMetadata()) + } + + directionAttr := ruleBlock.GetAttribute("direction") + if directionAttr.Equals("Inbound") { + rule.Outbound = defsecTypes.Bool(false, directionAttr.GetMetadata()) + } else if directionAttr.Equals("Outbound") { + rule.Outbound = defsecTypes.Bool(true, directionAttr.GetMetadata()) + } + + a.adaptSource(ruleBlock, &rule) + a.adaptDestination(ruleBlock, &rule) + + return rule +} + +func (a *adapter) adaptSource(ruleBlock *terraform.Block, rule *network.SecurityGroupRule) { + if sourceAddressAttr := ruleBlock.GetAttribute("source_address_prefix"); sourceAddressAttr.IsString() { + rule.SourceAddresses = append(rule.SourceAddresses, sourceAddressAttr.AsStringValueOrDefault("", ruleBlock)) + } else if sourceAddressPrefixesAttr := ruleBlock.GetAttribute("source_address_prefixes"); sourceAddressPrefixesAttr.IsNotNil() { + rule.SourceAddresses = append(rule.SourceAddresses, sourceAddressPrefixesAttr.AsStringValues()...) 
+ } + + if sourcePortRangesAttr := ruleBlock.GetAttribute("source_port_ranges"); sourcePortRangesAttr.IsNotNil() { + ports := sourcePortRangesAttr.AsStringValues() + for _, value := range ports { + rule.SourcePorts = append(rule.SourcePorts, expandRange(value.Value(), value.GetMetadata())) + } + } else if sourcePortRangeAttr := ruleBlock.GetAttribute("source_port_range"); sourcePortRangeAttr.IsString() { + rule.SourcePorts = append(rule.SourcePorts, expandRange(sourcePortRangeAttr.Value().AsString(), sourcePortRangeAttr.GetMetadata())) + } else if sourcePortRangeAttr := ruleBlock.GetAttribute("source_port_range"); sourcePortRangeAttr.IsNumber() { + f := sourcePortRangeAttr.AsNumber() + rule.SourcePorts = append(rule.SourcePorts, network.PortRange{ + Metadata: sourcePortRangeAttr.GetMetadata(), + Start: int(f), + End: int(f), + }) + } +} + +func (a *adapter) adaptDestination(ruleBlock *terraform.Block, rule *network.SecurityGroupRule) { + if destAddressAttr := ruleBlock.GetAttribute("destination_address_prefix"); destAddressAttr.IsString() { + rule.DestinationAddresses = append(rule.DestinationAddresses, destAddressAttr.AsStringValueOrDefault("", ruleBlock)) + } else if destAddressPrefixesAttr := ruleBlock.GetAttribute("destination_address_prefixes"); destAddressPrefixesAttr.IsNotNil() { + rule.DestinationAddresses = append(rule.DestinationAddresses, destAddressPrefixesAttr.AsStringValues()...) 
+ } + + if destPortRangesAttr := ruleBlock.GetAttribute("destination_port_ranges"); destPortRangesAttr.IsNotNil() { + ports := destPortRangesAttr.AsStringValues() + for _, value := range ports { + rule.DestinationPorts = append(rule.DestinationPorts, expandRange(value.Value(), destPortRangesAttr.GetMetadata())) + } + } else if destPortRangeAttr := ruleBlock.GetAttribute("destination_port_range"); destPortRangeAttr.IsString() { + rule.DestinationPorts = append(rule.DestinationPorts, expandRange(destPortRangeAttr.Value().AsString(), destPortRangeAttr.GetMetadata())) + } else if destPortRangeAttr := ruleBlock.GetAttribute("destination_port_range"); destPortRangeAttr.IsNumber() { + f := destPortRangeAttr.AsNumber() + rule.DestinationPorts = append(rule.DestinationPorts, network.PortRange{ + Metadata: destPortRangeAttr.GetMetadata(), + Start: int(f), + End: int(f), + }) + } +} + +func expandRange(r string, m defsecTypes.Metadata) network.PortRange { + start := 0 + end := 65535 + switch { + case r == "*": + case strings.Contains(r, "-"): + if parts := strings.Split(r, "-"); len(parts) == 2 { + if p1, err := strconv.ParseInt(parts[0], 10, 32); err == nil { + start = int(p1) + } + if p2, err := strconv.ParseInt(parts[1], 10, 32); err == nil { + end = int(p2) + } + } + default: + if val, err := strconv.ParseInt(r, 10, 32); err == nil { + start = int(val) + end = int(val) + } + } + + return network.PortRange{ + Metadata: m, + Start: start, + End: end, + } +} + +func adaptWatcherLog(resource *terraform.Block) network.NetworkWatcherFlowLog { + flowLog := network.NetworkWatcherFlowLog{ + Metadata: resource.GetMetadata(), + RetentionPolicy: network.RetentionPolicy{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + Days: defsecTypes.IntDefault(0, resource.GetMetadata()), + }, + } + + if retentionPolicyBlock := resource.GetBlock("retention_policy"); retentionPolicyBlock.IsNotNil() { + flowLog.RetentionPolicy.Metadata = 
retentionPolicyBlock.GetMetadata() + + enabledAttr := retentionPolicyBlock.GetAttribute("enabled") + flowLog.RetentionPolicy.Enabled = enabledAttr.AsBoolValueOrDefault(false, retentionPolicyBlock) + + daysAttr := retentionPolicyBlock.GetAttribute("days") + flowLog.RetentionPolicy.Days = daysAttr.AsIntValueOrDefault(0, retentionPolicyBlock) + } + + return flowLog +} diff --git a/internal/adapters/terraform/azure/network/adapt_test.go b/internal/adapters/terraform/azure/network/adapt_test.go new file mode 100644 index 000000000000..af27eb117d85 --- /dev/null +++ b/internal/adapters/terraform/azure/network/adapt_test.go @@ -0,0 +1,262 @@ +package network + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/network" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected network.Network + }{ + { + name: "defined", + terraform: ` + resource "azurerm_network_security_rule" "example" { + name = "example_security_rule" + network_security_group_name = azurerm_network_security_group.example.name + direction = "Inbound" + access = "Allow" + protocol = "TCP" + source_port_range = "*" + destination_port_ranges = ["3389"] + source_address_prefix = "4.53.160.75" + destination_address_prefix = "*" + } + + resource "azurerm_network_security_group" "example" { + name = "tf-appsecuritygroup" + } + + resource "azurerm_network_watcher_flow_log" "example" { + resource_group_name = azurerm_resource_group.example.name + name = "example-log" + + retention_policy { + enabled = true + days = 7 + } + } +`, + expected: network.Network{ + SecurityGroups: []network.SecurityGroup{ + { + Metadata: 
defsecTypes.NewTestMetadata(), + Rules: []network.SecurityGroupRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + Outbound: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Allow: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + SourceAddresses: []defsecTypes.StringValue{ + defsecTypes.String("4.53.160.75", defsecTypes.NewTestMetadata()), + }, + DestinationAddresses: []defsecTypes.StringValue{ + defsecTypes.String("*", defsecTypes.NewTestMetadata()), + }, + SourcePorts: []network.PortRange{ + { + Metadata: defsecTypes.NewTestMetadata(), + Start: 0, + End: 65535, + }, + }, + DestinationPorts: []network.PortRange{ + { + Metadata: defsecTypes.NewTestMetadata(), + Start: 3389, + End: 3389, + }, + }, + Protocol: defsecTypes.String("TCP", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + NetworkWatcherFlowLogs: []network.NetworkWatcherFlowLog{ + { + Metadata: defsecTypes.NewTestMetadata(), + RetentionPolicy: network.RetentionPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Days: defsecTypes.Int(7, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + { + name: "defaults", + terraform: ` + resource "azurerm_network_security_group" "example" { + name = "tf-appsecuritygroup" + security_rule { + } + } +`, + expected: network.Network{ + SecurityGroups: []network.SecurityGroup{ + { + Metadata: defsecTypes.NewTestMetadata(), + Rules: []network.SecurityGroupRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + Outbound: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Allow: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Protocol: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + 
+func Test_adaptWatcherLog(t *testing.T) { + tests := []struct { + name string + terraform string + expected network.NetworkWatcherFlowLog + }{ + { + name: "defined", + terraform: ` + resource "azurerm_network_watcher_flow_log" "watcher" { + retention_policy { + enabled = true + days = 90 + } + } +`, + expected: network.NetworkWatcherFlowLog{ + Metadata: defsecTypes.NewTestMetadata(), + RetentionPolicy: network.RetentionPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Days: defsecTypes.Int(90, defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "defaults", + terraform: ` + resource "azurerm_network_watcher_flow_log" "watcher" { + retention_policy { + } + } +`, + expected: network.NetworkWatcherFlowLog{ + Metadata: defsecTypes.NewTestMetadata(), + RetentionPolicy: network.RetentionPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Days: defsecTypes.Int(0, defsecTypes.NewTestMetadata()), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptWatcherLog(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_network_security_group" "example" { + name = "tf-appsecuritygroup" + } + + resource "azurerm_network_security_rule" "example" { + name = "example_security_rule" + network_security_group_name = azurerm_network_security_group.example.name + direction = "Inbound" + access = "Allow" + protocol = "TCP" + source_port_range = "*" + destination_port_ranges = ["3389"] + source_address_prefix = "4.53.160.75" + destination_address_prefix = "*" + } + + resource "azurerm_network_watcher_flow_log" "example" { + resource_group_name = azurerm_resource_group.example.name + name = "example-log" + + 
retention_policy { + enabled = true + days = 7 + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.SecurityGroups, 1) + require.Len(t, adapted.NetworkWatcherFlowLogs, 1) + + securityGroup := adapted.SecurityGroups[0] + rule := securityGroup.Rules[0] + watcher := adapted.NetworkWatcherFlowLogs[0] + + assert.Equal(t, 2, securityGroup.Metadata.Range().GetStartLine()) + assert.Equal(t, 4, securityGroup.Metadata.Range().GetEndLine()) + + assert.Equal(t, 6, rule.Metadata.Range().GetStartLine()) + assert.Equal(t, 16, rule.Metadata.Range().GetEndLine()) + + assert.Equal(t, 9, rule.Outbound.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 9, rule.Outbound.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 10, rule.Allow.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, rule.Allow.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, rule.Protocol.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 11, rule.Protocol.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 12, rule.SourcePorts[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 12, rule.SourcePorts[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 13, rule.DestinationPorts[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 13, rule.DestinationPorts[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 14, rule.SourceAddresses[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 14, rule.SourceAddresses[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 15, rule.DestinationAddresses[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 15, rule.DestinationAddresses[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 18, watcher.Metadata.Range().GetStartLine()) + assert.Equal(t, 26, watcher.Metadata.Range().GetEndLine()) + + assert.Equal(t, 22, watcher.RetentionPolicy.Metadata.Range().GetStartLine()) + assert.Equal(t, 25, 
watcher.RetentionPolicy.Metadata.Range().GetEndLine()) + + assert.Equal(t, 23, watcher.RetentionPolicy.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 23, watcher.RetentionPolicy.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 24, watcher.RetentionPolicy.Days.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 24, watcher.RetentionPolicy.Days.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/azure/securitycenter/adapt.go b/internal/adapters/terraform/azure/securitycenter/adapt.go new file mode 100644 index 000000000000..90e02d933a3a --- /dev/null +++ b/internal/adapters/terraform/azure/securitycenter/adapt.go @@ -0,0 +1,59 @@ +package securitycenter + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/securitycenter" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) securitycenter.SecurityCenter { + return securitycenter.SecurityCenter{ + Contacts: adaptContacts(modules), + Subscriptions: adaptSubscriptions(modules), + } +} + +func adaptContacts(modules terraform.Modules) []securitycenter.Contact { + var contacts []securitycenter.Contact + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_security_center_contact") { + contacts = append(contacts, adaptContact(resource)) + } + } + return contacts +} + +func adaptSubscriptions(modules terraform.Modules) []securitycenter.SubscriptionPricing { + var subscriptions []securitycenter.SubscriptionPricing + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_security_center_subscription_pricing") { + subscriptions = append(subscriptions, adaptSubscription(resource)) + } + } + return subscriptions +} + +func adaptContact(resource *terraform.Block) securitycenter.Contact { + enableAlertNotifAttr := resource.GetAttribute("alert_notifications") + enableAlertNotifVal := 
enableAlertNotifAttr.AsBoolValueOrDefault(false, resource) + + phoneAttr := resource.GetAttribute("phone") + phoneVal := phoneAttr.AsStringValueOrDefault("", resource) + + return securitycenter.Contact{ + Metadata: resource.GetMetadata(), + EnableAlertNotifications: enableAlertNotifVal, + Phone: phoneVal, + } +} + +func adaptSubscription(resource *terraform.Block) securitycenter.SubscriptionPricing { + tierAttr := resource.GetAttribute("tier") + tierVal := tierAttr.AsStringValueOrDefault("Free", resource) + + return securitycenter.SubscriptionPricing{ + Metadata: resource.GetMetadata(), + Tier: tierVal, + } +} diff --git a/internal/adapters/terraform/azure/securitycenter/adapt_test.go b/internal/adapters/terraform/azure/securitycenter/adapt_test.go new file mode 100644 index 000000000000..70faef6f27b1 --- /dev/null +++ b/internal/adapters/terraform/azure/securitycenter/adapt_test.go @@ -0,0 +1,137 @@ +package securitycenter + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/securitycenter" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptContact(t *testing.T) { + tests := []struct { + name string + terraform string + expected securitycenter.Contact + }{ + { + name: "defined", + terraform: ` + resource "azurerm_security_center_contact" "example" { + phone = "+1-555-555-5555" + alert_notifications = true + } +`, + expected: securitycenter.Contact{ + Metadata: defsecTypes.NewTestMetadata(), + EnableAlertNotifications: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Phone: defsecTypes.String("+1-555-555-5555", defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defaults", + terraform: ` + resource "azurerm_security_center_contact" "example" { + } 
+`, + expected: securitycenter.Contact{ + Metadata: defsecTypes.NewTestMetadata(), + EnableAlertNotifications: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Phone: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptContact(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptSubscription(t *testing.T) { + tests := []struct { + name string + terraform string + expected securitycenter.SubscriptionPricing + }{ + { + name: "free tier", + terraform: ` + resource "azurerm_security_center_subscription_pricing" "example" { + tier = "Free" + }`, + expected: securitycenter.SubscriptionPricing{ + Metadata: defsecTypes.NewTestMetadata(), + Tier: defsecTypes.String("Free", defsecTypes.NewTestMetadata()), + }, + }, + { + name: "default - free tier", + terraform: ` + resource "azurerm_security_center_subscription_pricing" "example" { + }`, + expected: securitycenter.SubscriptionPricing{ + Metadata: defsecTypes.NewTestMetadata(), + Tier: defsecTypes.String("Free", defsecTypes.NewTestMetadata()), + }, + }, + { + name: "standard tier", + terraform: ` + resource "azurerm_security_center_subscription_pricing" "example" { + tier = "Standard" + }`, + expected: securitycenter.SubscriptionPricing{ + Metadata: defsecTypes.NewTestMetadata(), + Tier: defsecTypes.String("Standard", defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptSubscription(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_security_center_contact" "example" { + phone = "+1-555-555-5555" + alert_notifications = 
true + } + + resource "azurerm_security_center_subscription_pricing" "example" { + tier = "Standard" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Contacts, 1) + require.Len(t, adapted.Subscriptions, 1) + + contact := adapted.Contacts[0] + sub := adapted.Subscriptions[0] + + assert.Equal(t, 3, contact.Phone.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, contact.Phone.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, contact.EnableAlertNotifications.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, contact.EnableAlertNotifications.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 8, sub.Tier.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 8, sub.Tier.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/azure/storage/adapt.go b/internal/adapters/terraform/azure/storage/adapt.go new file mode 100644 index 000000000000..4519460b5eb2 --- /dev/null +++ b/internal/adapters/terraform/azure/storage/adapt.go @@ -0,0 +1,173 @@ +package storage + +import ( + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/storage" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) storage.Storage { + accounts, containers, networkRules := adaptAccounts(modules) + + orphanAccount := storage.Account{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + NetworkRules: adaptOrphanNetworkRules(modules, networkRules), + EnforceHTTPS: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + Containers: adaptOrphanContainers(modules, containers), + QueueProperties: storage.QueueProperties{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnableLogging: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + MinimumTLSVersion: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + } + + 
accounts = append(accounts, orphanAccount) + + return storage.Storage{ + Accounts: accounts, + } +} + +func adaptOrphanContainers(modules terraform.Modules, containers []string) (orphans []storage.Container) { + accountedFor := make(map[string]bool) + for _, container := range containers { + accountedFor[container] = true + } + for _, module := range modules { + for _, containerResource := range module.GetResourcesByType("azurerm_storage_container") { + if _, ok := accountedFor[containerResource.ID()]; ok { + continue + } + orphans = append(orphans, adaptContainer(containerResource)) + } + } + + return orphans +} + +func adaptOrphanNetworkRules(modules terraform.Modules, networkRules []string) (orphans []storage.NetworkRule) { + accountedFor := make(map[string]bool) + for _, networkRule := range networkRules { + accountedFor[networkRule] = true + } + + for _, module := range modules { + for _, networkRuleResource := range module.GetResourcesByType("azurerm_storage_account_network_rules") { + if _, ok := accountedFor[networkRuleResource.ID()]; ok { + continue + } + + orphans = append(orphans, adaptNetworkRule(networkRuleResource)) + } + } + + return orphans +} + +func adaptAccounts(modules terraform.Modules) ([]storage.Account, []string, []string) { + var accounts []storage.Account + var accountedForContainers []string + var accountedForNetworkRules []string + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_storage_account") { + account := adaptAccount(resource) + containerResource := module.GetReferencingResources(resource, "azurerm_storage_container", "storage_account_name") + for _, containerBlock := range containerResource { + accountedForContainers = append(accountedForContainers, containerBlock.ID()) + account.Containers = append(account.Containers, adaptContainer(containerBlock)) + } + networkRulesResource := module.GetReferencingResources(resource, "azurerm_storage_account_network_rules", 
"storage_account_name") + for _, networkRuleBlock := range networkRulesResource { + accountedForNetworkRules = append(accountedForNetworkRules, networkRuleBlock.ID()) + account.NetworkRules = append(account.NetworkRules, adaptNetworkRule(networkRuleBlock)) + } + for _, queueBlock := range module.GetReferencingResources(resource, "azurerm_storage_queue", "storage_account_name") { + queue := storage.Queue{ + Metadata: queueBlock.GetMetadata(), + Name: queueBlock.GetAttribute("name").AsStringValueOrDefault("", queueBlock), + } + account.Queues = append(account.Queues, queue) + } + accounts = append(accounts, account) + } + } + + return accounts, accountedForContainers, accountedForNetworkRules +} + +func adaptAccount(resource *terraform.Block) storage.Account { + account := storage.Account{ + Metadata: resource.GetMetadata(), + NetworkRules: nil, + EnforceHTTPS: defsecTypes.BoolDefault(true, resource.GetMetadata()), + Containers: nil, + QueueProperties: storage.QueueProperties{ + Metadata: resource.GetMetadata(), + EnableLogging: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + MinimumTLSVersion: defsecTypes.StringDefault("TLS1_2", resource.GetMetadata()), + } + + networkRulesBlocks := resource.GetBlocks("network_rules") + for _, networkBlock := range networkRulesBlocks { + account.NetworkRules = append(account.NetworkRules, adaptNetworkRule(networkBlock)) + } + + httpsOnlyAttr := resource.GetAttribute("enable_https_traffic_only") + account.EnforceHTTPS = httpsOnlyAttr.AsBoolValueOrDefault(true, resource) + + queuePropertiesBlock := resource.GetBlock("queue_properties") + if queuePropertiesBlock.IsNotNil() { + account.QueueProperties.Metadata = queuePropertiesBlock.GetMetadata() + loggingBlock := queuePropertiesBlock.GetBlock("logging") + if loggingBlock.IsNotNil() { + account.QueueProperties.EnableLogging = defsecTypes.Bool(true, loggingBlock.GetMetadata()) + } + } + + minTLSVersionAttr := resource.GetAttribute("min_tls_version") + 
account.MinimumTLSVersion = minTLSVersionAttr.AsStringValueOrDefault("TLS1_0", resource) + return account +} + +func adaptContainer(resource *terraform.Block) storage.Container { + accessTypeAttr := resource.GetAttribute("container_access_type") + publicAccess := defsecTypes.StringDefault(storage.PublicAccessOff, resource.GetMetadata()) + + if accessTypeAttr.Equals("blob") { + publicAccess = defsecTypes.String(storage.PublicAccessBlob, accessTypeAttr.GetMetadata()) + } else if accessTypeAttr.Equals("container") { + publicAccess = defsecTypes.String(storage.PublicAccessContainer, accessTypeAttr.GetMetadata()) + } + + return storage.Container{ + Metadata: resource.GetMetadata(), + PublicAccess: publicAccess, + } +} + +func adaptNetworkRule(resource *terraform.Block) storage.NetworkRule { + var allowByDefault defsecTypes.BoolValue + var bypass []defsecTypes.StringValue + + defaultActionAttr := resource.GetAttribute("default_action") + + if defaultActionAttr.IsNotNil() { + allowByDefault = defsecTypes.Bool(defaultActionAttr.Equals("Allow", terraform.IgnoreCase), defaultActionAttr.GetMetadata()) + } else { + allowByDefault = defsecTypes.BoolDefault(false, resource.GetMetadata()) + } + + if resource.HasChild("bypass") { + bypassAttr := resource.GetAttribute("bypass") + bypass = bypassAttr.AsStringValues() + } + + return storage.NetworkRule{ + Metadata: resource.GetMetadata(), + Bypass: bypass, + AllowByDefault: allowByDefault, + } +} diff --git a/internal/adapters/terraform/azure/storage/adapt_test.go b/internal/adapters/terraform/azure/storage/adapt_test.go new file mode 100644 index 000000000000..664c33f83abe --- /dev/null +++ b/internal/adapters/terraform/azure/storage/adapt_test.go @@ -0,0 +1,252 @@ +package storage + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/storage" + "github.com/stretchr/testify/assert" + 
"github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected storage.Storage + }{ + { + name: "defined", + terraform: ` + resource "azurerm_resource_group" "example" { + name = "example" + } + + resource "azurerm_storage_account" "example" { + name = "storageaccountname" + resource_group_name = azurerm_resource_group.example.name + + network_rules { + default_action = "Deny" + bypass = ["Metrics", "AzureServices"] + } + + enable_https_traffic_only = true + queue_properties { + logging { + delete = true + read = true + write = true + version = "1.0" + retention_policy_days = 10 + } + } + min_tls_version = "TLS1_2" + } + + resource "azurerm_storage_account_network_rules" "test" { + resource_group_name = azurerm_resource_group.example.name + storage_account_name = azurerm_storage_account.example.name + + default_action = "Allow" + bypass = ["Metrics"] + } + + resource "azurerm_storage_container" "example" { + storage_account_name = azurerm_storage_account.example.name + resource_group_name = azurerm_resource_group.example.name + container_access_type = "blob" + } +`, + expected: storage.Storage{ + Accounts: []storage.Account{ + + { + Metadata: defsecTypes.NewTestMetadata(), + EnforceHTTPS: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + MinimumTLSVersion: defsecTypes.String("TLS1_2", defsecTypes.NewTestMetadata()), + NetworkRules: []storage.NetworkRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + Bypass: []defsecTypes.StringValue{ + defsecTypes.String("Metrics", defsecTypes.NewTestMetadata()), + defsecTypes.String("AzureServices", defsecTypes.NewTestMetadata()), + }, + AllowByDefault: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + { + Metadata: defsecTypes.NewTestMetadata(), + Bypass: []defsecTypes.StringValue{ 
+ defsecTypes.String("Metrics", defsecTypes.NewTestMetadata()), + }, + AllowByDefault: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + QueueProperties: storage.QueueProperties{ + Metadata: defsecTypes.NewTestMetadata(), + EnableLogging: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + Containers: []storage.Container{ + { + Metadata: defsecTypes.NewTestMetadata(), + PublicAccess: defsecTypes.String("blob", defsecTypes.NewTestMetadata()), + }, + }, + }, + { + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnforceHTTPS: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + QueueProperties: storage.QueueProperties{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnableLogging: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + MinimumTLSVersion: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + }, + }, + }, + { + name: "orphans", + terraform: ` + resource "azurerm_storage_account_network_rules" "test" { + default_action = "Allow" + bypass = ["Metrics"] + } + + resource "azurerm_storage_container" "example" { + container_access_type = "blob" + } +`, + expected: storage.Storage{ + Accounts: []storage.Account{ + { + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnforceHTTPS: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + NetworkRules: []storage.NetworkRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + Bypass: []defsecTypes.StringValue{ + defsecTypes.String("Metrics", defsecTypes.NewTestMetadata()), + }, + AllowByDefault: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + QueueProperties: storage.QueueProperties{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnableLogging: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + MinimumTLSVersion: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + Containers: []storage.Container{ + { + Metadata: defsecTypes.NewTestMetadata(), + PublicAccess: 
defsecTypes.String("blob", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_resource_group" "example" { + name = "example" + location = "West Europe" + } + + resource "azurerm_storage_account" "example" { + resource_group_name = azurerm_resource_group.example.name + + enable_https_traffic_only = true + min_tls_version = "TLS1_2" + + queue_properties { + logging { + delete = true + read = true + write = true + version = "1.0" + retention_policy_days = 10 + } + } + + network_rules { + default_action = "Deny" + bypass = ["Metrics", "AzureServices"] + } + } + + resource "azurerm_storage_account_network_rules" "test" { + resource_group_name = azurerm_resource_group.example.name + storage_account_name = azurerm_storage_account.example.name + + default_action = "Allow" + bypass = ["Metrics"] + } + + resource "azurerm_storage_container" "example" { + storage_account_name = azurerm_storage_account.example.name + resource_group_name = azurerm_resource_group.example.name + container_access_type = "blob" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Accounts, 2) //+orphans holder + account := adapted.Accounts[0] + + assert.Equal(t, 7, account.Metadata.Range().GetStartLine()) + assert.Equal(t, 27, account.Metadata.Range().GetEndLine()) + + assert.Equal(t, 10, account.EnforceHTTPS.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, account.EnforceHTTPS.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, account.MinimumTLSVersion.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 11, account.MinimumTLSVersion.GetMetadata().Range().GetEndLine()) + + 
assert.Equal(t, 13, account.QueueProperties.Metadata.Range().GetStartLine()) + assert.Equal(t, 21, account.QueueProperties.Metadata.Range().GetEndLine()) + + assert.Equal(t, 14, account.QueueProperties.EnableLogging.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 20, account.QueueProperties.EnableLogging.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 23, account.NetworkRules[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 26, account.NetworkRules[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 24, account.NetworkRules[0].AllowByDefault.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 24, account.NetworkRules[0].AllowByDefault.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 25, account.NetworkRules[0].Bypass[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 25, account.NetworkRules[0].Bypass[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 29, account.NetworkRules[1].Metadata.Range().GetStartLine()) + assert.Equal(t, 35, account.NetworkRules[1].Metadata.Range().GetEndLine()) + + assert.Equal(t, 33, account.NetworkRules[1].AllowByDefault.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 33, account.NetworkRules[1].AllowByDefault.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 34, account.NetworkRules[1].Bypass[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 34, account.NetworkRules[1].Bypass[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 37, account.Containers[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 41, account.Containers[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 40, account.Containers[0].PublicAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 40, account.Containers[0].PublicAccess.GetMetadata().Range().GetEndLine()) + +} diff --git a/internal/adapters/terraform/azure/synapse/adapt.go b/internal/adapters/terraform/azure/synapse/adapt.go new file mode 100644 index 000000000000..6e5743dccc80 --- /dev/null +++ 
b/internal/adapters/terraform/azure/synapse/adapt.go @@ -0,0 +1,32 @@ +package synapse + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/synapse" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) synapse.Synapse { + return synapse.Synapse{ + Workspaces: adaptWorkspaces(modules), + } +} + +func adaptWorkspaces(modules terraform.Modules) []synapse.Workspace { + var workspaces []synapse.Workspace + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_synapse_workspace") { + workspaces = append(workspaces, adaptWorkspace(resource)) + } + } + return workspaces +} + +func adaptWorkspace(resource *terraform.Block) synapse.Workspace { + enableManagedVNAttr := resource.GetAttribute("managed_virtual_network_enabled") + enableManagedVNVal := enableManagedVNAttr.AsBoolValueOrDefault(false, resource) + + return synapse.Workspace{ + Metadata: resource.GetMetadata(), + EnableManagedVirtualNetwork: enableManagedVNVal, + } +} diff --git a/internal/adapters/terraform/azure/synapse/adapt_test.go b/internal/adapters/terraform/azure/synapse/adapt_test.go new file mode 100644 index 000000000000..48951957d5dc --- /dev/null +++ b/internal/adapters/terraform/azure/synapse/adapt_test.go @@ -0,0 +1,83 @@ +package synapse + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/synapse" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptWorkspace(t *testing.T) { + tests := []struct { + name string + terraform string + expected synapse.Workspace + }{ + { + name: "enabled", + terraform: ` + resource "azurerm_synapse_workspace" "example" { + managed_virtual_network_enabled = 
true + } +`, + expected: synapse.Workspace{ + Metadata: defsecTypes.NewTestMetadata(), + EnableManagedVirtualNetwork: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "disabled", + terraform: ` + resource "azurerm_synapse_workspace" "example" { + managed_virtual_network_enabled = false + } +`, + expected: synapse.Workspace{ + Metadata: defsecTypes.NewTestMetadata(), + EnableManagedVirtualNetwork: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "default", + terraform: ` + resource "azurerm_synapse_workspace" "example" { + } +`, + expected: synapse.Workspace{ + Metadata: defsecTypes.NewTestMetadata(), + EnableManagedVirtualNetwork: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptWorkspace(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_synapse_workspace" "example" { + managed_virtual_network_enabled = true + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Workspaces, 1) + workspace := adapted.Workspaces[0] + + assert.Equal(t, 3, workspace.EnableManagedVirtualNetwork.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, workspace.EnableManagedVirtualNetwork.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/cloudstack/adapt.go b/internal/adapters/terraform/cloudstack/adapt.go new file mode 100644 index 000000000000..6be5887cf6b5 --- /dev/null +++ b/internal/adapters/terraform/cloudstack/adapt.go @@ -0,0 +1,13 @@ +package cloudstack + +import ( + "github.com/aquasecurity/defsec/pkg/providers/cloudstack" + "github.com/aquasecurity/defsec/pkg/terraform" + 
"github.com/aquasecurity/trivy/internal/adapters/terraform/cloudstack/compute" +) + +func Adapt(modules terraform.Modules) cloudstack.CloudStack { + return cloudstack.CloudStack{ + Compute: compute.Adapt(modules), + } +} diff --git a/internal/adapters/terraform/cloudstack/compute/adapt.go b/internal/adapters/terraform/cloudstack/compute/adapt.go new file mode 100644 index 000000000000..7104f74e2846 --- /dev/null +++ b/internal/adapters/terraform/cloudstack/compute/adapt.go @@ -0,0 +1,49 @@ +package compute + +import ( + "encoding/base64" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/terraform" + + "github.com/aquasecurity/defsec/pkg/providers/cloudstack/compute" +) + +func Adapt(modules terraform.Modules) compute.Compute { + return compute.Compute{ + Instances: adaptInstances(modules), + } +} + +func adaptInstances(modules terraform.Modules) []compute.Instance { + var instances []compute.Instance + for _, module := range modules { + for _, resource := range module.GetResourcesByType("cloudstack_instance") { + instances = append(instances, adaptInstance(resource)) + } + } + return instances +} + +func adaptInstance(resource *terraform.Block) compute.Instance { + userDataAttr := resource.GetAttribute("user_data") + var encoded []byte + var err error + + if userDataAttr.IsNotNil() && userDataAttr.IsString() { + encoded, err = base64.StdEncoding.DecodeString(userDataAttr.Value().AsString()) + if err != nil { + encoded = []byte(userDataAttr.Value().AsString()) + } + return compute.Instance{ + Metadata: resource.GetMetadata(), + UserData: types.String(string(encoded), userDataAttr.GetMetadata()), + } + } + + return compute.Instance{ + Metadata: resource.GetMetadata(), + UserData: types.StringDefault("", resource.GetMetadata()), + } +} diff --git a/internal/adapters/terraform/cloudstack/compute/adapt_test.go b/internal/adapters/terraform/cloudstack/compute/adapt_test.go new file mode 100644 index 
000000000000..36fce11efd7f --- /dev/null +++ b/internal/adapters/terraform/cloudstack/compute/adapt_test.go @@ -0,0 +1,91 @@ +package compute + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/cloudstack/compute" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptInstance(t *testing.T) { + tests := []struct { + name string + terraform string + expected compute.Instance + }{ + { + name: "sensitive user data", + terraform: ` + resource "cloudstack_instance" "web" { + name = "server-1" + user_data = < 0 { + cluster.NodeConfig = cluster.NodePools[0].NodeConfig + a.clusterMap[id] = cluster + } + } + + var clusters []gke.Cluster + for _, cluster := range a.clusterMap { + clusters = append(clusters, cluster) + } + return clusters +} + +func (a *adapter) adaptCluster(resource *terraform.Block, module *terraform.Module) { + + cluster := gke.Cluster{ + Metadata: resource.GetMetadata(), + NodePools: nil, + IPAllocationPolicy: gke.IPAllocationPolicy{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + MasterAuthorizedNetworks: gke.MasterAuthorizedNetworks{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + CIDRs: []defsecTypes.StringValue{}, + }, + NetworkPolicy: gke.NetworkPolicy{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + DatapathProvider: resource.GetAttribute("datapath_provider"). 
+ AsStringValueOrDefault("DATAPATH_PROVIDER_UNSPECIFIED", resource), + PrivateCluster: gke.PrivateCluster{ + Metadata: resource.GetMetadata(), + EnablePrivateNodes: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + LoggingService: defsecTypes.StringDefault("logging.googleapis.com/kubernetes", resource.GetMetadata()), + MonitoringService: defsecTypes.StringDefault("monitoring.googleapis.com/kubernetes", resource.GetMetadata()), + MasterAuth: gke.MasterAuth{ + Metadata: resource.GetMetadata(), + ClientCertificate: gke.ClientCertificate{ + Metadata: resource.GetMetadata(), + IssueCertificate: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + Username: defsecTypes.StringDefault("", resource.GetMetadata()), + Password: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + NodeConfig: gke.NodeConfig{ + Metadata: resource.GetMetadata(), + ImageType: defsecTypes.StringDefault("", resource.GetMetadata()), + WorkloadMetadataConfig: gke.WorkloadMetadataConfig{ + Metadata: resource.GetMetadata(), + NodeMetadata: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + ServiceAccount: defsecTypes.StringDefault("", resource.GetMetadata()), + EnableLegacyEndpoints: defsecTypes.BoolDefault(true, resource.GetMetadata()), + }, + EnableShieldedNodes: defsecTypes.BoolDefault(true, resource.GetMetadata()), + EnableLegacyABAC: defsecTypes.BoolDefault(false, resource.GetMetadata()), + ResourceLabels: defsecTypes.MapDefault(make(map[string]string), resource.GetMetadata()), + RemoveDefaultNodePool: defsecTypes.BoolDefault(false, resource.GetMetadata()), + EnableAutpilot: defsecTypes.BoolDefault(false, resource.GetMetadata()), + } + + if allocBlock := resource.GetBlock("ip_allocation_policy"); allocBlock.IsNotNil() { + cluster.IPAllocationPolicy.Metadata = allocBlock.GetMetadata() + cluster.IPAllocationPolicy.Enabled = defsecTypes.Bool(true, allocBlock.GetMetadata()) + } + + if blocks := resource.GetBlocks("master_authorized_networks_config"); 
len(blocks) > 0 { + cluster.MasterAuthorizedNetworks = adaptMasterAuthNetworksAsBlocks(resource, blocks) + } + + if policyBlock := resource.GetBlock("network_policy"); policyBlock.IsNotNil() { + enabledAttr := policyBlock.GetAttribute("enabled") + cluster.NetworkPolicy.Metadata = policyBlock.GetMetadata() + cluster.NetworkPolicy.Enabled = enabledAttr.AsBoolValueOrDefault(false, policyBlock) + } + + if privBlock := resource.GetBlock("private_cluster_config"); privBlock.IsNotNil() { + privateNodesEnabledAttr := privBlock.GetAttribute("enable_private_nodes") + cluster.PrivateCluster.Metadata = privBlock.GetMetadata() + cluster.PrivateCluster.EnablePrivateNodes = privateNodesEnabledAttr.AsBoolValueOrDefault(false, privBlock) + } + + loggingAttr := resource.GetAttribute("logging_service") + cluster.LoggingService = loggingAttr.AsStringValueOrDefault("logging.googleapis.com/kubernetes", resource) + monitoringServiceAttr := resource.GetAttribute("monitoring_service") + cluster.MonitoringService = monitoringServiceAttr.AsStringValueOrDefault("monitoring.googleapis.com/kubernetes", resource) + + if masterBlock := resource.GetBlock("master_auth"); masterBlock.IsNotNil() { + cluster.MasterAuth = adaptMasterAuth(masterBlock) + } + + if configBlock := resource.GetBlock("node_config"); configBlock.IsNotNil() { + if configBlock.GetBlock("metadata").IsNotNil() { + cluster.NodeConfig.Metadata = configBlock.GetBlock("metadata").GetMetadata() + } + cluster.NodeConfig = adaptNodeConfig(configBlock) + } + + cluster.EnableShieldedNodes = resource.GetAttribute("enable_shielded_nodes").AsBoolValueOrDefault(true, resource) + + enableLegacyABACAttr := resource.GetAttribute("enable_legacy_abac") + cluster.EnableLegacyABAC = enableLegacyABACAttr.AsBoolValueOrDefault(false, resource) + + cluster.EnableAutpilot = resource.GetAttribute("enable_autopilot").AsBoolValueOrDefault(false, resource) + + resourceLabelsAttr := resource.GetAttribute("resource_labels") + if resourceLabelsAttr.IsNotNil() { 
+ cluster.ResourceLabels = resourceLabelsAttr.AsMapValue() + } + + cluster.RemoveDefaultNodePool = resource.GetAttribute("remove_default_node_pool").AsBoolValueOrDefault(false, resource) + + a.clusterMap[resource.ID()] = cluster +} + +func (a *adapter) adaptNodePools() { + for _, nodePoolBlock := range a.modules.GetResourcesByType("google_container_node_pool") { + a.adaptNodePool(nodePoolBlock) + } +} + +func (a *adapter) adaptNodePool(resource *terraform.Block) { + nodeConfig := gke.NodeConfig{ + Metadata: resource.GetMetadata(), + ImageType: defsecTypes.StringDefault("", resource.GetMetadata()), + WorkloadMetadataConfig: gke.WorkloadMetadataConfig{ + Metadata: resource.GetMetadata(), + NodeMetadata: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + ServiceAccount: defsecTypes.StringDefault("", resource.GetMetadata()), + EnableLegacyEndpoints: defsecTypes.BoolDefault(true, resource.GetMetadata()), + } + + management := gke.Management{ + Metadata: resource.GetMetadata(), + EnableAutoRepair: defsecTypes.BoolDefault(false, resource.GetMetadata()), + EnableAutoUpgrade: defsecTypes.BoolDefault(false, resource.GetMetadata()), + } + + if resource.HasChild("management") { + management.Metadata = resource.GetBlock("management").GetMetadata() + + autoRepairAttr := resource.GetBlock("management").GetAttribute("auto_repair") + management.EnableAutoRepair = autoRepairAttr.AsBoolValueOrDefault(false, resource.GetBlock("management")) + + autoUpgradeAttr := resource.GetBlock("management").GetAttribute("auto_upgrade") + management.EnableAutoUpgrade = autoUpgradeAttr.AsBoolValueOrDefault(false, resource.GetBlock("management")) + } + + if resource.HasChild("node_config") { + nodeConfig = adaptNodeConfig(resource.GetBlock("node_config")) + } + + nodePool := gke.NodePool{ + Metadata: resource.GetMetadata(), + Management: management, + NodeConfig: nodeConfig, + } + + clusterAttr := resource.GetAttribute("cluster") + if referencedCluster, err := 
a.modules.GetReferencedBlock(clusterAttr, resource); err == nil { + if referencedCluster.TypeLabel() == "google_container_cluster" { + if cluster, ok := a.clusterMap[referencedCluster.ID()]; ok { + cluster.NodePools = append(cluster.NodePools, nodePool) + a.clusterMap[referencedCluster.ID()] = cluster + return + } + } + } + + // we didn't find a cluster to put the nodepool in, so create a placeholder + a.clusterMap[uuid.NewString()] = gke.Cluster{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + NodePools: []gke.NodePool{nodePool}, + IPAllocationPolicy: gke.IPAllocationPolicy{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Enabled: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + MasterAuthorizedNetworks: gke.MasterAuthorizedNetworks{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Enabled: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + CIDRs: nil, + }, + NetworkPolicy: gke.NetworkPolicy{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Enabled: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + PrivateCluster: gke.PrivateCluster{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnablePrivateNodes: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + LoggingService: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + MonitoringService: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + MasterAuth: gke.MasterAuth{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + ClientCertificate: gke.ClientCertificate{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + IssueCertificate: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + Username: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + Password: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + NodeConfig: gke.NodeConfig{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + ImageType: defsecTypes.StringDefault("", 
defsecTypes.NewUnmanagedMetadata()), + WorkloadMetadataConfig: gke.WorkloadMetadataConfig{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + NodeMetadata: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + ServiceAccount: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + EnableLegacyEndpoints: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + EnableShieldedNodes: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + EnableLegacyABAC: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + ResourceLabels: defsecTypes.MapDefault(nil, defsecTypes.NewUnmanagedMetadata()), + RemoveDefaultNodePool: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + EnableAutpilot: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + } +} + +func adaptNodeConfig(resource *terraform.Block) gke.NodeConfig { + + config := gke.NodeConfig{ + Metadata: resource.GetMetadata(), + ImageType: resource.GetAttribute("image_type").AsStringValueOrDefault("", resource), + WorkloadMetadataConfig: gke.WorkloadMetadataConfig{ + Metadata: resource.GetMetadata(), + NodeMetadata: defsecTypes.StringDefault("UNSPECIFIED", resource.GetMetadata()), + }, + ServiceAccount: resource.GetAttribute("service_account").AsStringValueOrDefault("", resource), + EnableLegacyEndpoints: defsecTypes.BoolDefault(true, resource.GetMetadata()), + } + + if metadata := resource.GetAttribute("metadata"); metadata.IsNotNil() { + legacyMetadata := metadata.MapValue("disable-legacy-endpoints") + if legacyMetadata.IsWhollyKnown() && legacyMetadata.Type() == cty.Bool { + config.EnableLegacyEndpoints = defsecTypes.Bool(legacyMetadata.False(), metadata.GetMetadata()) + } + } + + workloadBlock := resource.GetBlock("workload_metadata_config") + if workloadBlock.IsNotNil() { + config.WorkloadMetadataConfig.Metadata = workloadBlock.GetMetadata() + modeAttr := workloadBlock.GetAttribute("node_metadata") + if 
modeAttr.IsNil() { + modeAttr = workloadBlock.GetAttribute("mode") // try newest version + } + config.WorkloadMetadataConfig.NodeMetadata = modeAttr.AsStringValueOrDefault("UNSPECIFIED", workloadBlock) + } + + return config +} + +func adaptMasterAuth(resource *terraform.Block) gke.MasterAuth { + clientCert := gke.ClientCertificate{ + Metadata: resource.GetMetadata(), + IssueCertificate: defsecTypes.BoolDefault(false, resource.GetMetadata()), + } + + if resource.HasChild("client_certificate_config") { + clientCertAttr := resource.GetBlock("client_certificate_config").GetAttribute("issue_client_certificate") + clientCert.IssueCertificate = clientCertAttr.AsBoolValueOrDefault(false, resource.GetBlock("client_certificate_config")) + clientCert.Metadata = resource.GetBlock("client_certificate_config").GetMetadata() + } + + username := resource.GetAttribute("username").AsStringValueOrDefault("", resource) + password := resource.GetAttribute("password").AsStringValueOrDefault("", resource) + + return gke.MasterAuth{ + Metadata: resource.GetMetadata(), + ClientCertificate: clientCert, + Username: username, + Password: password, + } +} + +func adaptMasterAuthNetworksAsBlocks(parent *terraform.Block, blocks terraform.Blocks) gke.MasterAuthorizedNetworks { + var cidrs []defsecTypes.StringValue + for _, block := range blocks { + for _, cidrBlock := range block.GetBlocks("cidr_blocks") { + if cidrAttr := cidrBlock.GetAttribute("cidr_block"); cidrAttr.IsNotNil() { + cidrs = append(cidrs, cidrAttr.AsStringValues()...) 
+ } + } + } + enabled := defsecTypes.Bool(true, blocks[0].GetMetadata()) + return gke.MasterAuthorizedNetworks{ + Metadata: blocks[0].GetMetadata(), + Enabled: enabled, + CIDRs: cidrs, + } +} diff --git a/internal/adapters/terraform/google/gke/adapt_test.go b/internal/adapters/terraform/google/gke/adapt_test.go new file mode 100644 index 000000000000..5f6fcc4f4c97 --- /dev/null +++ b/internal/adapters/terraform/google/gke/adapt_test.go @@ -0,0 +1,416 @@ +package gke + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/google/gke" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected gke.GKE + }{ + { + name: "separately defined pool", + terraform: ` +resource "google_service_account" "default" { + account_id = "service-account-id" + display_name = "Service Account" +} + +resource "google_container_cluster" "example" { + name = "my-gke-cluster" + + node_config { + metadata = { + disable-legacy-endpoints = true + } + } + + pod_security_policy_config { + enabled = "true" + } + + enable_legacy_abac = "true" + enable_shielded_nodes = "true" + + remove_default_node_pool = true + initial_node_count = 1 + monitoring_service = "monitoring.googleapis.com/kubernetes" + logging_service = "logging.googleapis.com/kubernetes" + + master_auth { + client_certificate_config { + issue_client_certificate = true + } + } + + master_authorized_networks_config { + cidr_blocks { + cidr_block = "10.10.128.0/24" + display_name = "internal" + } + } + + resource_labels = { + "env" = "staging" + } + + private_cluster_config { + enable_private_nodes = true + } + + network_policy { + enabled = true + } + + 
ip_allocation_policy {} + + enable_autopilot = true + + datapath_provider = "ADVANCED_DATAPATH" +} + +resource "google_container_node_pool" "primary_preemptible_nodes" { + cluster = google_container_cluster.example.name + node_count = 1 + + node_config { + service_account = google_service_account.default.email + metadata = { + disable-legacy-endpoints = true + } + image_type = "COS_CONTAINERD" + workload_metadata_config { + mode = "GCE_METADATA" + } + } + management { + auto_repair = true + auto_upgrade = true + } +} +`, + expected: gke.GKE{ + Clusters: []gke.Cluster{ + { + Metadata: defsecTypes.NewTestMetadata(), + NodeConfig: gke.NodeConfig{ + Metadata: defsecTypes.NewTestMetadata(), + ImageType: defsecTypes.String("COS_CONTAINERD", defsecTypes.NewTestMetadata()), + WorkloadMetadataConfig: gke.WorkloadMetadataConfig{ + Metadata: defsecTypes.NewTestMetadata(), + NodeMetadata: defsecTypes.String("GCE_METADATA", defsecTypes.NewTestMetadata()), + }, + ServiceAccount: defsecTypes.String("", defsecTypes.NewTestMetadata()), + EnableLegacyEndpoints: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + NodePools: []gke.NodePool{ + { + Metadata: defsecTypes.NewTestMetadata(), + Management: gke.Management{ + Metadata: defsecTypes.NewTestMetadata(), + EnableAutoRepair: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + EnableAutoUpgrade: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + NodeConfig: gke.NodeConfig{ + Metadata: defsecTypes.NewTestMetadata(), + ImageType: defsecTypes.String("COS_CONTAINERD", defsecTypes.NewTestMetadata()), + WorkloadMetadataConfig: gke.WorkloadMetadataConfig{ + Metadata: defsecTypes.NewTestMetadata(), + NodeMetadata: defsecTypes.String("GCE_METADATA", defsecTypes.NewTestMetadata()), + }, + ServiceAccount: defsecTypes.String("", defsecTypes.NewTestMetadata()), + EnableLegacyEndpoints: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + IPAllocationPolicy: gke.IPAllocationPolicy{ + Metadata: 
defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + MasterAuthorizedNetworks: gke.MasterAuthorizedNetworks{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + CIDRs: []defsecTypes.StringValue{ + defsecTypes.String("10.10.128.0/24", defsecTypes.NewTestMetadata()), + }, + }, + NetworkPolicy: gke.NetworkPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + DatapathProvider: defsecTypes.String("ADVANCED_DATAPATH", defsecTypes.NewTestMetadata()), + PrivateCluster: gke.PrivateCluster{ + Metadata: defsecTypes.NewTestMetadata(), + EnablePrivateNodes: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + LoggingService: defsecTypes.String("logging.googleapis.com/kubernetes", defsecTypes.NewTestMetadata()), + MonitoringService: defsecTypes.String("monitoring.googleapis.com/kubernetes", defsecTypes.NewTestMetadata()), + MasterAuth: gke.MasterAuth{ + Metadata: defsecTypes.NewTestMetadata(), + ClientCertificate: gke.ClientCertificate{ + Metadata: defsecTypes.NewTestMetadata(), + IssueCertificate: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + Username: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Password: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + EnableShieldedNodes: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + EnableLegacyABAC: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + ResourceLabels: defsecTypes.Map(map[string]string{ + "env": "staging", + }, defsecTypes.NewTestMetadata()), + RemoveDefaultNodePool: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + EnableAutpilot: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + { + name: "default node pool", + terraform: ` +resource "google_container_cluster" "example" { + node_config { + service_account = "service-account" + metadata = { + 
disable-legacy-endpoints = true + } + image_type = "COS" + workload_metadata_config { + mode = "GCE_METADATA" + } + } +} +`, + expected: gke.GKE{ + Clusters: []gke.Cluster{ + { + Metadata: defsecTypes.NewTestMetadata(), + NodeConfig: gke.NodeConfig{ + Metadata: defsecTypes.NewTestMetadata(), + ImageType: defsecTypes.String("COS", defsecTypes.NewTestMetadata()), + WorkloadMetadataConfig: gke.WorkloadMetadataConfig{ + Metadata: defsecTypes.NewTestMetadata(), + NodeMetadata: defsecTypes.String("GCE_METADATA", defsecTypes.NewTestMetadata()), + }, + ServiceAccount: defsecTypes.String("service-account", defsecTypes.NewTestMetadata()), + EnableLegacyEndpoints: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + + IPAllocationPolicy: gke.IPAllocationPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + MasterAuthorizedNetworks: gke.MasterAuthorizedNetworks{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + CIDRs: []defsecTypes.StringValue{}, + }, + NetworkPolicy: gke.NetworkPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + DatapathProvider: defsecTypes.StringDefault("DATAPATH_PROVIDER_UNSPECIFIED", defsecTypes.NewTestMetadata()), + PrivateCluster: gke.PrivateCluster{ + Metadata: defsecTypes.NewTestMetadata(), + EnablePrivateNodes: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + LoggingService: defsecTypes.String("logging.googleapis.com/kubernetes", defsecTypes.NewTestMetadata()), + MonitoringService: defsecTypes.String("monitoring.googleapis.com/kubernetes", defsecTypes.NewTestMetadata()), + MasterAuth: gke.MasterAuth{ + Metadata: defsecTypes.NewTestMetadata(), + ClientCertificate: gke.ClientCertificate{ + Metadata: defsecTypes.NewTestMetadata(), + IssueCertificate: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + Username: 
defsecTypes.String("", defsecTypes.NewTestMetadata()), + Password: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + EnableShieldedNodes: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + EnableLegacyABAC: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + ResourceLabels: defsecTypes.Map(map[string]string{}, defsecTypes.NewTestMetadata()), + RemoveDefaultNodePool: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` +resource "google_container_cluster" "example" { + + node_config { + metadata = { + disable-legacy-endpoints = true + } + } + pod_security_policy_config { + enabled = "true" + } + + enable_legacy_abac = "true" + enable_shielded_nodes = "true" + + remove_default_node_pool = true + monitoring_service = "monitoring.googleapis.com/kubernetes" + logging_service = "logging.googleapis.com/kubernetes" + + master_auth { + client_certificate_config { + issue_client_certificate = true + } + } + + master_authorized_networks_config { + cidr_blocks { + cidr_block = "10.10.128.0/24" + } + } + + resource_labels = { + "env" = "staging" + } + + private_cluster_config { + enable_private_nodes = true + } + + network_policy { + enabled = true + } + ip_allocation_policy {} +} + +resource "google_container_node_pool" "primary_preemptible_nodes" { + cluster = google_container_cluster.example.name + + node_config { + metadata = { + disable-legacy-endpoints = true + } + service_account = google_service_account.default.email + image_type = "COS_CONTAINERD" + + workload_metadata_config { + mode = "GCE_METADATA" + } + } + management { + auto_repair = true + auto_upgrade = true + } +} +` + + modules := tftestutil.CreateModulesFromSource(t, src, 
".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Clusters, 1) + cluster := adapted.Clusters[0] + nodePool := cluster.NodePools[0] + + assert.Equal(t, 2, cluster.Metadata.Range().GetStartLine()) + assert.Equal(t, 44, cluster.Metadata.Range().GetEndLine()) + + assert.Equal(t, 49, cluster.NodeConfig.Metadata.Range().GetStartLine()) + assert.Equal(t, 59, cluster.NodeConfig.Metadata.Range().GetEndLine()) + + assert.Equal(t, 50, cluster.NodeConfig.EnableLegacyEndpoints.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 52, cluster.NodeConfig.EnableLegacyEndpoints.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 13, cluster.EnableLegacyABAC.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 13, cluster.EnableLegacyABAC.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 14, cluster.EnableShieldedNodes.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 14, cluster.EnableShieldedNodes.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 16, cluster.RemoveDefaultNodePool.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 16, cluster.RemoveDefaultNodePool.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 17, cluster.MonitoringService.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 17, cluster.MonitoringService.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 18, cluster.LoggingService.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 18, cluster.LoggingService.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 20, cluster.MasterAuth.Metadata.Range().GetStartLine()) + assert.Equal(t, 24, cluster.MasterAuth.Metadata.Range().GetEndLine()) + + assert.Equal(t, 21, cluster.MasterAuth.ClientCertificate.Metadata.Range().GetStartLine()) + assert.Equal(t, 23, cluster.MasterAuth.ClientCertificate.Metadata.Range().GetEndLine()) + + assert.Equal(t, 22, cluster.MasterAuth.ClientCertificate.IssueCertificate.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 22, 
cluster.MasterAuth.ClientCertificate.IssueCertificate.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 26, cluster.MasterAuthorizedNetworks.Metadata.Range().GetStartLine()) + assert.Equal(t, 30, cluster.MasterAuthorizedNetworks.Metadata.Range().GetEndLine()) + + assert.Equal(t, 28, cluster.MasterAuthorizedNetworks.CIDRs[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 28, cluster.MasterAuthorizedNetworks.CIDRs[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 32, cluster.ResourceLabels.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 34, cluster.ResourceLabels.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 36, cluster.PrivateCluster.Metadata.Range().GetStartLine()) + assert.Equal(t, 38, cluster.PrivateCluster.Metadata.Range().GetEndLine()) + + assert.Equal(t, 37, cluster.PrivateCluster.EnablePrivateNodes.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 37, cluster.PrivateCluster.EnablePrivateNodes.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 40, cluster.NetworkPolicy.Metadata.Range().GetStartLine()) + assert.Equal(t, 42, cluster.NetworkPolicy.Metadata.Range().GetEndLine()) + + assert.Equal(t, 41, cluster.NetworkPolicy.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 41, cluster.NetworkPolicy.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 43, cluster.IPAllocationPolicy.Metadata.Range().GetStartLine()) + assert.Equal(t, 43, cluster.IPAllocationPolicy.Metadata.Range().GetEndLine()) + + assert.Equal(t, 46, nodePool.Metadata.Range().GetStartLine()) + assert.Equal(t, 64, nodePool.Metadata.Range().GetEndLine()) + + assert.Equal(t, 49, nodePool.NodeConfig.Metadata.Range().GetStartLine()) + assert.Equal(t, 59, nodePool.NodeConfig.Metadata.Range().GetEndLine()) + + assert.Equal(t, 53, nodePool.NodeConfig.ServiceAccount.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 53, nodePool.NodeConfig.ServiceAccount.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 54, 
nodePool.NodeConfig.ImageType.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 54, nodePool.NodeConfig.ImageType.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 56, nodePool.NodeConfig.WorkloadMetadataConfig.Metadata.Range().GetStartLine()) + assert.Equal(t, 58, nodePool.NodeConfig.WorkloadMetadataConfig.Metadata.Range().GetEndLine()) + + assert.Equal(t, 57, nodePool.NodeConfig.WorkloadMetadataConfig.NodeMetadata.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 57, nodePool.NodeConfig.WorkloadMetadataConfig.NodeMetadata.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 60, nodePool.Management.Metadata.Range().GetStartLine()) + assert.Equal(t, 63, nodePool.Management.Metadata.Range().GetEndLine()) + + assert.Equal(t, 61, nodePool.Management.EnableAutoRepair.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 61, nodePool.Management.EnableAutoRepair.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 62, nodePool.Management.EnableAutoUpgrade.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 62, nodePool.Management.EnableAutoUpgrade.GetMetadata().Range().GetEndLine()) + +} diff --git a/internal/adapters/terraform/google/iam/adapt.go b/internal/adapters/terraform/google/iam/adapt.go new file mode 100644 index 000000000000..45d082af945b --- /dev/null +++ b/internal/adapters/terraform/google/iam/adapt.go @@ -0,0 +1,108 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/google/iam" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/google/uuid" +) + +func Adapt(modules terraform.Modules) iam.IAM { + return (&adapter{ + orgs: make(map[string]iam.Organization), + modules: modules, + }).Adapt() +} + +type adapter struct { + modules terraform.Modules + orgs map[string]iam.Organization + folders []parentedFolder + projects []parentedProject + workloadIdentityPoolProviders []iam.WorkloadIdentityPoolProvider +} + +func (a 
*adapter) Adapt() iam.IAM { + a.adaptOrganizationIAM() + a.adaptFolders() + a.adaptFolderIAM() + a.adaptProjects() + a.adaptProjectIAM() + a.adaptWorkloadIdentityPoolProviders() + return a.merge() +} + +func (a *adapter) addOrg(blockID string) { + if _, ok := a.orgs[blockID]; !ok { + a.orgs[blockID] = iam.Organization{ + Metadata: types.NewUnmanagedMetadata(), + } + } +} + +func (a *adapter) merge() iam.IAM { + + // add projects to folders, orgs +PROJECT: + for _, project := range a.projects { + for i, folder := range a.folders { + if project.folderBlockID != "" && project.folderBlockID == folder.blockID { + folder.folder.Projects = append(folder.folder.Projects, project.project) + a.folders[i] = folder + continue PROJECT + } + } + if project.orgBlockID != "" { + if org, ok := a.orgs[project.orgBlockID]; ok { + org.Projects = append(org.Projects, project.project) + a.orgs[project.orgBlockID] = org + continue PROJECT + } + } + + org := iam.Organization{ + Metadata: types.NewUnmanagedMetadata(), + Projects: []iam.Project{project.project}, + } + a.orgs[uuid.NewString()] = org + } + + // add folders to folders, orgs +FOLDER_NESTED: + for _, folder := range a.folders { + for i, existing := range a.folders { + if folder.parentBlockID != "" && folder.parentBlockID == existing.blockID { + existing.folder.Folders = append(existing.folder.Folders, folder.folder) + a.folders[i] = existing + continue FOLDER_NESTED + } + + } + } +FOLDER_ORG: + for _, folder := range a.folders { + if folder.parentBlockID != "" { + if org, ok := a.orgs[folder.parentBlockID]; ok { + org.Folders = append(org.Folders, folder.folder) + a.orgs[folder.parentBlockID] = org + continue FOLDER_ORG + } + } else { + // add to placeholder? 
+ org := iam.Organization{ + Metadata: types.NewUnmanagedMetadata(), + Folders: []iam.Folder{folder.folder}, + } + a.orgs[uuid.NewString()] = org + } + } + + output := iam.IAM{ + Organizations: nil, + WorkloadIdentityPoolProviders: a.workloadIdentityPoolProviders, + } + for _, org := range a.orgs { + output.Organizations = append(output.Organizations, org) + } + return output +} diff --git a/internal/adapters/terraform/google/iam/adapt_test.go b/internal/adapters/terraform/google/iam/adapt_test.go new file mode 100644 index 000000000000..19c684b70f6d --- /dev/null +++ b/internal/adapters/terraform/google/iam/adapt_test.go @@ -0,0 +1,266 @@ +package iam + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/google/iam" + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected iam.IAM + }{ + { + name: "basic", + terraform: ` + data "google_organization" "org" { + domain = "example.com" + } + + resource "google_project" "my_project" { + name = "My Project" + project_id = "your-project-id" + org_id = data.google_organization.org.id + auto_create_network = true + } + + resource "google_folder" "department1" { + display_name = "Department 1" + parent = data.google_organization.org.id + } + + resource "google_folder_iam_member" "admin" { + folder = google_folder.department1.name + role = "roles/editor" + member = "user:alice@gmail.com" + } + + resource "google_folder_iam_binding" "folder-123" { + folder = google_folder.department1.name + role = "roles/nothing" + members = [ + "user:not-alice@gmail.com", + ] + } + + resource "google_organization_iam_member" "org-123" { + org_id = 
data.google_organization.org.id + role = "roles/whatever" + member = "user:member@gmail.com" + } + + resource "google_organization_iam_binding" "binding" { + org_id = data.google_organization.org.id + role = "roles/browser" + + members = [ + "user:member_2@gmail.com", + ] + } + + resource "google_iam_workload_identity_pool_provider" "example" { + workload_identity_pool_id = "example-pool" + workload_identity_pool_provider_id = "example-provider" + attribute_condition = "assertion.repository_owner=='your-github-organization'" + } +`, + expected: iam.IAM{ + Organizations: []iam.Organization{ + { + Metadata: defsecTypes.NewTestMetadata(), + + Projects: []iam.Project{ + { + Metadata: defsecTypes.NewTestMetadata(), + AutoCreateNetwork: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + + Folders: []iam.Folder{ + { + Metadata: defsecTypes.NewTestMetadata(), + Members: []iam.Member{ + { + Metadata: defsecTypes.NewTestMetadata(), + Member: defsecTypes.String("user:alice@gmail.com", defsecTypes.NewTestMetadata()), + Role: defsecTypes.String("roles/editor", defsecTypes.NewTestMetadata()), + DefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + Bindings: []iam.Binding{ + { + Metadata: defsecTypes.NewTestMetadata(), + Members: []defsecTypes.StringValue{ + defsecTypes.String("user:not-alice@gmail.com", defsecTypes.NewTestMetadata()), + }, + Role: defsecTypes.String("roles/nothing", defsecTypes.NewTestMetadata()), + IncludesDefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + Members: []iam.Member{ + { + Metadata: defsecTypes.NewTestMetadata(), + Member: defsecTypes.String("user:member@gmail.com", defsecTypes.NewTestMetadata()), + Role: defsecTypes.String("roles/whatever", defsecTypes.NewTestMetadata()), + DefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + Bindings: []iam.Binding{ + { + Metadata: defsecTypes.NewTestMetadata(), + Members: 
[]defsecTypes.StringValue{ + defsecTypes.String("user:member_2@gmail.com", defsecTypes.NewTestMetadata())}, + Role: defsecTypes.String("roles/browser", defsecTypes.NewTestMetadata()), + IncludesDefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + WorkloadIdentityPoolProviders: []iam.WorkloadIdentityPoolProvider{ + { + Metadata: defsecTypes.NewTestMetadata(), + + WorkloadIdentityPoolId: defsecTypes.String("example-pool", defsecTypes.NewTestMetadata()), + WorkloadIdentityPoolProviderId: defsecTypes.String("example-provider", defsecTypes.NewTestMetadata()), + AttributeCondition: defsecTypes.String("assertion.repository_owner=='your-github-organization'", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + data "google_organization" "org" { + domain = "example.com" + } + + resource "google_project" "my_project" { + name = "My Project" + project_id = "your-project-id" + org_id = data.google_organization.org.id + auto_create_network = true + } + + resource "google_folder" "department1" { + display_name = "Department 1" + parent = data.google_organization.org.id + } + + resource "google_folder_iam_binding" "folder-123" { + folder = google_folder.department1.name + role = "roles/nothing" + members = [ + "user:not-alice@gmail.com", + ] + } + + resource "google_folder_iam_member" "admin" { + folder = google_folder.department1.name + role = "roles/editor" + member = "user:alice@gmail.com" + } + + resource "google_organization_iam_member" "org-123" { + org_id = data.google_organization.org.id + role = "roles/whatever" + member = "user:member@gmail.com" + } + + resource "google_organization_iam_binding" "binding" { + org_id = 
data.google_organization.org.id + role = "roles/browser" + + members = [ + "user:member_2@gmail.com", + ] + } + + resource "google_iam_workload_identity_pool_provider" "example" { + workload_identity_pool_id = "example-pool" + workload_identity_pool_provider_id = "example-provider" + attribute_condition = "assertion.repository_owner=='your-github-organization'" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Organizations, 1) + require.Len(t, adapted.Organizations[0].Projects, 1) + require.Len(t, adapted.Organizations[0].Folders, 1) + require.Len(t, adapted.Organizations[0].Bindings, 1) + require.Len(t, adapted.Organizations[0].Members, 1) + require.Len(t, adapted.WorkloadIdentityPoolProviders, 1) + + project := adapted.Organizations[0].Projects[0] + folder := adapted.Organizations[0].Folders[0] + binding := adapted.Organizations[0].Bindings[0] + member := adapted.Organizations[0].Members[0] + pool := adapted.WorkloadIdentityPoolProviders[0] + + assert.Equal(t, 6, project.Metadata.Range().GetStartLine()) + assert.Equal(t, 11, project.Metadata.Range().GetEndLine()) + + assert.Equal(t, 10, project.AutoCreateNetwork.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, project.AutoCreateNetwork.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 13, folder.Metadata.Range().GetStartLine()) + assert.Equal(t, 16, folder.Metadata.Range().GetEndLine()) + + assert.Equal(t, 18, folder.Bindings[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 24, folder.Bindings[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 20, folder.Bindings[0].Role.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 20, folder.Bindings[0].Role.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 21, folder.Bindings[0].Members[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 23, folder.Bindings[0].Members[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 26, 
folder.Members[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 30, folder.Members[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 29, folder.Members[0].Member.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 29, folder.Members[0].Member.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 28, folder.Members[0].Role.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 28, folder.Members[0].Role.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 32, member.Metadata.Range().GetStartLine()) + assert.Equal(t, 36, member.Metadata.Range().GetEndLine()) + + assert.Equal(t, 34, member.Role.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 34, member.Role.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 35, member.Member.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 35, member.Member.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 38, binding.Metadata.Range().GetStartLine()) + assert.Equal(t, 45, binding.Metadata.Range().GetEndLine()) + + assert.Equal(t, 40, binding.Role.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 40, binding.Role.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 42, binding.Members[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 44, binding.Members[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 51, pool.Metadata.Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/google/iam/convert.go b/internal/adapters/terraform/google/iam/convert.go new file mode 100644 index 000000000000..f364f02a8f9c --- /dev/null +++ b/internal/adapters/terraform/google/iam/convert.go @@ -0,0 +1,26 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/google/iam" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func ParsePolicyBlock(block *terraform.Block) []iam.Binding { + var bindings []iam.Binding + for _, bindingBlock := range block.GetBlocks("binding") { + 
binding := iam.Binding{ + Metadata: bindingBlock.GetMetadata(), + Members: nil, + Role: bindingBlock.GetAttribute("role").AsStringValueOrDefault("", bindingBlock), + IncludesDefaultServiceAccount: defsecTypes.BoolDefault(false, bindingBlock.GetMetadata()), + } + membersAttr := bindingBlock.GetAttribute("members") + members := membersAttr.AsStringValues().AsStrings() + for _, member := range members { + binding.Members = append(binding.Members, defsecTypes.String(member, membersAttr.GetMetadata())) + } + bindings = append(bindings, binding) + } + return bindings +} diff --git a/internal/adapters/terraform/google/iam/folder_iam.go b/internal/adapters/terraform/google/iam/folder_iam.go new file mode 100644 index 000000000000..51b09f185ba8 --- /dev/null +++ b/internal/adapters/terraform/google/iam/folder_iam.go @@ -0,0 +1,117 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/google/iam" + "github.com/aquasecurity/defsec/pkg/types" +) + +// see https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/google_folder_iam + +func (a *adapter) adaptFolderIAM() { + a.adaptFolderMembers() + a.adaptFolderBindings() +} + +func (a *adapter) adaptFolderMembers() { + for _, iamBlock := range a.modules.GetResourcesByType("google_folder_iam_member") { + member := a.adaptMember(iamBlock) + folderAttr := iamBlock.GetAttribute("folder") + if refBlock, err := a.modules.GetReferencedBlock(folderAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_folder" { + var foundFolder bool + for i, folder := range a.folders { + if folder.blockID == refBlock.ID() { + folder.folder.Members = append(folder.folder.Members, member) + a.folders[i] = folder + foundFolder = true + break + } + } + if foundFolder { + continue + } + } + } + + // we didn't find the folder - add an unmanaged one + a.folders = append(a.folders, parentedFolder{ + folder: iam.Folder{ + Metadata: types.NewUnmanagedMetadata(), + Members: 
[]iam.Member{member}, + }, + }) + } +} + +func (a *adapter) adaptFolderBindings() { + + for _, iamBlock := range a.modules.GetResourcesByType("google_folder_iam_policy") { + + policyAttr := iamBlock.GetAttribute("policy_data") + if policyAttr.IsNil() { + continue + } + policyBlock, err := a.modules.GetReferencedBlock(policyAttr, iamBlock) + if err != nil { + continue + } + bindings := ParsePolicyBlock(policyBlock) + folderAttr := iamBlock.GetAttribute("folder") + + if refBlock, err := a.modules.GetReferencedBlock(folderAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_folder" { + var foundFolder bool + for i, folder := range a.folders { + if folder.blockID == refBlock.ID() { + folder.folder.Bindings = append(folder.folder.Bindings, bindings...) + a.folders[i] = folder + foundFolder = true + break + } + } + if foundFolder { + continue + } + + } + } + + // we didn't find the project - add an unmanaged one + a.folders = append(a.folders, parentedFolder{ + folder: iam.Folder{ + Metadata: types.NewUnmanagedMetadata(), + Bindings: bindings, + }, + }) + } + + for _, iamBlock := range a.modules.GetResourcesByType("google_folder_iam_binding") { + binding := a.adaptBinding(iamBlock) + folderAttr := iamBlock.GetAttribute("folder") + if refBlock, err := a.modules.GetReferencedBlock(folderAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_folder" { + var foundFolder bool + for i, folder := range a.folders { + if folder.blockID == refBlock.ID() { + folder.folder.Bindings = append(folder.folder.Bindings, binding) + a.folders[i] = folder + foundFolder = true + break + } + } + if foundFolder { + continue + } + + } + } + + // we didn't find the folder - add an unmanaged one + a.folders = append(a.folders, parentedFolder{ + folder: iam.Folder{ + Metadata: types.NewUnmanagedMetadata(), + Bindings: []iam.Binding{binding}, + }, + }) + } +} diff --git a/internal/adapters/terraform/google/iam/folders.go b/internal/adapters/terraform/google/iam/folders.go 
new file mode 100644 index 000000000000..6e8de9641c6f --- /dev/null +++ b/internal/adapters/terraform/google/iam/folders.go @@ -0,0 +1,40 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/google/iam" +) + +type parentedFolder struct { + blockID string + parentBlockID string + parentRef string + folder iam.Folder +} + +func (a *adapter) adaptFolders() { + for _, folderBlock := range a.modules.GetResourcesByType("google_folder") { + var folder parentedFolder + parentAttr := folderBlock.GetAttribute("parent") + if parentAttr.IsNil() { + continue + } + + folder.folder.Metadata = folderBlock.GetMetadata() + folder.blockID = folderBlock.ID() + if parentAttr.IsString() { + folder.parentRef = parentAttr.Value().AsString() + } + + if referencedBlock, err := a.modules.GetReferencedBlock(parentAttr, folderBlock); err == nil { + if referencedBlock.TypeLabel() == "google_folder" { + folder.parentBlockID = referencedBlock.ID() + } + if referencedBlock.TypeLabel() == "google_organization" { + folder.parentBlockID = referencedBlock.ID() + a.addOrg(folder.parentBlockID) + } + } + + a.folders = append(a.folders, folder) + } +} diff --git a/internal/adapters/terraform/google/iam/org_iam.go b/internal/adapters/terraform/google/iam/org_iam.go new file mode 100644 index 000000000000..bf56dabd3866 --- /dev/null +++ b/internal/adapters/terraform/google/iam/org_iam.go @@ -0,0 +1,113 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/google/iam" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/google/uuid" +) + +// see https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/google_organization_iam + +func (a *adapter) adaptOrganizationIAM() { + a.adaptOrganizationMembers() + a.adaptOrganizationBindings() +} + +func (a *adapter) adaptOrganizationMembers() { + for _, iamBlock := range a.modules.GetResourcesByType("google_organization_iam_member") { + member := 
a.adaptMember(iamBlock) + organizationAttr := iamBlock.GetAttribute("organization") + if organizationAttr.IsNil() { + organizationAttr = iamBlock.GetAttribute("org_id") + } + + if refBlock, err := a.modules.GetReferencedBlock(organizationAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_organization" { + a.addOrg(refBlock.ID()) + org, ok := a.orgs[refBlock.ID()] + if !ok { + org = iam.Organization{ + Metadata: refBlock.GetMetadata(), + Folders: nil, + Projects: nil, + Members: []iam.Member{member}, + Bindings: nil, + } + } + org.Members = append(org.Members, member) + a.orgs[refBlock.ID()] = org + continue + } + } + + // we didn't find the organization - add an unmanaged one + placeholderID := uuid.NewString() + org := iam.Organization{ + Metadata: types.NewUnmanagedMetadata(), + Members: []iam.Member{member}, + } + a.orgs[placeholderID] = org + + } +} + +func (a *adapter) adaptOrganizationBindings() { + + for _, iamBlock := range a.modules.GetResourcesByType("google_organization_iam_policy") { + + policyAttr := iamBlock.GetAttribute("policy_data") + if policyAttr.IsNil() { + continue + } + policyBlock, err := a.modules.GetReferencedBlock(policyAttr, iamBlock) + if err != nil { + continue + } + bindings := ParsePolicyBlock(policyBlock) + orgAttr := iamBlock.GetAttribute("organization") + + if refBlock, err := a.modules.GetReferencedBlock(orgAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_organization" { + if org, ok := a.orgs[refBlock.ID()]; ok { + org.Bindings = append(org.Bindings, bindings...) 
+ a.orgs[refBlock.ID()] = org + continue + } + } + } + + // we didn't find the organization - add an unmanaged one + placeholderID := uuid.NewString() + org := iam.Organization{ + Metadata: types.NewUnmanagedMetadata(), + Bindings: bindings, + } + a.orgs[placeholderID] = org + } + + for _, iamBlock := range a.modules.GetResourcesByType("google_organization_iam_binding") { + binding := a.adaptBinding(iamBlock) + organizationAttr := iamBlock.GetAttribute("organization") + if organizationAttr.IsNil() { + organizationAttr = iamBlock.GetAttribute("org_id") + } + + if refBlock, err := a.modules.GetReferencedBlock(organizationAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_organization" { + a.addOrg(refBlock.ID()) + org := a.orgs[refBlock.ID()] + org.Bindings = append(org.Bindings, binding) + a.orgs[refBlock.ID()] = org + continue + } + } + + // we didn't find the organization - add an unmanaged one + placeholderID := uuid.NewString() + org := iam.Organization{ + Metadata: types.NewUnmanagedMetadata(), + Bindings: []iam.Binding{binding}, + } + a.orgs[placeholderID] = org + } +} diff --git a/internal/adapters/terraform/google/iam/project_iam.go b/internal/adapters/terraform/google/iam/project_iam.go new file mode 100644 index 000000000000..bac596af7569 --- /dev/null +++ b/internal/adapters/terraform/google/iam/project_iam.go @@ -0,0 +1,287 @@ +package iam + +import ( + "strings" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/terraform" + + "github.com/aquasecurity/defsec/pkg/providers/google/iam" +) + +// see https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/google_project_iam + +func (a *adapter) adaptProjectIAM() { + a.adaptProjectMembers() + a.adaptProjectBindings() +} + +func (a *adapter) adaptMember(iamBlock *terraform.Block) iam.Member { + return AdaptMember(iamBlock, a.modules) +} + +func AdaptMember(iamBlock *terraform.Block, modules 
terraform.Modules) iam.Member { + member := iam.Member{ + Metadata: iamBlock.GetMetadata(), + Member: iamBlock.GetAttribute("member").AsStringValueOrDefault("", iamBlock), + Role: iamBlock.GetAttribute("role").AsStringValueOrDefault("", iamBlock), + DefaultServiceAccount: defsecTypes.BoolDefault(false, iamBlock.GetMetadata()), + } + + memberAttr := iamBlock.GetAttribute("member") + if referencedBlock, err := modules.GetReferencedBlock(memberAttr, iamBlock); err == nil { + if strings.HasSuffix(referencedBlock.TypeLabel(), "_default_service_account") { + member.DefaultServiceAccount = defsecTypes.Bool(true, memberAttr.GetMetadata()) + } + } + + return member +} + +var projectMemberResources = []string{ + "google_project_iam_member", + "google_cloud_run_service_iam_member", + "google_compute_instance_iam_member", + "google_compute_subnetwork_iam_member", + "google_data_catalog_entry_group_iam_member", + "google_folder_iam_member", + "google_pubsub_subscription_iam_member", + "google_pubsub_topic_iam_member", + "google_sourcerepo_repository_iam_member", + "google_spanner_database_iam_member", + "google_spanner_instance_iam_member", + "google_storage_bucket_iam_member", +} + +func (a *adapter) adaptProjectMembers() { + + for _, memberType := range projectMemberResources { + for _, iamBlock := range a.modules.GetResourcesByType(memberType) { + member := a.adaptMember(iamBlock) + projectAttr := iamBlock.GetAttribute("project") + if projectAttr.IsString() { + var foundProject bool + projectID := projectAttr.Value().AsString() + for i, project := range a.projects { + if project.id == projectID { + project.project.Members = append(project.project.Members, member) + a.projects[i] = project + foundProject = true + break + } + } + if foundProject { + continue + } + } + + if refBlock, err := a.modules.GetReferencedBlock(projectAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_project" { + var foundProject bool + for i, project := range a.projects { + if 
project.blockID == refBlock.ID() { + project.project.Members = append(project.project.Members, member) + a.projects[i] = project + foundProject = true + break + } + } + if foundProject { + continue + } + + } + } + + // we didn't find the project - add an unmanaged one + // unless it already belongs to an existing folder + var foundFolder bool + if refBlock, err := a.modules.GetReferencedBlock(iamBlock.GetAttribute("folder"), iamBlock); err == nil { + for _, folder := range a.folders { + if folder.blockID == refBlock.ID() { + foundFolder = true + } + } + } + if foundFolder { + continue + } + + a.projects = append(a.projects, parentedProject{ + project: iam.Project{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + AutoCreateNetwork: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + Members: []iam.Member{member}, + Bindings: nil, + }, + }) + } + } +} + +func (a *adapter) adaptBinding(iamBlock *terraform.Block) iam.Binding { + return AdaptBinding(iamBlock, a.modules) +} + +func AdaptBinding(iamBlock *terraform.Block, modules terraform.Modules) iam.Binding { + binding := iam.Binding{ + Metadata: iamBlock.GetMetadata(), + Members: nil, + Role: iamBlock.GetAttribute("role").AsStringValueOrDefault("", iamBlock), + IncludesDefaultServiceAccount: defsecTypes.BoolDefault(false, iamBlock.GetMetadata()), + } + membersAttr := iamBlock.GetAttribute("members") + members := membersAttr.AsStringValues().AsStrings() + for _, member := range members { + binding.Members = append(binding.Members, defsecTypes.String(member, membersAttr.GetMetadata())) + } + if referencedBlock, err := modules.GetReferencedBlock(membersAttr, iamBlock); err == nil { + if strings.HasSuffix(referencedBlock.TypeLabel(), "_default_service_account") { + binding.IncludesDefaultServiceAccount = defsecTypes.Bool(true, membersAttr.GetMetadata()) + } + } + return binding +} + +var projectBindingResources = []string{ + "google_project_iam_binding", + "google_cloud_run_service_iam_binding", + 
"google_compute_instance_iam_binding", + "google_compute_subnetwork_iam_binding", + "google_data_catalog_entry_group_iam_binding", + "google_folder_iam_binding", + "google_pubsub_subscription_iam_binding", + "google_pubsub_topic_iam_binding", + "google_sourcerepo_repository_iam_binding", + "google_spanner_database_iam_binding", + "google_spanner_instance_iam_binding", + "google_storage_bucket_iam_binding", +} + +func (a *adapter) adaptProjectDataBindings() { + for _, iamBlock := range a.modules.GetResourcesByType("google_project_iam_policy") { + + policyAttr := iamBlock.GetAttribute("policy_data") + if policyAttr.IsNil() { + continue + } + policyBlock, err := a.modules.GetReferencedBlock(policyAttr, iamBlock) + if err != nil { + continue + } + bindings := ParsePolicyBlock(policyBlock) + projectAttr := iamBlock.GetAttribute("project") + if projectAttr.IsString() { + var foundProject bool + projectID := projectAttr.Value().AsString() + for i, project := range a.projects { + if project.id == projectID { + project.project.Bindings = append(project.project.Bindings, bindings...) + a.projects[i] = project + foundProject = true + break + } + } + if foundProject { + continue + } + } + + if refBlock, err := a.modules.GetReferencedBlock(projectAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_project" { + var foundProject bool + for i, project := range a.projects { + if project.blockID == refBlock.ID() { + project.project.Bindings = append(project.project.Bindings, bindings...) 
+ a.projects[i] = project + foundProject = true + break + } + } + if foundProject { + continue + } + + } + } + + // we didn't find the project - add an unmanaged one + a.projects = append(a.projects, parentedProject{ + project: iam.Project{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + AutoCreateNetwork: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + Members: nil, + Bindings: bindings, + }, + }) + } + +} + +func (a *adapter) adaptProjectBindings() { + + a.adaptProjectDataBindings() + + for _, bindingType := range projectBindingResources { + for _, iamBlock := range a.modules.GetResourcesByType(bindingType) { + binding := a.adaptBinding(iamBlock) + projectAttr := iamBlock.GetAttribute("project") + if projectAttr.IsString() { + var foundProject bool + projectID := projectAttr.Value().AsString() + for i, project := range a.projects { + if project.id == projectID { + project.project.Bindings = append(project.project.Bindings, binding) + a.projects[i] = project + foundProject = true + break + } + } + if foundProject { + continue + } + } + + if refBlock, err := a.modules.GetReferencedBlock(projectAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_project" { + var foundProject bool + for i, project := range a.projects { + if project.blockID == refBlock.ID() { + project.project.Bindings = append(project.project.Bindings, binding) + a.projects[i] = project + foundProject = true + break + } + } + if foundProject { + continue + } + + } + } + + // we didn't find the project - add an unmanaged one + // unless it already belongs to an existing folder + var foundFolder bool + if refBlock, err := a.modules.GetReferencedBlock(iamBlock.GetAttribute("folder"), iamBlock); err == nil { + for _, folder := range a.folders { + if folder.blockID == refBlock.ID() { + foundFolder = true + } + } + } + if foundFolder { + continue + } + a.projects = append(a.projects, parentedProject{ + project: iam.Project{ + Metadata: 
defsecTypes.NewUnmanagedMetadata(), + AutoCreateNetwork: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + Members: nil, + Bindings: []iam.Binding{binding}, + }, + }) + } + } +} diff --git a/internal/adapters/terraform/google/iam/project_iam_test.go b/internal/adapters/terraform/google/iam/project_iam_test.go new file mode 100644 index 000000000000..5c2f75187f4a --- /dev/null +++ b/internal/adapters/terraform/google/iam/project_iam_test.go @@ -0,0 +1,59 @@ +package iam + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/google/iam" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" +) + +func Test_AdaptBinding(t *testing.T) { + tests := []struct { + name string + terraform string + expected iam.Binding + }{ + { + name: "defined", + terraform: ` + resource "google_organization_iam_binding" "binding" { + org_id = data.google_organization.org.id + role = "roles/browser" + + members = [ + "user:alice@gmail.com", + ] + }`, + expected: iam.Binding{ + Metadata: defsecTypes.NewTestMetadata(), + Members: []defsecTypes.StringValue{ + defsecTypes.String("user:alice@gmail.com", defsecTypes.NewTestMetadata())}, + Role: defsecTypes.String("roles/browser", defsecTypes.NewTestMetadata()), + IncludesDefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defaults", + terraform: ` + resource "google_organization_iam_binding" "binding" { + }`, + expected: iam.Binding{ + Metadata: defsecTypes.NewTestMetadata(), + Role: defsecTypes.String("", defsecTypes.NewTestMetadata()), + IncludesDefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := 
AdaptBinding(modules.GetBlocks()[0], modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/google/iam/projects.go b/internal/adapters/terraform/google/iam/projects.go new file mode 100644 index 000000000000..e064dc8d0bd8 --- /dev/null +++ b/internal/adapters/terraform/google/iam/projects.go @@ -0,0 +1,58 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/google/iam" +) + +type parentedProject struct { + blockID string + orgBlockID string + folderBlockID string + id string + orgID string + folderID string + project iam.Project +} + +func (a *adapter) adaptProjects() { + for _, projectBlock := range a.modules.GetResourcesByType("google_project") { + var project parentedProject + project.project.Metadata = projectBlock.GetMetadata() + idAttr := projectBlock.GetAttribute("project_id") + if !idAttr.IsString() { + continue + } + project.id = idAttr.Value().AsString() + + project.blockID = projectBlock.ID() + + orgAttr := projectBlock.GetAttribute("org_id") + if orgAttr.IsString() { + project.orgID = orgAttr.Value().AsString() + } + folderAttr := projectBlock.GetAttribute("folder_id") + if folderAttr.IsString() { + project.folderID = folderAttr.Value().AsString() + } + + autoCreateNetworkAttr := projectBlock.GetAttribute("auto_create_network") + project.project.AutoCreateNetwork = autoCreateNetworkAttr.AsBoolValueOrDefault(true, projectBlock) + + if orgAttr.IsNotNil() { + if referencedBlock, err := a.modules.GetReferencedBlock(orgAttr, projectBlock); err == nil { + if referencedBlock.TypeLabel() == "google_organization" { + project.orgBlockID = referencedBlock.ID() + a.addOrg(project.orgBlockID) + } + } + } + if folderAttr.IsNotNil() { + if referencedBlock, err := a.modules.GetReferencedBlock(folderAttr, projectBlock); err == nil { + if referencedBlock.TypeLabel() == "google_folder" { + project.folderBlockID = referencedBlock.ID() + } + } + } + a.projects = append(a.projects, 
project) + } +} diff --git a/internal/adapters/terraform/google/iam/workload_identity_pool_providers.go b/internal/adapters/terraform/google/iam/workload_identity_pool_providers.go new file mode 100644 index 000000000000..70d68511ad70 --- /dev/null +++ b/internal/adapters/terraform/google/iam/workload_identity_pool_providers.go @@ -0,0 +1,18 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/google/iam" +) + +// See https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/iam_workload_identity_pool_provider + +func (a *adapter) adaptWorkloadIdentityPoolProviders() { + for _, resource := range a.modules.GetResourcesByType("google_iam_workload_identity_pool_provider") { + a.workloadIdentityPoolProviders = append(a.workloadIdentityPoolProviders, iam.WorkloadIdentityPoolProvider{ + Metadata: resource.GetMetadata(), + WorkloadIdentityPoolId: resource.GetAttribute("workload_identity_pool_id").AsStringValueOrDefault("", resource), + WorkloadIdentityPoolProviderId: resource.GetAttribute("workload_identity_pool_provider_id").AsStringValueOrDefault("", resource), + AttributeCondition: resource.GetAttribute("attribute_condition").AsStringValueOrDefault("", resource), + }) + } +} diff --git a/internal/adapters/terraform/google/kms/adapt.go b/internal/adapters/terraform/google/kms/adapt.go new file mode 100644 index 000000000000..1b76b7d8b501 --- /dev/null +++ b/internal/adapters/terraform/google/kms/adapt.go @@ -0,0 +1,60 @@ +package kms + +import ( + "strconv" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/terraform" + + "github.com/aquasecurity/defsec/pkg/providers/google/kms" +) + +func Adapt(modules terraform.Modules) kms.KMS { + return kms.KMS{ + KeyRings: adaptKeyRings(modules), + } +} + +func adaptKeyRings(modules terraform.Modules) []kms.KeyRing { + var keyRings []kms.KeyRing + for _, module := range modules { + for _, resource := range 
module.GetResourcesByType("google_kms_key_ring") { + var keys []kms.Key + + keyBlocks := module.GetReferencingResources(resource, "google_kms_crypto_key", "key_ring") + for _, keyBlock := range keyBlocks { + keys = append(keys, adaptKey(keyBlock)) + } + keyRings = append(keyRings, kms.KeyRing{ + Metadata: resource.GetMetadata(), + Keys: keys, + }) + } + } + return keyRings +} + +func adaptKey(resource *terraform.Block) kms.Key { + + key := kms.Key{ + Metadata: resource.GetMetadata(), + RotationPeriodSeconds: types.IntDefault(-1, resource.GetMetadata()), + } + + rotationPeriodAttr := resource.GetAttribute("rotation_period") + if !rotationPeriodAttr.IsString() { + return key + } + rotationStr := rotationPeriodAttr.Value().AsString() + if rotationStr[len(rotationStr)-1:] != "s" { + return key + } + seconds, err := strconv.Atoi(rotationStr[:len(rotationStr)-1]) + if err != nil { + return key + } + + key.RotationPeriodSeconds = types.Int(seconds, rotationPeriodAttr.GetMetadata()) + return key +} diff --git a/internal/adapters/terraform/google/kms/adapt_test.go b/internal/adapters/terraform/google/kms/adapt_test.go new file mode 100644 index 000000000000..f6f2b7c323ba --- /dev/null +++ b/internal/adapters/terraform/google/kms/adapt_test.go @@ -0,0 +1,126 @@ +package kms + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/google/kms" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptKeyRings(t *testing.T) { + tests := []struct { + name string + terraform string + expected []kms.KeyRing + }{ + { + name: "configured", + terraform: ` + resource "google_kms_key_ring" "keyring" { + name = "keyring-example" + } + + resource "google_kms_crypto_key" "example-key" { + name = 
"crypto-key-example" + key_ring = google_kms_key_ring.keyring.id + rotation_period = "7776000s" + } +`, + expected: []kms.KeyRing{ + { + Metadata: defsecTypes.NewTestMetadata(), + Keys: []kms.Key{ + { + Metadata: defsecTypes.NewTestMetadata(), + RotationPeriodSeconds: defsecTypes.Int(7776000, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + { + name: "no keys", + terraform: ` + resource "google_kms_key_ring" "keyring" { + name = "keyring-example" + } + +`, + expected: []kms.KeyRing{ + { + Metadata: defsecTypes.NewTestMetadata(), + }, + }, + }, + { + name: "default rotation period", + terraform: ` + resource "google_kms_key_ring" "keyring" { + name = "keyring-example" + } + + resource "google_kms_crypto_key" "example-key" { + name = "crypto-key-example" + key_ring = google_kms_key_ring.keyring.id + } +`, + expected: []kms.KeyRing{ + { + Metadata: defsecTypes.NewTestMetadata(), + Keys: []kms.Key{ + { + Metadata: defsecTypes.NewTestMetadata(), + RotationPeriodSeconds: defsecTypes.Int(-1, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptKeyRings(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "google_kms_key_ring" "keyring" { + name = "keyring-example" + } + + resource "google_kms_crypto_key" "example-key" { + name = "crypto-key-example" + key_ring = google_kms_key_ring.keyring.id + rotation_period = "7776000s" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.KeyRings, 1) + require.Len(t, adapted.KeyRings[0].Keys, 1) + + key := adapted.KeyRings[0].Keys[0] + + assert.Equal(t, 2, adapted.KeyRings[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 4, adapted.KeyRings[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 6, 
key.Metadata.Range().GetStartLine()) + assert.Equal(t, 10, key.Metadata.Range().GetEndLine()) + + assert.Equal(t, 9, key.RotationPeriodSeconds.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 9, key.RotationPeriodSeconds.GetMetadata().Range().GetEndLine()) + +} diff --git a/internal/adapters/terraform/google/sql/adapt.go b/internal/adapters/terraform/google/sql/adapt.go new file mode 100644 index 000000000000..b6cb39f39c3c --- /dev/null +++ b/internal/adapters/terraform/google/sql/adapt.go @@ -0,0 +1,156 @@ +package sql + +import ( + "strconv" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/terraform" + + "github.com/aquasecurity/defsec/pkg/providers/google/sql" +) + +func Adapt(modules terraform.Modules) sql.SQL { + return sql.SQL{ + Instances: adaptInstances(modules), + } +} + +func adaptInstances(modules terraform.Modules) []sql.DatabaseInstance { + var instances []sql.DatabaseInstance + for _, module := range modules { + for _, resource := range module.GetResourcesByType("google_sql_database_instance") { + instances = append(instances, adaptInstance(resource)) + } + } + return instances +} + +func adaptInstance(resource *terraform.Block) sql.DatabaseInstance { + + instance := sql.DatabaseInstance{ + Metadata: resource.GetMetadata(), + DatabaseVersion: resource.GetAttribute("database_version").AsStringValueOrDefault("", resource), + IsReplica: defsecTypes.BoolDefault(false, resource.GetMetadata()), + Settings: sql.Settings{ + Metadata: resource.GetMetadata(), + Flags: sql.Flags{ + Metadata: resource.GetMetadata(), + LogTempFileSize: defsecTypes.IntDefault(-1, resource.GetMetadata()), + LocalInFile: defsecTypes.BoolDefault(false, resource.GetMetadata()), + ContainedDatabaseAuthentication: defsecTypes.BoolDefault(true, resource.GetMetadata()), + CrossDBOwnershipChaining: defsecTypes.BoolDefault(true, resource.GetMetadata()), + LogCheckpoints: defsecTypes.BoolDefault(false, 
resource.GetMetadata()), + LogConnections: defsecTypes.BoolDefault(false, resource.GetMetadata()), + LogDisconnections: defsecTypes.BoolDefault(false, resource.GetMetadata()), + LogLockWaits: defsecTypes.BoolDefault(false, resource.GetMetadata()), + LogMinMessages: defsecTypes.StringDefault("", resource.GetMetadata()), + LogMinDurationStatement: defsecTypes.IntDefault(-1, resource.GetMetadata()), + }, + Backups: sql.Backups{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + IPConfiguration: sql.IPConfiguration{ + Metadata: resource.GetMetadata(), + RequireTLS: defsecTypes.BoolDefault(false, resource.GetMetadata()), + EnableIPv4: defsecTypes.BoolDefault(true, resource.GetMetadata()), + AuthorizedNetworks: nil, + }, + }, + } + + if attr := resource.GetAttribute("master_instance_name"); attr.IsNotNil() { + instance.IsReplica = defsecTypes.Bool(true, attr.GetMetadata()) + } + + if settingsBlock := resource.GetBlock("settings"); settingsBlock.IsNotNil() { + instance.Settings.Metadata = settingsBlock.GetMetadata() + if blocks := settingsBlock.GetBlocks("database_flags"); len(blocks) > 0 { + adaptFlags(blocks, &instance.Settings.Flags) + } + if backupBlock := settingsBlock.GetBlock("backup_configuration"); backupBlock.IsNotNil() { + instance.Settings.Backups.Metadata = backupBlock.GetMetadata() + backupConfigEnabledAttr := backupBlock.GetAttribute("enabled") + instance.Settings.Backups.Enabled = backupConfigEnabledAttr.AsBoolValueOrDefault(false, backupBlock) + } + if settingsBlock.HasChild("ip_configuration") { + instance.Settings.IPConfiguration = adaptIPConfig(settingsBlock.GetBlock("ip_configuration")) + } + } + return instance +} + +// nolint +func adaptFlags(resources terraform.Blocks, flags *sql.Flags) { + for _, resource := range resources { + + nameAttr := resource.GetAttribute("name") + valueAttr := resource.GetAttribute("value") + + if !nameAttr.IsString() || valueAttr.IsNil() { + continue + } + + 
switch nameAttr.Value().AsString() { + case "log_temp_files": + if logTempInt, err := strconv.Atoi(valueAttr.Value().AsString()); err == nil { + flags.LogTempFileSize = defsecTypes.Int(logTempInt, nameAttr.GetMetadata()) + } + case "log_min_messages": + flags.LogMinMessages = valueAttr.AsStringValueOrDefault("", resource) + case "log_min_duration_statement": + if logMinDS, err := strconv.Atoi(valueAttr.Value().AsString()); err == nil { + flags.LogMinDurationStatement = defsecTypes.Int(logMinDS, nameAttr.GetMetadata()) + } + case "local_infile": + flags.LocalInFile = defsecTypes.Bool(valueAttr.Equals("on"), valueAttr.GetMetadata()) + case "log_checkpoints": + flags.LogCheckpoints = defsecTypes.Bool(valueAttr.Equals("on"), valueAttr.GetMetadata()) + case "log_connections": + flags.LogConnections = defsecTypes.Bool(valueAttr.Equals("on"), valueAttr.GetMetadata()) + case "log_disconnections": + flags.LogDisconnections = defsecTypes.Bool(valueAttr.Equals("on"), valueAttr.GetMetadata()) + case "log_lock_waits": + flags.LogLockWaits = defsecTypes.Bool(valueAttr.Equals("on"), valueAttr.GetMetadata()) + case "contained database authentication": + flags.ContainedDatabaseAuthentication = defsecTypes.Bool(valueAttr.Equals("on"), valueAttr.GetMetadata()) + case "cross db ownership chaining": + flags.CrossDBOwnershipChaining = defsecTypes.Bool(valueAttr.Equals("on"), valueAttr.GetMetadata()) + } + } +} + +func adaptIPConfig(resource *terraform.Block) sql.IPConfiguration { + var authorizedNetworks []struct { + Name defsecTypes.StringValue + CIDR defsecTypes.StringValue + } + + tlsRequiredAttr := resource.GetAttribute("require_ssl") + tlsRequiredVal := tlsRequiredAttr.AsBoolValueOrDefault(false, resource) + + ipv4enabledAttr := resource.GetAttribute("ipv4_enabled") + ipv4enabledVal := ipv4enabledAttr.AsBoolValueOrDefault(true, resource) + + authNetworksBlocks := resource.GetBlocks("authorized_networks") + for _, authBlock := range authNetworksBlocks { + nameVal := 
authBlock.GetAttribute("name").AsStringValueOrDefault("", authBlock) + cidrVal := authBlock.GetAttribute("value").AsStringValueOrDefault("", authBlock) + + authorizedNetworks = append(authorizedNetworks, struct { + Name defsecTypes.StringValue + CIDR defsecTypes.StringValue + }{ + Name: nameVal, + CIDR: cidrVal, + }) + } + + return sql.IPConfiguration{ + Metadata: resource.GetMetadata(), + RequireTLS: tlsRequiredVal, + EnableIPv4: ipv4enabledVal, + AuthorizedNetworks: authorizedNetworks, + } +} diff --git a/internal/adapters/terraform/google/sql/adapt_test.go b/internal/adapters/terraform/google/sql/adapt_test.go new file mode 100644 index 000000000000..89c1a58e95ff --- /dev/null +++ b/internal/adapters/terraform/google/sql/adapt_test.go @@ -0,0 +1,278 @@ +package sql + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/google/sql" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected sql.SQL + }{ + { + name: "default flags", + terraform: ` + resource "google_sql_database_instance" "db" { + database_version = "POSTGRES_12" + settings { + backup_configuration { + enabled = true + } + ip_configuration { + ipv4_enabled = false + authorized_networks { + value = "108.12.12.0/24" + name = "internal" + } + require_ssl = true + } + } + } +`, + expected: sql.SQL{ + Instances: []sql.DatabaseInstance{ + { + Metadata: defsecTypes.NewTestMetadata(), + IsReplica: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + DatabaseVersion: defsecTypes.String("POSTGRES_12", defsecTypes.NewTestMetadata()), + Settings: sql.Settings{ + Metadata: defsecTypes.NewTestMetadata(), + Backups: sql.Backups{ + 
Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + Flags: sql.Flags{ + Metadata: defsecTypes.NewTestMetadata(), + LogMinDurationStatement: defsecTypes.Int(-1, defsecTypes.NewTestMetadata()), + ContainedDatabaseAuthentication: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + CrossDBOwnershipChaining: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + LocalInFile: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + LogCheckpoints: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + LogConnections: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + LogDisconnections: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + LogLockWaits: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + LogMinMessages: defsecTypes.String("", defsecTypes.NewTestMetadata()), + LogTempFileSize: defsecTypes.Int(-1, defsecTypes.NewTestMetadata()), + }, + IPConfiguration: sql.IPConfiguration{ + Metadata: defsecTypes.NewTestMetadata(), + RequireTLS: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + EnableIPv4: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + AuthorizedNetworks: []struct { + Name defsecTypes.StringValue + CIDR defsecTypes.StringValue + }{ + { + Name: defsecTypes.String("internal", defsecTypes.NewTestMetadata()), + CIDR: defsecTypes.String("108.12.12.0/24", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptInstances(t *testing.T) { + tests := []struct { + name string + terraform string + expected []sql.DatabaseInstance + }{ + { + name: "all flags", + terraform: ` +resource "google_sql_database_instance" "backup_source_instance" { + name = "test-instance" + database_version = 
"POSTGRES_11" + + project = "test-project" + region = "europe-west6" + deletion_protection = false + settings { + tier = "db-f1-micro" + backup_configuration { + enabled = true + } + ip_configuration { + ipv4_enabled = false + private_network = "test-network" + require_ssl = true + } + database_flags { + name = "log_connections" + value = "on" + } + database_flags { + name = "log_temp_files" + value = "0" + } + database_flags { + name = "log_checkpoints" + value = "on" + } + database_flags { + name = "log_disconnections" + value = "on" + } + database_flags { + name = "log_lock_waits" + value = "on" + } + } +} + `, + expected: []sql.DatabaseInstance{ + { + Metadata: defsecTypes.NewTestMetadata(), + DatabaseVersion: defsecTypes.String("POSTGRES_11", defsecTypes.NewTestMetadata()), + IsReplica: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Settings: sql.Settings{ + Backups: sql.Backups{ + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + Flags: sql.Flags{ + LogConnections: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + LogTempFileSize: defsecTypes.Int(0, defsecTypes.NewTestMetadata()), + LogCheckpoints: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + LogDisconnections: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + LogLockWaits: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + ContainedDatabaseAuthentication: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + CrossDBOwnershipChaining: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + LocalInFile: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + LogMinDurationStatement: defsecTypes.Int(-1, defsecTypes.NewTestMetadata()), + LogMinMessages: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + IPConfiguration: sql.IPConfiguration{ + EnableIPv4: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + RequireTLS: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + } + + for _, test := range 
tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptInstances(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "google_sql_database_instance" "backup_source_instance" { + name = "test-instance" + database_version = "POSTGRES_11" + + settings { + backup_configuration { + enabled = true + } + + ip_configuration { + ipv4_enabled = false + require_ssl = true + authorized_networks { + name = "internal" + value = "108.12.12.0/24" + } + } + + database_flags { + name = "log_connections" + value = "on" + } + database_flags { + name = "log_temp_files" + value = "0" + } + database_flags { + name = "log_checkpoints" + value = "on" + } + database_flags { + name = "log_disconnections" + value = "on" + } + database_flags { + name = "log_lock_waits" + value = "on" + } + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Instances, 1) + instance := adapted.Instances[0] + + assert.Equal(t, 2, instance.Metadata.Range().GetStartLine()) + assert.Equal(t, 41, instance.Metadata.Range().GetEndLine()) + + assert.Equal(t, 4, instance.DatabaseVersion.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, instance.DatabaseVersion.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 6, instance.Settings.Metadata.Range().GetStartLine()) + assert.Equal(t, 40, instance.Settings.Metadata.Range().GetEndLine()) + + assert.Equal(t, 7, instance.Settings.Backups.Metadata.Range().GetStartLine()) + assert.Equal(t, 9, instance.Settings.Backups.Metadata.Range().GetEndLine()) + + assert.Equal(t, 8, instance.Settings.Backups.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 8, instance.Settings.Backups.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, instance.Settings.IPConfiguration.Metadata.Range().GetStartLine()) + 
assert.Equal(t, 18, instance.Settings.IPConfiguration.Metadata.Range().GetEndLine()) + + assert.Equal(t, 12, instance.Settings.IPConfiguration.EnableIPv4.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 12, instance.Settings.IPConfiguration.EnableIPv4.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 13, instance.Settings.IPConfiguration.RequireTLS.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 13, instance.Settings.IPConfiguration.RequireTLS.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 15, instance.Settings.IPConfiguration.AuthorizedNetworks[0].Name.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 15, instance.Settings.IPConfiguration.AuthorizedNetworks[0].Name.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 16, instance.Settings.IPConfiguration.AuthorizedNetworks[0].CIDR.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 16, instance.Settings.IPConfiguration.AuthorizedNetworks[0].CIDR.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 22, instance.Settings.Flags.LogConnections.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 22, instance.Settings.Flags.LogConnections.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 25, instance.Settings.Flags.LogTempFileSize.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 25, instance.Settings.Flags.LogTempFileSize.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 34, instance.Settings.Flags.LogDisconnections.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 34, instance.Settings.Flags.LogDisconnections.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 38, instance.Settings.Flags.LogLockWaits.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 38, instance.Settings.Flags.LogLockWaits.GetMetadata().Range().GetEndLine()) + +} diff --git a/internal/adapters/terraform/google/storage/adapt.go b/internal/adapters/terraform/google/storage/adapt.go new file mode 100644 index 000000000000..36aff1b0e2a9 --- /dev/null +++ 
b/internal/adapters/terraform/google/storage/adapt.go @@ -0,0 +1,129 @@ +package storage + +import ( + "github.com/aquasecurity/defsec/pkg/providers/google/storage" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) storage.Storage { + return storage.Storage{ + Buckets: (&adapter{modules: modules}).adaptBuckets(), + } +} + +type adapter struct { + modules terraform.Modules + bindings []parentedBinding + members []parentedMember + bindingMap terraform.ResourceIDResolutions + memberMap terraform.ResourceIDResolutions +} + +func (a *adapter) adaptBuckets() []storage.Bucket { + + a.bindingMap = a.modules.GetChildResourceIDMapByType("google_storage_bucket_iam_binding", "google_storage_bucket_iam_policy") + a.memberMap = a.modules.GetChildResourceIDMapByType("google_storage_bucket_iam_member") + + a.adaptMembers() + a.adaptBindings() + + var buckets []storage.Bucket + for _, module := range a.modules { + for _, resource := range module.GetResourcesByType("google_storage_bucket") { + buckets = append(buckets, a.adaptBucketResource(resource)) + } + } + + orphanage := storage.Bucket{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Name: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + Location: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + EnableUniformBucketLevelAccess: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + Members: nil, + Bindings: nil, + } + for _, orphanedBindingID := range a.bindingMap.Orphans() { + for _, binding := range a.bindings { + if binding.blockID == orphanedBindingID { + orphanage.Bindings = append(orphanage.Bindings, binding.bindings...) 
+ break + } + } + } + for _, orphanedMemberID := range a.memberMap.Orphans() { + for _, member := range a.members { + if member.blockID == orphanedMemberID { + orphanage.Members = append(orphanage.Members, member.member) + break + } + } + } + if len(orphanage.Bindings) > 0 || len(orphanage.Members) > 0 { + buckets = append(buckets, orphanage) + } + + return buckets +} + +func (a *adapter) adaptBucketResource(resourceBlock *terraform.Block) storage.Bucket { + + nameAttr := resourceBlock.GetAttribute("name") + nameValue := nameAttr.AsStringValueOrDefault("", resourceBlock) + + locationAttr := resourceBlock.GetAttribute("location") + locationValue := locationAttr.AsStringValueOrDefault("", resourceBlock) + + // See https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/storage_bucket#uniform_bucket_level_access + ublaAttr := resourceBlock.GetAttribute("uniform_bucket_level_access") + ublaValue := ublaAttr.AsBoolValueOrDefault(false, resourceBlock) + + bucket := storage.Bucket{ + Metadata: resourceBlock.GetMetadata(), + Name: nameValue, + Location: locationValue, + EnableUniformBucketLevelAccess: ublaValue, + Members: nil, + Bindings: nil, + Encryption: storage.BucketEncryption{ + Metadata: resourceBlock.GetMetadata(), + DefaultKMSKeyName: defsecTypes.StringDefault("", resourceBlock.GetMetadata()), + }, + } + + if encBlock := resourceBlock.GetBlock("encryption"); encBlock.IsNotNil() { + bucket.Encryption.Metadata = encBlock.GetMetadata() + kmsKeyNameAttr := encBlock.GetAttribute("default_kms_key_name") + bucket.Encryption.DefaultKMSKeyName = kmsKeyNameAttr.AsStringValueOrDefault("", encBlock) + } + + var name string + if nameAttr.IsString() { + name = nameAttr.Value().AsString() + } + + for _, member := range a.members { + if member.bucketBlockID == resourceBlock.ID() { + bucket.Members = append(bucket.Members, member.member) + a.memberMap.Resolve(member.blockID) + continue + } + if name != "" && name == member.bucketID { + bucket.Members = 
append(bucket.Members, member.member) + a.memberMap.Resolve(member.blockID) + } + } + for _, binding := range a.bindings { + if binding.bucketBlockID == resourceBlock.ID() { + bucket.Bindings = append(bucket.Bindings, binding.bindings...) + a.bindingMap.Resolve(binding.blockID) + continue + } + if name != "" && name == binding.bucketID { + bucket.Bindings = append(bucket.Bindings, binding.bindings...) + a.bindingMap.Resolve(binding.blockID) + } + } + + return bucket +} diff --git a/internal/adapters/terraform/google/storage/adapt_test.go b/internal/adapters/terraform/google/storage/adapt_test.go new file mode 100644 index 000000000000..069d39e1051a --- /dev/null +++ b/internal/adapters/terraform/google/storage/adapt_test.go @@ -0,0 +1,198 @@ +package storage + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/google/iam" + "github.com/aquasecurity/defsec/pkg/providers/google/storage" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected storage.Storage + }{ + { + name: "defined", + terraform: ` + resource "google_storage_bucket" "static-site" { + name = "image-store.com" + location = "EU" + uniform_bucket_level_access = true + + encryption { + default_kms_key_name = "default-kms-key-name" + } + } + + resource "google_storage_bucket_iam_binding" "binding" { + bucket = google_storage_bucket.static-site.name + role = "roles/storage.admin #1" + members = [ + "group:test@example.com", + ] + } + + resource "google_storage_bucket_iam_member" "example" { + member = "serviceAccount:test@example.com" + bucket = google_storage_bucket.static-site.name + role = "roles/storage.admin #2" + }`, 
+ expected: storage.Storage{ + Buckets: []storage.Bucket{ + { + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("image-store.com", defsecTypes.NewTestMetadata()), + Location: defsecTypes.String("EU", defsecTypes.NewTestMetadata()), + EnableUniformBucketLevelAccess: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Bindings: []iam.Binding{ + { + Metadata: defsecTypes.NewTestMetadata(), + Members: []defsecTypes.StringValue{ + defsecTypes.String("group:test@example.com", defsecTypes.NewTestMetadata()), + }, + Role: defsecTypes.String("roles/storage.admin #1", defsecTypes.NewTestMetadata()), + IncludesDefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + Members: []iam.Member{ + { + Metadata: defsecTypes.NewTestMetadata(), + Member: defsecTypes.String("serviceAccount:test@example.com", defsecTypes.NewTestMetadata()), + Role: defsecTypes.String("roles/storage.admin #2", defsecTypes.NewTestMetadata()), + DefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + Encryption: storage.BucketEncryption{ + Metadata: defsecTypes.NewTestMetadata(), + DefaultKMSKeyName: defsecTypes.String("default-kms-key-name", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + { + name: "defaults", + terraform: ` + resource "google_storage_bucket" "static-site" { + } + + resource "google_storage_bucket_iam_binding" "binding" { + bucket = google_storage_bucket.static-site.name + } + + resource "google_storage_bucket_iam_member" "example" { + bucket = google_storage_bucket.static-site.name + }`, + expected: storage.Storage{ + Buckets: []storage.Bucket{ + { + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Location: defsecTypes.String("", defsecTypes.NewTestMetadata()), + EnableUniformBucketLevelAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Bindings: []iam.Binding{ + { + Metadata: defsecTypes.NewTestMetadata(), + 
Role: defsecTypes.String("", defsecTypes.NewTestMetadata()), + IncludesDefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + Members: []iam.Member{ + { + Metadata: defsecTypes.NewTestMetadata(), + Member: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Role: defsecTypes.String("", defsecTypes.NewTestMetadata()), + DefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + Encryption: storage.BucketEncryption{ + Metadata: defsecTypes.NewTestMetadata(), + DefaultKMSKeyName: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "google_storage_bucket" "static-site" { + name = "image-store.com" + location = "EU" + uniform_bucket_level_access = true + } + + resource "google_storage_bucket_iam_binding" "binding" { + bucket = google_storage_bucket.static-site.name + role = "roles/storage.admin #1" + members = [ + "group:test@example.com", + ] + } + + resource "google_storage_bucket_iam_member" "example" { + member = "serviceAccount:test@example.com" + bucket = google_storage_bucket.static-site.name + role = "roles/storage.admin #2" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Buckets, 1) + require.Len(t, adapted.Buckets[0].Bindings, 1) + require.Len(t, adapted.Buckets[0].Members, 1) + + bucket := adapted.Buckets[0] + binding := adapted.Buckets[0].Bindings[0] + member := adapted.Buckets[0].Members[0] + + assert.Equal(t, 2, bucket.Metadata.Range().GetStartLine()) + assert.Equal(t, 6, bucket.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, bucket.Name.GetMetadata().Range().GetStartLine()) + 
assert.Equal(t, 3, bucket.Name.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, bucket.Location.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, bucket.Location.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 5, bucket.EnableUniformBucketLevelAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 5, bucket.EnableUniformBucketLevelAccess.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 8, binding.Metadata.Range().GetStartLine()) + assert.Equal(t, 14, binding.Metadata.Range().GetEndLine()) + + assert.Equal(t, 10, binding.Role.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, binding.Role.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, binding.Members[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 13, binding.Members[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 16, member.Metadata.Range().GetStartLine()) + assert.Equal(t, 20, member.Metadata.Range().GetEndLine()) + + assert.Equal(t, 17, member.Member.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 17, member.Member.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 19, member.Role.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 19, member.Role.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/google/storage/iam.go b/internal/adapters/terraform/google/storage/iam.go new file mode 100644 index 000000000000..24b88a657f94 --- /dev/null +++ b/internal/adapters/terraform/google/storage/iam.go @@ -0,0 +1,96 @@ +package storage + +import ( + iamTypes "github.com/aquasecurity/defsec/pkg/providers/google/iam" + "github.com/aquasecurity/trivy/internal/adapters/terraform/google/iam" +) + +type parentedBinding struct { + blockID string + bucketID string + bucketBlockID string + bindings []iamTypes.Binding +} + +type parentedMember struct { + blockID string + bucketID string + bucketBlockID string + member iamTypes.Member +} + +func (a *adapter) adaptBindings() { + + for _, 
iamBlock := range a.modules.GetResourcesByType("google_storage_bucket_iam_policy") { + var parented parentedBinding + parented.blockID = iamBlock.ID() + + bucketAttr := iamBlock.GetAttribute("bucket") + if bucketAttr.IsString() { + parented.bucketID = bucketAttr.Value().AsString() + } + + if refBlock, err := a.modules.GetReferencedBlock(bucketAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_storage_bucket" { + parented.bucketBlockID = refBlock.ID() + } + } + + policyAttr := iamBlock.GetAttribute("policy_data") + if policyAttr.IsNil() { + continue + } + + policyBlock, err := a.modules.GetReferencedBlock(policyAttr, iamBlock) + if err != nil { + continue + } + + parented.bindings = iam.ParsePolicyBlock(policyBlock) + a.bindings = append(a.bindings, parented) + } + + for _, iamBlock := range a.modules.GetResourcesByType("google_storage_bucket_iam_binding") { + + var parented parentedBinding + parented.blockID = iamBlock.ID() + parented.bindings = []iamTypes.Binding{iam.AdaptBinding(iamBlock, a.modules)} + + bucketAttr := iamBlock.GetAttribute("bucket") + if bucketAttr.IsString() { + parented.bucketID = bucketAttr.Value().AsString() + } + + if refBlock, err := a.modules.GetReferencedBlock(bucketAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_storage_bucket" { + parented.bucketBlockID = refBlock.ID() + } + } + + a.bindings = append(a.bindings, parented) + } +} + +func (a *adapter) adaptMembers() { + + for _, iamBlock := range a.modules.GetResourcesByType("google_storage_bucket_iam_member") { + + var parented parentedMember + parented.blockID = iamBlock.ID() + parented.member = iam.AdaptMember(iamBlock, a.modules) + + bucketAttr := iamBlock.GetAttribute("bucket") + if bucketAttr.IsString() { + parented.bucketID = bucketAttr.Value().AsString() + } + + if refBlock, err := a.modules.GetReferencedBlock(bucketAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_storage_bucket" { + parented.bucketBlockID = refBlock.ID() + } + 
} + + a.members = append(a.members, parented) + } + +} diff --git a/internal/adapters/terraform/kubernetes/adapt.go b/internal/adapters/terraform/kubernetes/adapt.go new file mode 100644 index 000000000000..459a4d823e4d --- /dev/null +++ b/internal/adapters/terraform/kubernetes/adapt.go @@ -0,0 +1,123 @@ +package kubernetes + +import ( + "regexp" + "strings" + + "github.com/aquasecurity/defsec/pkg/providers/kubernetes" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +var versionRegex = regexp.MustCompile(`^v\d+(beta\d+)?$`) + +func Adapt(modules terraform.Modules) kubernetes.Kubernetes { + return kubernetes.Kubernetes{ + NetworkPolicies: adaptNetworkPolicies(modules), + } +} + +func adaptNetworkPolicies(modules terraform.Modules) []kubernetes.NetworkPolicy { + var networkPolicies []kubernetes.NetworkPolicy + for _, module := range modules { + for _, resource := range getBlocksIgnoreVersion(module, "resource", "kubernetes_network_policy") { + networkPolicies = append(networkPolicies, adaptNetworkPolicy(resource)) + } + } + return networkPolicies +} + +func adaptNetworkPolicy(resourceBlock *terraform.Block) kubernetes.NetworkPolicy { + + policy := kubernetes.NetworkPolicy{ + Metadata: resourceBlock.GetMetadata(), + Spec: kubernetes.NetworkPolicySpec{ + Metadata: resourceBlock.GetMetadata(), + Egress: kubernetes.Egress{ + Metadata: resourceBlock.GetMetadata(), + Ports: nil, + DestinationCIDRs: nil, + }, + Ingress: kubernetes.Ingress{ + Metadata: resourceBlock.GetMetadata(), + Ports: nil, + SourceCIDRs: nil, + }, + }, + } + + if specBlock := resourceBlock.GetBlock("spec"); specBlock.IsNotNil() { + if egressBlock := specBlock.GetBlock("egress"); egressBlock.IsNotNil() { + policy.Spec.Egress.Metadata = egressBlock.GetMetadata() + for _, port := range egressBlock.GetBlocks("ports") { + numberAttr := port.GetAttribute("number") + numberVal := numberAttr.AsStringValueOrDefault("", port) + + protocolAttr := port.GetAttribute("protocol") + protocolVal := 
protocolAttr.AsStringValueOrDefault("", port) + + policy.Spec.Egress.Ports = append(policy.Spec.Egress.Ports, kubernetes.Port{ + Metadata: port.GetMetadata(), + Number: numberVal, + Protocol: protocolVal, + }) + } + + for _, to := range egressBlock.GetBlocks("to") { + cidrAtrr := to.GetBlock("ip_block").GetAttribute("cidr") + cidrVal := cidrAtrr.AsStringValueOrDefault("", to) + + policy.Spec.Egress.DestinationCIDRs = append(policy.Spec.Egress.DestinationCIDRs, cidrVal) + } + } + + if ingressBlock := specBlock.GetBlock("ingress"); ingressBlock.IsNotNil() { + policy.Spec.Ingress.Metadata = ingressBlock.GetMetadata() + for _, port := range ingressBlock.GetBlocks("ports") { + numberAttr := port.GetAttribute("number") + numberVal := numberAttr.AsStringValueOrDefault("", port) + + protocolAttr := port.GetAttribute("protocol") + protocolVal := protocolAttr.AsStringValueOrDefault("", port) + + policy.Spec.Ingress.Ports = append(policy.Spec.Ingress.Ports, kubernetes.Port{ + Metadata: port.GetMetadata(), + Number: numberVal, + Protocol: protocolVal, + }) + } + + for _, from := range ingressBlock.GetBlocks("from") { + cidrAtrr := from.GetBlock("ip_block").GetAttribute("cidr") + cidrVal := cidrAtrr.AsStringValueOrDefault("", from) + + policy.Spec.Ingress.SourceCIDRs = append(policy.Spec.Ingress.SourceCIDRs, cidrVal) + } + } + } + + return policy +} + +// https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/guides/versioned-resources +func getBlocksIgnoreVersion(module *terraform.Module, blockType string, resourceType string) terraform.Blocks { + var res terraform.Blocks + for _, block := range module.GetBlocks().OfType(blockType) { + if isMatchingTypeLabel(block.TypeLabel(), resourceType) { + res = append(res, block) + } + } + return res +} + +func isMatchingTypeLabel(typeLabel string, resourceType string) bool { + if typeLabel == resourceType { + return true + } + + versionPart, found := strings.CutPrefix(typeLabel, resourceType+"_") + if !found { + return 
false + } + + return versionRegex.MatchString(versionPart) +} diff --git a/internal/adapters/terraform/kubernetes/adapt_test.go b/internal/adapters/terraform/kubernetes/adapt_test.go new file mode 100644 index 000000000000..eea390bd2e01 --- /dev/null +++ b/internal/adapters/terraform/kubernetes/adapt_test.go @@ -0,0 +1,60 @@ +package kubernetes + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestIsMatchingTypeLabel(t *testing.T) { + tests := []struct { + name string + typeLabel string + resourceType string + expected bool + }{ + { + name: "without version", + typeLabel: "kubernetes_network_policy", + resourceType: "kubernetes_network_policy", + expected: true, + }, + { + name: "v1", + typeLabel: "kubernetes_network_policy_v1", + resourceType: "kubernetes_network_policy", + expected: true, + }, + { + name: "beta version", + typeLabel: "kubernetes_horizontal_pod_autoscaler_v2beta2", + resourceType: "kubernetes_horizontal_pod_autoscaler", + expected: true, + }, + { + name: "another type of resource", + typeLabel: "kubernetes_network_policy", + resourceType: "kubernetes_horizontal_pod_autoscaler", + expected: false, + }, + { + name: "similar resource type", + typeLabel: "kubernetes_network_policy_test_v1", + resourceType: "kubernetes_network_policy", + expected: false, + }, + { + name: "empty resource type", + typeLabel: "kubernetes_network_policy_test_v1", + resourceType: "", + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := isMatchingTypeLabel(tt.typeLabel, tt.resourceType) + assert.Equal(t, tt.expected, got) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/computing/adapt.go b/internal/adapters/terraform/nifcloud/computing/adapt.go new file mode 100644 index 000000000000..e58f4e2bb59e --- /dev/null +++ b/internal/adapters/terraform/nifcloud/computing/adapt.go @@ -0,0 +1,16 @@ +package computing + +import ( + 
"github.com/aquasecurity/defsec/pkg/providers/nifcloud/computing" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) computing.Computing { + + sgAdapter := sgAdapter{sgRuleIDs: modules.GetChildResourceIDMapByType("nifcloud_security_group_rule")} + + return computing.Computing{ + SecurityGroups: sgAdapter.adaptSecurityGroups(modules), + Instances: adaptInstances(modules), + } +} diff --git a/internal/adapters/terraform/nifcloud/computing/adapt_test.go b/internal/adapters/terraform/nifcloud/computing/adapt_test.go new file mode 100644 index 000000000000..d92848402304 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/computing/adapt_test.go @@ -0,0 +1,61 @@ +package computing + +import ( + "testing" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestLines(t *testing.T) { + src := ` +resource "nifcloud_instance" "example" { + security_group = nifcloud_security_group.example.group_name + + network_interface { + network_id = "net-COMMON_PRIVATE" + } +} + +resource "nifcloud_security_group" "example" { + group_name = "example" + description = "memo" +} + +resource "nifcloud_security_group_rule" "example" { + type = "IN" + security_group_names = [nifcloud_security_group.example.group_name] + from_port = 22 + to_port = 22 + protocol = "TCP" + description = "memo" + cidr_ip = "1.2.3.4/32" +} +` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Instances, 1) + require.Len(t, adapted.SecurityGroups, 1) + + instance := adapted.Instances[0] + sg := adapted.SecurityGroups[0] + + assert.Equal(t, 3, instance.SecurityGroup.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, instance.SecurityGroup.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 6, 
instance.NetworkInterfaces[0].NetworkID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, instance.NetworkInterfaces[0].NetworkID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 12, sg.Description.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 12, sg.Description.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 21, sg.IngressRules[0].Description.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 21, sg.IngressRules[0].Description.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 22, sg.IngressRules[0].CIDR.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 22, sg.IngressRules[0].CIDR.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/nifcloud/computing/instance.go b/internal/adapters/terraform/nifcloud/computing/instance.go new file mode 100644 index 000000000000..afb91e58f1fb --- /dev/null +++ b/internal/adapters/terraform/nifcloud/computing/instance.go @@ -0,0 +1,35 @@ +package computing + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/computing" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func adaptInstances(modules terraform.Modules) []computing.Instance { + var instances []computing.Instance + + for _, resource := range modules.GetResourcesByType("nifcloud_instance") { + instances = append(instances, adaptInstance(resource)) + } + return instances +} + +func adaptInstance(resource *terraform.Block) computing.Instance { + var networkInterfaces []computing.NetworkInterface + networkInterfaceBlocks := resource.GetBlocks("network_interface") + for _, networkInterfaceBlock := range networkInterfaceBlocks { + networkInterfaces = append( + networkInterfaces, + computing.NetworkInterface{ + Metadata: networkInterfaceBlock.GetMetadata(), + NetworkID: networkInterfaceBlock.GetAttribute("network_id").AsStringValueOrDefault("", resource), + }, + ) + } + + return computing.Instance{ + Metadata: resource.GetMetadata(), + SecurityGroup: 
resource.GetAttribute("security_group").AsStringValueOrDefault("", resource), + NetworkInterfaces: networkInterfaces, + } +} diff --git a/internal/adapters/terraform/nifcloud/computing/instance_test.go b/internal/adapters/terraform/nifcloud/computing/instance_test.go new file mode 100644 index 000000000000..a8316b77b231 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/computing/instance_test.go @@ -0,0 +1,71 @@ +package computing + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/computing" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +func Test_adaptInstances(t *testing.T) { + tests := []struct { + name string + terraform string + expected []computing.Instance + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_instance" "my_example" { + security_group = "example-security-group" + network_interface { + network_id = "net-COMMON_PRIVATE" + } + } +`, + expected: []computing.Instance{{ + Metadata: defsecTypes.NewTestMetadata(), + SecurityGroup: defsecTypes.String("example-security-group", defsecTypes.NewTestMetadata()), + NetworkInterfaces: []computing.NetworkInterface{ + { + Metadata: defsecTypes.NewTestMetadata(), + NetworkID: defsecTypes.String("net-COMMON_PRIVATE", defsecTypes.NewTestMetadata()), + }, + }, + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_instance" "my_example" { + network_interface { + } + } +`, + + expected: []computing.Instance{{ + Metadata: defsecTypes.NewTestMetadata(), + SecurityGroup: defsecTypes.String("", defsecTypes.NewTestMetadata()), + NetworkInterfaces: []computing.NetworkInterface{ + { + Metadata: defsecTypes.NewTestMetadata(), + NetworkID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t 
*testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptInstances(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/computing/security_group.go b/internal/adapters/terraform/nifcloud/computing/security_group.go new file mode 100644 index 000000000000..575a02dd5723 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/computing/security_group.go @@ -0,0 +1,76 @@ +package computing + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/computing" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +type sgAdapter struct { + sgRuleIDs terraform.ResourceIDResolutions +} + +func (a *sgAdapter) adaptSecurityGroups(modules terraform.Modules) []computing.SecurityGroup { + var securityGroups []computing.SecurityGroup + for _, resource := range modules.GetResourcesByType("nifcloud_security_group") { + securityGroups = append(securityGroups, a.adaptSecurityGroup(resource, modules)) + } + orphanResources := modules.GetResourceByIDs(a.sgRuleIDs.Orphans()...) 
+ if len(orphanResources) > 0 { + orphanage := computing.SecurityGroup{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Description: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + IngressRules: nil, + } + for _, sgRule := range orphanResources { + if sgRule.GetAttribute("type").Equals("IN") { + orphanage.IngressRules = append(orphanage.IngressRules, adaptSGRule(sgRule, modules)) + } + if sgRule.GetAttribute("type").Equals("OUT") { + orphanage.EgressRules = append(orphanage.EgressRules, adaptSGRule(sgRule, modules)) + } + } + securityGroups = append(securityGroups, orphanage) + } + + return securityGroups +} + +func (a *sgAdapter) adaptSecurityGroup(resource *terraform.Block, module terraform.Modules) computing.SecurityGroup { + var ingressRules, egressRules []computing.SecurityGroupRule + + descriptionAttr := resource.GetAttribute("description") + descriptionVal := descriptionAttr.AsStringValueOrDefault("", resource) + + rulesBlocks := module.GetReferencingResources(resource, "nifcloud_security_group_rule", "security_group_names") + for _, ruleBlock := range rulesBlocks { + a.sgRuleIDs.Resolve(ruleBlock.ID()) + if ruleBlock.GetAttribute("type").Equals("IN") { + ingressRules = append(ingressRules, adaptSGRule(ruleBlock, module)) + } + if ruleBlock.GetAttribute("type").Equals("OUT") { + egressRules = append(egressRules, adaptSGRule(ruleBlock, module)) + } + } + + return computing.SecurityGroup{ + Metadata: resource.GetMetadata(), + Description: descriptionVal, + IngressRules: ingressRules, + EgressRules: egressRules, + } +} + +func adaptSGRule(resource *terraform.Block, modules terraform.Modules) computing.SecurityGroupRule { + ruleDescAttr := resource.GetAttribute("description") + ruleDescVal := ruleDescAttr.AsStringValueOrDefault("", resource) + + cidrAttr := resource.GetAttribute("cidr_ip") + cidrVal := cidrAttr.AsStringValueOrDefault("", resource) + + return computing.SecurityGroupRule{ + Metadata: resource.GetMetadata(), + Description: 
ruleDescVal, + CIDR: cidrVal, + } +} diff --git a/internal/adapters/terraform/nifcloud/computing/security_group_test.go b/internal/adapters/terraform/nifcloud/computing/security_group_test.go new file mode 100644 index 000000000000..026d6cd451b7 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/computing/security_group_test.go @@ -0,0 +1,86 @@ +package computing + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/computing" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +func Test_adaptSecurityGroups(t *testing.T) { + tests := []struct { + name string + terraform string + expected []computing.SecurityGroup + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_security_group" "example" { + group_name = "example" + description = "memo" + } + + resource "nifcloud_security_group_rule" "example" { + type = "IN" + security_group_names = [nifcloud_security_group.example.group_name] + from_port = 22 + to_port = 22 + protocol = "TCP" + description = "memo" + cidr_ip = "1.2.3.4/32" + } +`, + expected: []computing.SecurityGroup{{ + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("memo", defsecTypes.NewTestMetadata()), + IngressRules: []computing.SecurityGroupRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + CIDR: defsecTypes.String("1.2.3.4/32", defsecTypes.NewTestMetadata()), + Description: defsecTypes.String("memo", defsecTypes.NewTestMetadata()), + }, + }, + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_security_group" "example" { + } + + resource "nifcloud_security_group_rule" "example" { + type = "IN" + security_group_names = [nifcloud_security_group.example.group_name] + } + +`, + + expected: []computing.SecurityGroup{{ + Metadata: defsecTypes.NewTestMetadata(), + Description: 
defsecTypes.String("", defsecTypes.NewTestMetadata()), + IngressRules: []computing.SecurityGroupRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + CIDR: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Description: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + sgAdapter := sgAdapter{sgRuleIDs: modules.GetChildResourceIDMapByType("nifcloud_security_group_rule")} + adapted := sgAdapter.adaptSecurityGroups(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/dns/adapt.go b/internal/adapters/terraform/nifcloud/dns/adapt.go new file mode 100644 index 000000000000..5abe9697a8a7 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/dns/adapt.go @@ -0,0 +1,12 @@ +package dns + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/dns" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) dns.DNS { + return dns.DNS{ + Records: adaptRecords(modules), + } +} diff --git a/internal/adapters/terraform/nifcloud/dns/adapt_test.go b/internal/adapters/terraform/nifcloud/dns/adapt_test.go new file mode 100644 index 000000000000..e5e60e9d9853 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/dns/adapt_test.go @@ -0,0 +1,32 @@ +package dns + +import ( + "testing" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestLines(t *testing.T) { + src := ` +resource "nifcloud_dns_record" "example" { + type = "A" + record = "example-record" +} +` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Records, 1) + + record := adapted.Records[0] + + 
assert.Equal(t, 3, record.Type.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, record.Type.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, record.Record.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, record.Record.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/nifcloud/dns/record.go b/internal/adapters/terraform/nifcloud/dns/record.go new file mode 100644 index 000000000000..c0fdcc010a78 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/dns/record.go @@ -0,0 +1,23 @@ +package dns + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/dns" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func adaptRecords(modules terraform.Modules) []dns.Record { + var records []dns.Record + + for _, resource := range modules.GetResourcesByType("nifcloud_dns_record") { + records = append(records, adaptRecord(resource)) + } + return records +} + +func adaptRecord(resource *terraform.Block) dns.Record { + return dns.Record{ + Metadata: resource.GetMetadata(), + Record: resource.GetAttribute("record").AsStringValueOrDefault("", resource), + Type: resource.GetAttribute("type").AsStringValueOrDefault("", resource), + } +} diff --git a/internal/adapters/terraform/nifcloud/dns/record_test.go b/internal/adapters/terraform/nifcloud/dns/record_test.go new file mode 100644 index 000000000000..65867c90b579 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/dns/record_test.go @@ -0,0 +1,56 @@ +package dns + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/dns" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +func Test_adaptRecords(t *testing.T) { + tests := []struct { + name string + terraform string + expected []dns.Record + }{ + { + name: "configured", + terraform: ` + 
resource "nifcloud_dns_record" "example" { + type = "A" + record = "example-record" + } +`, + expected: []dns.Record{{ + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String("A", defsecTypes.NewTestMetadata()), + Record: defsecTypes.String("example-record", defsecTypes.NewTestMetadata()), + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_dns_record" "example" { + } +`, + + expected: []dns.Record{{ + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Record: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptRecords(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/nas/adapt.go b/internal/adapters/terraform/nifcloud/nas/adapt.go new file mode 100644 index 000000000000..615eac7a8df6 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/nas/adapt.go @@ -0,0 +1,13 @@ +package nas + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/nas" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) nas.NAS { + return nas.NAS{ + NASSecurityGroups: adaptNASSecurityGroups(modules), + NASInstances: adaptNASInstances(modules), + } +} diff --git a/internal/adapters/terraform/nifcloud/nas/adapt_test.go b/internal/adapters/terraform/nifcloud/nas/adapt_test.go new file mode 100644 index 000000000000..0998303dbed6 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/nas/adapt_test.go @@ -0,0 +1,44 @@ +package nas + +import ( + "testing" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestLines(t *testing.T) { + src := ` 
+resource "nifcloud_nas_instance" "example" { + network_id = "example-network" +} + +resource "nifcloud_nas_security_group" "example" { + description = "memo" + + rule { + cidr_ip = "0.0.0.0/0" + } +} +` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.NASInstances, 1) + require.Len(t, adapted.NASSecurityGroups, 1) + + nasInstance := adapted.NASInstances[0] + nasSecurityGroup := adapted.NASSecurityGroups[0] + + assert.Equal(t, 3, nasInstance.NetworkID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, nasInstance.NetworkID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 7, nasSecurityGroup.Description.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, nasSecurityGroup.Description.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 10, nasSecurityGroup.CIDRs[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, nasSecurityGroup.CIDRs[0].GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/nifcloud/nas/nas_instance.go b/internal/adapters/terraform/nifcloud/nas/nas_instance.go new file mode 100644 index 000000000000..e8558c5f42b5 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/nas/nas_instance.go @@ -0,0 +1,22 @@ +package nas + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/nas" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func adaptNASInstances(modules terraform.Modules) []nas.NASInstance { + var nasInstances []nas.NASInstance + + for _, resource := range modules.GetResourcesByType("nifcloud_nas_instance") { + nasInstances = append(nasInstances, adaptNASInstance(resource)) + } + return nasInstances +} + +func adaptNASInstance(resource *terraform.Block) nas.NASInstance { + return nas.NASInstance{ + Metadata: resource.GetMetadata(), + NetworkID: resource.GetAttribute("network_id").AsStringValueOrDefault("net-COMMON_PRIVATE", resource), + } +} diff --git 
a/internal/adapters/terraform/nifcloud/nas/nas_instance_test.go b/internal/adapters/terraform/nifcloud/nas/nas_instance_test.go new file mode 100644 index 000000000000..450b008fa7e9 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/nas/nas_instance_test.go @@ -0,0 +1,54 @@ +package nas + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/nas" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +func Test_adaptNASInstances(t *testing.T) { + tests := []struct { + name string + terraform string + expected []nas.NASInstance + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_nas_instance" "example" { + network_id = "example-network" + } +`, + expected: []nas.NASInstance{{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkID: defsecTypes.String("example-network", defsecTypes.NewTestMetadata()), + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_nas_instance" "example" { + } +`, + + expected: []nas.NASInstance{{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkID: defsecTypes.String("net-COMMON_PRIVATE", defsecTypes.NewTestMetadata()), + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptNASInstances(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/nas/nas_security_group.go b/internal/adapters/terraform/nifcloud/nas/nas_security_group.go new file mode 100644 index 000000000000..e7dfd593d8dd --- /dev/null +++ b/internal/adapters/terraform/nifcloud/nas/nas_security_group.go @@ -0,0 +1,30 @@ +package nas + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/nas" + 
"github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func adaptNASSecurityGroups(modules terraform.Modules) []nas.NASSecurityGroup { + var nasSecurityGroups []nas.NASSecurityGroup + + for _, resource := range modules.GetResourcesByType("nifcloud_nas_security_group") { + nasSecurityGroups = append(nasSecurityGroups, adaptNASSecurityGroup(resource)) + } + return nasSecurityGroups +} + +func adaptNASSecurityGroup(resource *terraform.Block) nas.NASSecurityGroup { + var cidrs []defsecTypes.StringValue + + for _, rule := range resource.GetBlocks("rule") { + cidrs = append(cidrs, rule.GetAttribute("cidr_ip").AsStringValueOrDefault("", resource)) + } + + return nas.NASSecurityGroup{ + Metadata: resource.GetMetadata(), + Description: resource.GetAttribute("description").AsStringValueOrDefault("", resource), + CIDRs: cidrs, + } +} diff --git a/internal/adapters/terraform/nifcloud/nas/nas_security_group_test.go b/internal/adapters/terraform/nifcloud/nas/nas_security_group_test.go new file mode 100644 index 000000000000..1f64131f5a68 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/nas/nas_security_group_test.go @@ -0,0 +1,66 @@ +package nas + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/nas" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +func Test_adaptNASSecurityGroups(t *testing.T) { + tests := []struct { + name string + terraform string + expected []nas.NASSecurityGroup + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_nas_security_group" "example" { + description = "memo" + + rule { + cidr_ip = "0.0.0.0/0" + } + } +`, + expected: []nas.NASSecurityGroup{{ + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("memo", 
defsecTypes.NewTestMetadata()), + CIDRs: []defsecTypes.StringValue{ + defsecTypes.String("0.0.0.0/0", defsecTypes.NewTestMetadata()), + }, + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_nas_security_group" "example" { + rule { + } + } +`, + + expected: []nas.NASSecurityGroup{{ + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("", defsecTypes.NewTestMetadata()), + CIDRs: []defsecTypes.StringValue{ + defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptNASSecurityGroups(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/network/adapt.go b/internal/adapters/terraform/nifcloud/network/adapt.go new file mode 100644 index 000000000000..4c1c10acd1b9 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/adapt.go @@ -0,0 +1,16 @@ +package network + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) network.Network { + + return network.Network{ + ElasticLoadBalancers: adaptElasticLoadBalancers(modules), + LoadBalancers: adaptLoadBalancers(modules), + Routers: adaptRouters(modules), + VpnGateways: adaptVpnGateways(modules), + } +} diff --git a/internal/adapters/terraform/nifcloud/network/adapt_test.go b/internal/adapters/terraform/nifcloud/network/adapt_test.go new file mode 100644 index 000000000000..9255e7e16d3b --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/adapt_test.go @@ -0,0 +1,83 @@ +package network + +import ( + "testing" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" 
+) + +func TestLines(t *testing.T) { + src := ` +resource "nifcloud_elb" "example" { + protocol = "HTTP" + + network_interface { + network_id = "net-COMMON_PRIVATE" + is_vip_network = false + } +} + +resource "nifcloud_load_balancer" "example" { + ssl_policy_id = "example-ssl-policy-id" + load_balancer_port = 8080 +} + +resource "nifcloud_router" "example" { + security_group = nifcloud_security_group.example.group_name + + network_interface { + network_id = "net-COMMON_PRIVATE" + } +} + +resource "nifcloud_security_group" "example" { + group_name = "example" + description = "memo" +} + +resource "nifcloud_vpn_gateway" "example" { + security_group = nifcloud_security_group.example.group_name +} +` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.ElasticLoadBalancers, 1) + require.Len(t, adapted.LoadBalancers, 1) + require.Len(t, adapted.Routers, 1) + require.Len(t, adapted.VpnGateways, 1) + + elb := adapted.ElasticLoadBalancers[0] + lb := adapted.LoadBalancers[0] + router := adapted.Routers[0] + vpngw := adapted.VpnGateways[0] + + assert.Equal(t, 3, elb.Listeners[0].Protocol.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, elb.Listeners[0].Protocol.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 6, elb.NetworkInterfaces[0].NetworkID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, elb.NetworkInterfaces[0].NetworkID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 7, elb.NetworkInterfaces[0].IsVipNetwork.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, elb.NetworkInterfaces[0].IsVipNetwork.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 12, lb.Listeners[0].TLSPolicy.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 12, lb.Listeners[0].TLSPolicy.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 13, lb.Listeners[0].Protocol.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 13, 
lb.Listeners[0].Protocol.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 17, router.SecurityGroup.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 17, router.SecurityGroup.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 20, router.NetworkInterfaces[0].NetworkID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 20, router.NetworkInterfaces[0].NetworkID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 30, vpngw.SecurityGroup.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 30, vpngw.SecurityGroup.GetMetadata().Range().GetEndLine()) + +} diff --git a/internal/adapters/terraform/nifcloud/network/elastic_load_balancer.go b/internal/adapters/terraform/nifcloud/network/elastic_load_balancer.go new file mode 100644 index 000000000000..efe3c510fbc3 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/elastic_load_balancer.go @@ -0,0 +1,50 @@ +package network + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func adaptElasticLoadBalancers(modules terraform.Modules) []network.ElasticLoadBalancer { + var elasticLoadBalancers []network.ElasticLoadBalancer + + for _, resource := range modules.GetResourcesByType("nifcloud_elb") { + elasticLoadBalancers = append(elasticLoadBalancers, adaptElasticLoadBalancer(resource, modules)) + } + return elasticLoadBalancers +} + +func adaptElasticLoadBalancer(resource *terraform.Block, modules terraform.Modules) network.ElasticLoadBalancer { + var listeners []network.ElasticLoadBalancerListener + var networkInterfaces []network.NetworkInterface + + networkInterfaceBlocks := resource.GetBlocks("network_interface") + for _, networkInterfaceBlock := range networkInterfaceBlocks { + networkInterfaces = append( + networkInterfaces, + network.NetworkInterface{ + Metadata: networkInterfaceBlock.GetMetadata(), + NetworkID: networkInterfaceBlock.GetAttribute("network_id").AsStringValueOrDefault("", 
resource), + IsVipNetwork: networkInterfaceBlock.GetAttribute("is_vip_network").AsBoolValueOrDefault(true, resource), + }, + ) + } + + listeners = append(listeners, adaptElasticLoadBalancerListener(resource)) + for _, listenerBlock := range modules.GetReferencingResources(resource, "nifcloud_elb_listener", "elb_id") { + listeners = append(listeners, adaptElasticLoadBalancerListener(listenerBlock)) + } + + return network.ElasticLoadBalancer{ + Metadata: resource.GetMetadata(), + NetworkInterfaces: networkInterfaces, + Listeners: listeners, + } +} + +func adaptElasticLoadBalancerListener(resource *terraform.Block) network.ElasticLoadBalancerListener { + return network.ElasticLoadBalancerListener{ + Metadata: resource.GetMetadata(), + Protocol: resource.GetAttribute("protocol").AsStringValueOrDefault("", resource), + } +} diff --git a/internal/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go b/internal/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go new file mode 100644 index 000000000000..9d61a0b7e6e5 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go @@ -0,0 +1,90 @@ +package network + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +func Test_adaptElasticLoadBalancers(t *testing.T) { + tests := []struct { + name string + terraform string + expected []network.ElasticLoadBalancer + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_elb" "example" { + protocol = "HTTP" + + network_interface { + network_id = "net-COMMON_PRIVATE" + is_vip_network = false + } + } + + resource "nifcloud_elb_listener" "example" { + elb_id = nifcloud_elb.example.id + protocol = "HTTPS" + } +`, + expected: []network.ElasticLoadBalancer{{ + 
Metadata: defsecTypes.NewTestMetadata(), + NetworkInterfaces: []network.NetworkInterface{ + { + Metadata: defsecTypes.NewTestMetadata(), + NetworkID: defsecTypes.String("net-COMMON_PRIVATE", defsecTypes.NewTestMetadata()), + IsVipNetwork: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + Listeners: []network.ElasticLoadBalancerListener{ + { + Metadata: defsecTypes.NewTestMetadata(), + Protocol: defsecTypes.String("HTTP", defsecTypes.NewTestMetadata()), + }, + { + Metadata: defsecTypes.NewTestMetadata(), + Protocol: defsecTypes.String("HTTPS", defsecTypes.NewTestMetadata()), + }, + }, + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_elb" "example" { + network_interface { + } + } +`, + + expected: []network.ElasticLoadBalancer{{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkInterfaces: []network.NetworkInterface{ + { + Metadata: defsecTypes.NewTestMetadata(), + NetworkID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + IsVipNetwork: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + Listeners: []network.ElasticLoadBalancerListener{{ + Metadata: defsecTypes.NewTestMetadata(), + }}, + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptElasticLoadBalancers(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/network/load_balancer.go b/internal/adapters/terraform/nifcloud/network/load_balancer.go new file mode 100644 index 000000000000..0e27c58d377b --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/load_balancer.go @@ -0,0 +1,67 @@ +package network + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func adaptLoadBalancers(modules 
terraform.Modules) []network.LoadBalancer { + var loadBalancers []network.LoadBalancer + + for _, resource := range modules.GetResourcesByType("nifcloud_load_balancer") { + loadBalancers = append(loadBalancers, adaptLoadBalancer(resource, modules)) + } + + return loadBalancers +} + +func adaptLoadBalancer(resource *terraform.Block, modules terraform.Modules) network.LoadBalancer { + var listeners []network.LoadBalancerListener + + listeners = append(listeners, adaptListener(resource)) + for _, listenerBlock := range modules.GetReferencingResources(resource, "nifcloud_load_balancer_listener", "load_balancer_name") { + listeners = append(listeners, adaptListener(listenerBlock)) + } + + return network.LoadBalancer{ + Metadata: resource.GetMetadata(), + Listeners: listeners, + } +} + +func adaptListener(resource *terraform.Block) network.LoadBalancerListener { + protocolVal := defsecTypes.String("", resource.GetMetadata()) + policyVal := defsecTypes.String("", resource.GetMetadata()) + + portAttr := resource.GetAttribute("load_balancer_port") + if portAttr.IsNotNil() && portAttr.IsNumber() { + port := portAttr.AsNumber() + switch port { + case 21: + protocolVal = defsecTypes.String("FTP", portAttr.GetMetadata()) + case 80: + protocolVal = defsecTypes.String("HTTP", portAttr.GetMetadata()) + case 443: + protocolVal = defsecTypes.String("HTTPS", portAttr.GetMetadata()) + default: + protocolVal = defsecTypes.String("custom", portAttr.GetMetadata()) + } + } + + policyIDAttr := resource.GetAttribute("ssl_policy_id") + if policyIDAttr.IsNotNil() && policyIDAttr.IsString() { + policyVal = policyIDAttr.AsStringValueOrDefault("", resource) + } + + policyNameAttr := resource.GetAttribute("ssl_policy_name") + if policyNameAttr.IsNotNil() && policyNameAttr.IsString() { + policyVal = policyNameAttr.AsStringValueOrDefault("", resource) + } + + return network.LoadBalancerListener{ + Metadata: resource.GetMetadata(), + Protocol: protocolVal, + TLSPolicy: policyVal, + } +} diff --git 
a/internal/adapters/terraform/nifcloud/network/load_balancer_test.go b/internal/adapters/terraform/nifcloud/network/load_balancer_test.go new file mode 100644 index 000000000000..c52e40a0a921 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/load_balancer_test.go @@ -0,0 +1,75 @@ +package network + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +func Test_adaptLoadBalancers(t *testing.T) { + tests := []struct { + name string + terraform string + expected []network.LoadBalancer + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_load_balancer" "example" { + load_balancer_name = "example" + load_balancer_port = 80 + ssl_policy_id = "example-ssl-policy-id" + } + + resource "nifcloud_load_balancer_listener" "example" { + load_balancer_name = nifcloud_load_balancer.example.load_balancer_name + load_balancer_port = 443 + ssl_policy_name = "example-ssl-policy-name" + } + +`, + expected: []network.LoadBalancer{{ + Metadata: defsecTypes.NewTestMetadata(), + Listeners: []network.LoadBalancerListener{ + { + Metadata: defsecTypes.NewTestMetadata(), + TLSPolicy: defsecTypes.String("example-ssl-policy-id", defsecTypes.NewTestMetadata()), + Protocol: defsecTypes.String("HTTP", defsecTypes.NewTestMetadata()), + }, + { + Metadata: defsecTypes.NewTestMetadata(), + TLSPolicy: defsecTypes.String("example-ssl-policy-name", defsecTypes.NewTestMetadata()), + Protocol: defsecTypes.String("HTTPS", defsecTypes.NewTestMetadata()), + }, + }, + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_load_balancer" "example" { + } +`, + + expected: []network.LoadBalancer{{ + Metadata: defsecTypes.NewTestMetadata(), + Listeners: []network.LoadBalancerListener{{ + Metadata: 
defsecTypes.NewTestMetadata(), + }}, + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptLoadBalancers(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/network/router.go b/internal/adapters/terraform/nifcloud/network/router.go new file mode 100644 index 000000000000..6804820381d0 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/router.go @@ -0,0 +1,37 @@ +package network + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func adaptRouters(modules terraform.Modules) []network.Router { + var routers []network.Router + + for _, resource := range modules.GetResourcesByType("nifcloud_router") { + routers = append(routers, adaptRouter(resource)) + } + return routers +} + +func adaptRouter(resource *terraform.Block) network.Router { + var networkInterfaces []network.NetworkInterface + networkInterfaceBlocks := resource.GetBlocks("network_interface") + for _, networkInterfaceBlock := range networkInterfaceBlocks { + networkInterfaces = append( + networkInterfaces, + network.NetworkInterface{ + Metadata: networkInterfaceBlock.GetMetadata(), + NetworkID: networkInterfaceBlock.GetAttribute("network_id").AsStringValueOrDefault("", resource), + IsVipNetwork: types.Bool(false, networkInterfaceBlock.GetMetadata()), + }, + ) + } + + return network.Router{ + Metadata: resource.GetMetadata(), + SecurityGroup: resource.GetAttribute("security_group").AsStringValueOrDefault("", resource), + NetworkInterfaces: networkInterfaces, + } +} diff --git a/internal/adapters/terraform/nifcloud/network/router_test.go b/internal/adapters/terraform/nifcloud/network/router_test.go new file mode 100644 index 000000000000..19618a67e82f 
--- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/router_test.go @@ -0,0 +1,70 @@ +package network + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +func Test_adaptRouters(t *testing.T) { + tests := []struct { + name string + terraform string + expected []network.Router + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_router" "example" { + security_group = "example-security-group" + network_interface { + network_id = "net-COMMON_PRIVATE" + } + } +`, + expected: []network.Router{{ + Metadata: defsecTypes.NewTestMetadata(), + SecurityGroup: defsecTypes.String("example-security-group", defsecTypes.NewTestMetadata()), + NetworkInterfaces: []network.NetworkInterface{ + { + Metadata: defsecTypes.NewTestMetadata(), + NetworkID: defsecTypes.String("net-COMMON_PRIVATE", defsecTypes.NewTestMetadata()), + }, + }, + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_router" "example" { + network_interface { + } + } +`, + + expected: []network.Router{{ + Metadata: defsecTypes.NewTestMetadata(), + SecurityGroup: defsecTypes.String("", defsecTypes.NewTestMetadata()), + NetworkInterfaces: []network.NetworkInterface{ + { + Metadata: defsecTypes.NewTestMetadata(), + NetworkID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptRouters(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/network/vpn_gateway.go b/internal/adapters/terraform/nifcloud/network/vpn_gateway.go new file mode 100644 index 
000000000000..ca607a646ea0 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/vpn_gateway.go @@ -0,0 +1,22 @@ +package network + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func adaptVpnGateways(modules terraform.Modules) []network.VpnGateway { + var vpnGateways []network.VpnGateway + + for _, resource := range modules.GetResourcesByType("nifcloud_vpn_gateway") { + vpnGateways = append(vpnGateways, adaptVpnGateway(resource)) + } + return vpnGateways +} + +func adaptVpnGateway(resource *terraform.Block) network.VpnGateway { + return network.VpnGateway{ + Metadata: resource.GetMetadata(), + SecurityGroup: resource.GetAttribute("security_group").AsStringValueOrDefault("", resource), + } +} diff --git a/internal/adapters/terraform/nifcloud/network/vpn_gateway_test.go b/internal/adapters/terraform/nifcloud/network/vpn_gateway_test.go new file mode 100644 index 000000000000..0aa7331b58a3 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/vpn_gateway_test.go @@ -0,0 +1,53 @@ +package network + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +func Test_adaptVpnGateways(t *testing.T) { + tests := []struct { + name string + terraform string + expected []network.VpnGateway + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_vpn_gateway" "example" { + security_group = "example-security-group" + } +`, + expected: []network.VpnGateway{{ + Metadata: defsecTypes.NewTestMetadata(), + SecurityGroup: defsecTypes.String("example-security-group", defsecTypes.NewTestMetadata()), + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_vpn_gateway" "example" { + } +`, + 
+ expected: []network.VpnGateway{{ + Metadata: defsecTypes.NewTestMetadata(), + SecurityGroup: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptVpnGateways(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/nifcloud.go b/internal/adapters/terraform/nifcloud/nifcloud.go new file mode 100644 index 000000000000..8c9ae16a4dc4 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/nifcloud.go @@ -0,0 +1,23 @@ +package nifcloud + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/computing" + "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/dns" + "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/nas" + "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/network" + "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/rdb" + "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/sslcertificate" +) + +func Adapt(modules terraform.Modules) nifcloud.Nifcloud { + return nifcloud.Nifcloud{ + Computing: computing.Adapt(modules), + DNS: dns.Adapt(modules), + NAS: nas.Adapt(modules), + Network: network.Adapt(modules), + RDB: rdb.Adapt(modules), + SSLCertificate: sslcertificate.Adapt(modules), + } +} diff --git a/internal/adapters/terraform/nifcloud/rdb/adapt.go b/internal/adapters/terraform/nifcloud/rdb/adapt.go new file mode 100644 index 000000000000..8c249b09b212 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/rdb/adapt.go @@ -0,0 +1,13 @@ +package rdb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/rdb" + 
"github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) rdb.RDB { + return rdb.RDB{ + DBSecurityGroups: adaptDBSecurityGroups(modules), + DBInstances: adaptDBInstances(modules), + } +} diff --git a/internal/adapters/terraform/nifcloud/rdb/adapt_test.go b/internal/adapters/terraform/nifcloud/rdb/adapt_test.go new file mode 100644 index 000000000000..ab15a2f10747 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/rdb/adapt_test.go @@ -0,0 +1,60 @@ +package rdb + +import ( + "testing" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestLines(t *testing.T) { + src := ` +resource "nifcloud_db_instance" "example" { + publicly_accessible = false + engine = "MySQL" + engine_version = "5.7.15" + backup_retention_period = 2 + network_id = "example-network" +} + +resource "nifcloud_db_security_group" "example" { + description = "memo" + + rule { + cidr_ip = "0.0.0.0/0" + } +} +` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.DBInstances, 1) + require.Len(t, adapted.DBSecurityGroups, 1) + + dbInstance := adapted.DBInstances[0] + dbSecurityGroup := adapted.DBSecurityGroups[0] + + assert.Equal(t, 3, dbInstance.PublicAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, dbInstance.PublicAccess.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, dbInstance.Engine.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, dbInstance.Engine.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 5, dbInstance.EngineVersion.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 5, dbInstance.EngineVersion.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 6, dbInstance.BackupRetentionPeriodDays.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, 
dbInstance.BackupRetentionPeriodDays.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 7, dbInstance.NetworkID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, dbInstance.NetworkID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, dbSecurityGroup.Description.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 11, dbSecurityGroup.Description.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 14, dbSecurityGroup.CIDRs[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 14, dbSecurityGroup.CIDRs[0].GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/nifcloud/rdb/db_instance.go b/internal/adapters/terraform/nifcloud/rdb/db_instance.go new file mode 100644 index 000000000000..90662cb8a4ed --- /dev/null +++ b/internal/adapters/terraform/nifcloud/rdb/db_instance.go @@ -0,0 +1,26 @@ +package rdb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/rdb" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func adaptDBInstances(modules terraform.Modules) []rdb.DBInstance { + var dbInstances []rdb.DBInstance + + for _, resource := range modules.GetResourcesByType("nifcloud_db_instance") { + dbInstances = append(dbInstances, adaptDBInstance(resource)) + } + return dbInstances +} + +func adaptDBInstance(resource *terraform.Block) rdb.DBInstance { + return rdb.DBInstance{ + Metadata: resource.GetMetadata(), + BackupRetentionPeriodDays: resource.GetAttribute("backup_retention_period").AsIntValueOrDefault(0, resource), + Engine: resource.GetAttribute("engine").AsStringValueOrDefault("", resource), + EngineVersion: resource.GetAttribute("engine_version").AsStringValueOrDefault("", resource), + NetworkID: resource.GetAttribute("network_id").AsStringValueOrDefault("net-COMMON_PRIVATE", resource), + PublicAccess: resource.GetAttribute("publicly_accessible").AsBoolValueOrDefault(true, resource), + } +} diff --git a/internal/adapters/terraform/nifcloud/rdb/db_instance_test.go 
b/internal/adapters/terraform/nifcloud/rdb/db_instance_test.go new file mode 100644 index 000000000000..0999144c589f --- /dev/null +++ b/internal/adapters/terraform/nifcloud/rdb/db_instance_test.go @@ -0,0 +1,66 @@ +package rdb + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/rdb" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +func Test_adaptDBInstances(t *testing.T) { + tests := []struct { + name string + terraform string + expected []rdb.DBInstance + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_db_instance" "example" { + backup_retention_period = 2 + engine = "MySQL" + engine_version = "5.7.15" + publicly_accessible = false + network_id = "example-network" + } +`, + expected: []rdb.DBInstance{{ + Metadata: defsecTypes.NewTestMetadata(), + BackupRetentionPeriodDays: defsecTypes.Int(2, defsecTypes.NewTestMetadata()), + Engine: defsecTypes.String("MySQL", defsecTypes.NewTestMetadata()), + EngineVersion: defsecTypes.String("5.7.15", defsecTypes.NewTestMetadata()), + NetworkID: defsecTypes.String("example-network", defsecTypes.NewTestMetadata()), + PublicAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_db_instance" "example" { + } +`, + + expected: []rdb.DBInstance{{ + Metadata: defsecTypes.NewTestMetadata(), + BackupRetentionPeriodDays: defsecTypes.Int(0, defsecTypes.NewTestMetadata()), + Engine: defsecTypes.String("", defsecTypes.NewTestMetadata()), + EngineVersion: defsecTypes.String("", defsecTypes.NewTestMetadata()), + NetworkID: defsecTypes.String("net-COMMON_PRIVATE", defsecTypes.NewTestMetadata()), + PublicAccess: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t 
*testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptDBInstances(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/rdb/db_security_group.go b/internal/adapters/terraform/nifcloud/rdb/db_security_group.go new file mode 100644 index 000000000000..4e476c31176d --- /dev/null +++ b/internal/adapters/terraform/nifcloud/rdb/db_security_group.go @@ -0,0 +1,30 @@ +package rdb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/rdb" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func adaptDBSecurityGroups(modules terraform.Modules) []rdb.DBSecurityGroup { + var dbSecurityGroups []rdb.DBSecurityGroup + + for _, resource := range modules.GetResourcesByType("nifcloud_db_security_group") { + dbSecurityGroups = append(dbSecurityGroups, adaptDBSecurityGroup(resource)) + } + return dbSecurityGroups +} + +func adaptDBSecurityGroup(resource *terraform.Block) rdb.DBSecurityGroup { + var cidrs []defsecTypes.StringValue + + for _, rule := range resource.GetBlocks("rule") { + cidrs = append(cidrs, rule.GetAttribute("cidr_ip").AsStringValueOrDefault("", resource)) + } + + return rdb.DBSecurityGroup{ + Metadata: resource.GetMetadata(), + Description: resource.GetAttribute("description").AsStringValueOrDefault("", resource), + CIDRs: cidrs, + } +} diff --git a/internal/adapters/terraform/nifcloud/rdb/db_security_group_test.go b/internal/adapters/terraform/nifcloud/rdb/db_security_group_test.go new file mode 100644 index 000000000000..5092a3385a53 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/rdb/db_security_group_test.go @@ -0,0 +1,66 @@ +package rdb + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/rdb" + + 
"github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +func Test_adaptDBSecurityGroups(t *testing.T) { + tests := []struct { + name string + terraform string + expected []rdb.DBSecurityGroup + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_db_security_group" "example" { + description = "memo" + + rule { + cidr_ip = "0.0.0.0/0" + } + } +`, + expected: []rdb.DBSecurityGroup{{ + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("memo", defsecTypes.NewTestMetadata()), + CIDRs: []defsecTypes.StringValue{ + defsecTypes.String("0.0.0.0/0", defsecTypes.NewTestMetadata()), + }, + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_db_security_group" "example" { + rule { + } + } +`, + + expected: []rdb.DBSecurityGroup{{ + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("", defsecTypes.NewTestMetadata()), + CIDRs: []defsecTypes.StringValue{ + defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptDBSecurityGroups(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/sslcertificate/adapt.go b/internal/adapters/terraform/nifcloud/sslcertificate/adapt.go new file mode 100644 index 000000000000..31673c121493 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/sslcertificate/adapt.go @@ -0,0 +1,12 @@ +package sslcertificate + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/sslcertificate" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) sslcertificate.SSLCertificate { + return sslcertificate.SSLCertificate{ + ServerCertificates: adaptServerCertificates(modules), + } +} 
diff --git a/internal/adapters/terraform/nifcloud/sslcertificate/adapt_test.go b/internal/adapters/terraform/nifcloud/sslcertificate/adapt_test.go new file mode 100644 index 000000000000..9483467e47cc --- /dev/null +++ b/internal/adapters/terraform/nifcloud/sslcertificate/adapt_test.go @@ -0,0 +1,28 @@ +package sslcertificate + +import ( + "testing" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestLines(t *testing.T) { + src := ` +resource "nifcloud_ssl_certificate" "example" { + certificate = "generated-certificate" +} +` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.ServerCertificates, 1) + + serverCertificate := adapted.ServerCertificates[0] + + assert.Equal(t, 3, serverCertificate.Expiration.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, serverCertificate.Expiration.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate.go b/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate.go new file mode 100644 index 000000000000..c90570cee9ea --- /dev/null +++ b/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate.go @@ -0,0 +1,41 @@ +package sslcertificate + +import ( + "crypto/x509" + "encoding/pem" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/sslcertificate" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func adaptServerCertificates(modules terraform.Modules) []sslcertificate.ServerCertificate { + var serverCertificates []sslcertificate.ServerCertificate + + for _, resource := range modules.GetResourcesByType("nifcloud_ssl_certificate") { + serverCertificates = append(serverCertificates, adaptServerCertificate(resource)) + } + return 
serverCertificates +} + +func adaptServerCertificate(resource *terraform.Block) sslcertificate.ServerCertificate { + certificateAttr := resource.GetAttribute("certificate") + expiryDateVal := defsecTypes.TimeUnresolvable(resource.GetMetadata()) + + if certificateAttr.IsNotNil() { + expiryDateVal = defsecTypes.TimeUnresolvable(certificateAttr.GetMetadata()) + if certificateAttr.IsString() { + certificateString := certificateAttr.Value().AsString() + if block, _ := pem.Decode([]byte(certificateString)); block != nil { + if cert, err := x509.ParseCertificate(block.Bytes); err == nil { + expiryDateVal = defsecTypes.Time(cert.NotAfter, certificateAttr.GetMetadata()) + } + } + } + } + + return sslcertificate.ServerCertificate{ + Metadata: resource.GetMetadata(), + Expiration: expiryDateVal, + } +} diff --git a/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go b/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go new file mode 100644 index 000000000000..74dc6eb87766 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go @@ -0,0 +1,72 @@ +package sslcertificate + +import ( + "testing" + "time" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/sslcertificate" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +const certificate = ` +-----BEGIN CERTIFICATE----- +MIIB0zCCAX2gAwIBAgIJAI/M7BYjwB+uMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX +aWRnaXRzIFB0eSBMdGQwHhcNMTIwOTEyMjE1MjAyWhcNMTUwOTEyMjE1MjAyWjBF +MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50 +ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBANLJ +hPHhITqQbPklG3ibCVxwGMRfp/v4XqhfdQHdcVfHap6NQ5Wok/4xIA+ui35/MmNa 
+rtNuC+BdZ1tMuVCPFZcCAwEAAaNQME4wHQYDVR0OBBYEFJvKs8RfJaXTH08W+SGv +zQyKn0H8MB8GA1UdIwQYMBaAFJvKs8RfJaXTH08W+SGvzQyKn0H8MAwGA1UdEwQF +MAMBAf8wDQYJKoZIhvcNAQEFBQADQQBJlffJHybjDGxRMqaRmDhX0+6v02TUKZsW +r5QuVbpQhH6u+0UgcW0jp9QwpxoPTLTWGXEWBBBurxFwiCBhkQ+V +-----END CERTIFICATE----- +` + +func Test_adaptServerCertificates(t *testing.T) { + tests := []struct { + name string + terraform string + expected []sslcertificate.ServerCertificate + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_ssl_certificate" "example" { + certificate = < 0) || + (sniff.Resources != nil && len(sniff.Resources) > 0) + } + + matchers[FileTypeDockerfile] = func(name string, _ io.ReadSeeker) bool { + requiredFiles := []string{"Dockerfile", "Containerfile"} + for _, requiredFile := range requiredFiles { + base := filepath.Base(name) + ext := filepath.Ext(base) + if strings.TrimSuffix(base, ext) == requiredFile { + return true + } + if strings.EqualFold(ext, "."+requiredFile) { + return true + } + } + return false + } + + matchers[FileTypeHelm] = func(name string, r io.ReadSeeker) bool { + helmFiles := []string{"Chart.yaml", ".helmignore", "values.schema.json", "NOTES.txt"} + for _, expected := range helmFiles { + if strings.HasSuffix(name, expected) { + return true + } + } + helmFileExtensions := []string{".yaml", ".tpl"} + ext := filepath.Ext(filepath.Base(name)) + for _, expected := range helmFileExtensions { + if strings.EqualFold(ext, expected) { + return true + } + } + return IsHelmChartArchive(name, r) + } + + matchers[FileTypeKubernetes] = func(name string, r io.ReadSeeker) bool { + + if !IsType(name, r, FileTypeYAML) && !IsType(name, r, FileTypeJSON) { + return false + } + if resetReader(r) == nil { + return false + } + + expectedProperties := []string{"apiVersion", "kind", "metadata"} + + if IsType(name, r, FileTypeJSON) { + if resetReader(r) == nil { + return false + } + + var result map[string]interface{} + if err := json.NewDecoder(r).Decode(&result); err != nil { + 
return false + } + + for _, expected := range expectedProperties { + if _, ok := result[expected]; !ok { + return false + } + } + return true + } + + // at this point, we need to inspect bytes + var buf bytes.Buffer + if _, err := io.Copy(&buf, r); err != nil { + return false + } + data := buf.Bytes() + + marker := "\n---\n" + altMarker := "\r\n---\r\n" + if bytes.Contains(data, []byte(altMarker)) { + marker = altMarker + } + + for _, partial := range strings.Split(string(data), marker) { + var result map[string]interface{} + if err := yaml.Unmarshal([]byte(partial), &result); err != nil { + continue + } + match := true + for _, expected := range expectedProperties { + if _, ok := result[expected]; !ok { + match = false + break + } + } + if match { + return true + } + } + + return false + } +} + +func IsTerraformFile(path string) bool { + for _, ext := range []string{".tf", ".tf.json", ".tfvars"} { + if strings.HasSuffix(path, ext) { + return true + } + } + + return false +} + +func IsType(name string, r io.ReadSeeker, t FileType) bool { + r = ensureSeeker(r) + f, ok := matchers[t] + if !ok { + return false + } + return f(name, r) +} + +func GetTypes(name string, r io.ReadSeeker) []FileType { + var matched []FileType + r = ensureSeeker(r) + for check, f := range matchers { + if f(name, r) { + matched = append(matched, check) + } + resetReader(r) + } + return matched +} + +func ensureSeeker(r io.Reader) io.ReadSeeker { + if r == nil { + return nil + } + if seeker, ok := r.(io.ReadSeeker); ok { + return seeker + } + + var buf bytes.Buffer + if _, err := io.Copy(&buf, r); err == nil { + return bytes.NewReader(buf.Bytes()) + } + + return nil +} + +func resetReader(r io.Reader) io.ReadSeeker { + if r == nil { + return nil + } + if seeker, ok := r.(io.ReadSeeker); ok { + _, _ = seeker.Seek(0, 0) + return seeker + } + return ensureSeeker(r) +} diff --git a/pkg/detection/detect_test.go b/pkg/detection/detect_test.go new file mode 100644 index 000000000000..6a38dc014731 --- 
/dev/null +++ b/pkg/detection/detect_test.go @@ -0,0 +1,410 @@ +package detection + +import ( + "bytes" + "fmt" + "io" + "os" + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Detection(t *testing.T) { + tests := []struct { + name string + path string + r io.ReadSeeker + expected []FileType + }{ + { + name: "text file, no reader", + path: "something.txt", + expected: nil, + }, + { + name: "text file, with reader", + path: "something.txt", + r: strings.NewReader("some file content"), + expected: nil, + }, + { + name: "terraform, no reader", + path: "main.tf", + expected: []FileType{ + FileTypeTerraform, + }, + }, + { + name: "terraform, with reader", + path: "main.tf", + r: strings.NewReader("some file content"), + expected: []FileType{ + FileTypeTerraform, + }, + }, + { + name: "terraform json, no reader", + path: "main.tf.json", + expected: []FileType{ + FileTypeTerraform, + FileTypeJSON, + }, + }, + { + name: "terraform json, with reader", + path: "main.tf.json", + r: strings.NewReader(` +{ + "variable": { + "example": { + "default": "hello" + } + } +} +`), + expected: []FileType{ + FileTypeTerraform, + FileTypeJSON, + }, + }, + { + name: "terraform vars, no reader", + path: "main.tfvars", + expected: []FileType{ + FileTypeTerraform, + }, + }, + { + name: "terraform vars, with reader", + path: "main.tfvars", + r: strings.NewReader("some_var = \"some value\""), + expected: []FileType{ + FileTypeTerraform, + }, + }, + { + name: "cloudformation, no reader", + path: "main.yaml", + expected: []FileType{ + FileTypeYAML, + FileTypeHelm, + }, + }, + { + name: "terraform plan, with reader", + path: "plan.json", + r: strings.NewReader(`{ + "format_version": "0.2", + "terraform_version": "1.0.3", + "variables": { + "bucket_name": { + "value": "tfsec-plan-testing" + } + }, + "planned_values": {}, + "resource_changes": [], + "prior_state": {}, + "configuration": {} + }`), + expected: []FileType{ + FileTypeTerraformPlan, + FileTypeJSON, + 
}, + }, + { + name: "cloudformation, with reader", + path: "main.yaml", + r: strings.NewReader(`--- +AWSTemplateFormatVersion: 2010-09-09 + +Description: CodePipeline for continuous integration and continuous deployment + +Parameters: + RepositoryName: + Type: String + Description: Name of the CodeCommit repository + BuildDockerImage: + Type: String + Default: aws/codebuild/ubuntu-base:14.04 + Description: Docker image to use for the build phase + DeployDockerImage: + Type: String + Default: aws/codebuild/ubuntu-base:14.04 + Description: Docker image to use for the deployment phase + +Resources: + PipelineS3Bucket: + Type: AWS::S3::Bucket +`), + expected: []FileType{ + FileTypeCloudFormation, + FileTypeYAML, + FileTypeHelm, + }, + }, + { + name: "JSON with Resources, not cloudformation", + path: "whatever.json", + r: strings.NewReader(`{ + "Resources": ["something"] +}`), + expected: []FileType{ + FileTypeJSON, + }, + }, + { + name: "Dockerfile, no reader", + path: "Dockerfile", + r: nil, + expected: []FileType{ + FileTypeDockerfile, + }, + }, + { + name: "Containerfile, no reader", + path: "Containerfile", + r: nil, + expected: []FileType{ + FileTypeDockerfile, + }, + }, + { + name: "Dockerfile, reader", + path: "Dockerfile", + r: strings.NewReader("FROM ubuntu\n"), + expected: []FileType{ + FileTypeDockerfile, + }, + }, + { + name: "Dockerfile extension", + path: "lol.Dockerfile", + r: nil, + expected: []FileType{ + FileTypeDockerfile, + }, + }, + { + name: "kubernetes, no reader", + path: "k8s.yml", + r: nil, + expected: []FileType{ + FileTypeYAML, + }, + }, + { + name: "kubernetes, reader", + path: "k8s.yml", + r: strings.NewReader(`apiVersion: apps/v1 +kind: Deployment +metadata: + name: nginx-deployment + labels: + app: nginx +spec: + replicas: 3 + selector: + matchLabels: + app: nginx + template: + metadata: + labels: + app: nginx + spec: + containers: + - name: nginx + image: nginx:1.14.2 + ports: + - containerPort: 80`), + expected: []FileType{ + 
FileTypeKubernetes, + FileTypeYAML, + }, + }, + { + name: "kubernetes, reader, JSON", + path: "k8s.json", + r: strings.NewReader(`{ + "apiVersion": "apps/v1", + "kind": "Deployment", + "metadata": { + "name": "nginx-deployment", + "labels": { + "app": "nginx" + } + }, + "spec": { + "replicas": 3, + "selector": { + "matchLabels": { + "app": "nginx" + } + }, + "template": { + "metadata": { + "labels": { + "app": "nginx" + } + }, + "spec": { + "containers": [ + { + "name": "nginx", + "image": "nginx:1.14.2", + "ports": [ + { + "containerPort": 80 + } + ] + } + ] + } + } + } +}`), + expected: []FileType{ + FileTypeKubernetes, + FileTypeJSON, + }, + }, + { + name: "YAML, no reader", + path: "file.yaml", + r: nil, + expected: []FileType{ + FileTypeYAML, + FileTypeHelm, + }, + }, + { + name: "YML, no reader", + path: "file.yml", + r: nil, + expected: []FileType{ + FileTypeYAML, + }, + }, + { + name: "YML uppercase", + path: "file.YML", + r: nil, + expected: []FileType{ + FileTypeYAML, + }, + }, + { + name: "TOML, no reader", + path: "file.toml", + r: nil, + expected: []FileType{ + FileTypeTOML, + }, + }, + { + name: "JSON, no reader", + path: "file.json", + r: nil, + expected: []FileType{ + FileTypeJSON, + }, + }, + { + name: "kubernetes, configmap", + path: "k8s.yml", + r: strings.NewReader(`apiVersion: v1 +kind: ConfigMap +metadata: + name: test + namespace: default +data: + AWS_ACCESS_KEY_ID: "XXX" + AWS_SECRET_ACCESS_KEY: "XXX"`), + expected: []FileType{ + FileTypeKubernetes, + FileTypeYAML, + }, + }, + { + name: "kubernetes, clusterRole", + path: "k8s.yml", + r: strings.NewReader(`apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRole +metadata: + annotations: + rbac.authorization.kubernetes.io/autoupdate: "true" + labels: + kubernetes.io/bootstrapping: rbac-defaults + rbac.authorization.k8s.io/aggregate-to-edit: "true" + name: view +rules: +- apiGroups: + - networking.k8s.io + resources: + - ingresses + - ingresses/status + - networkpolicies + verbs: + - get + 
- list + - watch`), + expected: []FileType{ + FileTypeKubernetes, + FileTypeYAML, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + t.Run("GetTypes", func(t *testing.T) { + actualDetections := GetTypes(test.path, test.r) + assert.Equal(t, len(test.expected), len(actualDetections)) + for _, expected := range test.expected { + resetReader(test.r) + var found bool + for _, actual := range actualDetections { + if actual == expected { + found = true + break + } + } + assert.True(t, found, "%s should be detected", expected) + } + }) + for _, expected := range test.expected { + resetReader(test.r) + t.Run(fmt.Sprintf("IsType_%s", expected), func(t *testing.T) { + assert.True(t, IsType(test.path, test.r, expected)) + }) + } + t.Run("IsType_invalid", func(t *testing.T) { + resetReader(test.r) + assert.False(t, IsType(test.path, test.r, "invalid")) + }) + }) + } +} + +func BenchmarkIsType_SmallFile(b *testing.B) { + data, err := os.ReadFile(fmt.Sprintf("./testdata/%s", "small.file")) + assert.Nil(b, err) + + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = IsType(fmt.Sprintf("./testdata/%s", "small.file"), bytes.NewReader(data), FileTypeAzureARM) + } +} + +func BenchmarkIsType_BigFile(b *testing.B) { + data, err := os.ReadFile(fmt.Sprintf("./testdata/%s", "big.file")) + assert.Nil(b, err) + + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = IsType(fmt.Sprintf("./testdata/%s", "big.file"), bytes.NewReader(data), FileTypeAzureARM) + } +} diff --git a/pkg/detection/peek.go b/pkg/detection/peek.go new file mode 100644 index 000000000000..0e76115d9bd8 --- /dev/null +++ b/pkg/detection/peek.go @@ -0,0 +1,53 @@ +package detection + +import ( + "archive/tar" + "compress/gzip" + "errors" + "io" + "strings" +) + +func IsHelmChartArchive(path string, file io.Reader) bool { + + if !IsArchive(path) { + return false + } + + var err error + var fr = file + + if IsZip(path) { + if fr, err = gzip.NewReader(file); 
err != nil { + return false + } + } + tr := tar.NewReader(fr) + + if tr == nil { + return false + } + + for { + header, err := tr.Next() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return false + } + + if header.Typeflag == tar.TypeReg && strings.HasSuffix(header.Name, "Chart.yaml") { + return true + } + } + return false +} + +func IsArchive(path string) bool { + return strings.HasSuffix(path, ".tar") || IsZip(path) +} + +func IsZip(path string) bool { + return strings.HasSuffix(path, ".tgz") || strings.HasSuffix(path, ".tar.gz") +} diff --git a/pkg/detection/testdata/big.file b/pkg/detection/testdata/big.file new file mode 100644 index 0000000000000000000000000000000000000000..e7f3c2d40ecc31921643a456cda2de3a907b680a GIT binary patch literal 5120 scmeIu0Sy2E0K%a6Pi+qe5hx58Fkrxd0RsjM7%*VKfB^#r3>bJF7!&{i0RR91 literal 0 HcmV?d00001 diff --git a/pkg/detection/testdata/small.file b/pkg/detection/testdata/small.file new file mode 100644 index 000000000000..d8ae428a4800 --- /dev/null +++ b/pkg/detection/testdata/small.file @@ -0,0 +1,3 @@ +{ + "content": "foo bar baz" +} \ No newline at end of file diff --git a/pkg/extrafs/extrafs.go b/pkg/extrafs/extrafs.go new file mode 100644 index 000000000000..e3956c193bbe --- /dev/null +++ b/pkg/extrafs/extrafs.go @@ -0,0 +1,54 @@ +package extrafs + +import ( + "io/fs" + "os" + "path/filepath" +) + +/* + Go does not currently support symlinks in io/fs. 
+ We work around this by wrapping the fs.FS returned by os.DirFS with our own type which bolts on the ReadLinkFS +*/ + +type OSFS interface { + fs.FS + fs.StatFS +} + +type ReadLinkFS interface { + ResolveSymlink(name, dir string) (string, error) +} + +type FS interface { + OSFS + ReadLinkFS +} + +type filesystem struct { + root string + underlying OSFS +} + +func OSDir(path string) FS { + return &filesystem{ + root: path, + underlying: os.DirFS(path).(OSFS), + } +} + +func (f *filesystem) Open(name string) (fs.File, error) { + return f.underlying.Open(name) +} + +func (f *filesystem) Stat(name string) (fs.FileInfo, error) { + return f.underlying.Stat(name) +} + +func (f *filesystem) ResolveSymlink(name, dir string) (string, error) { + link, err := os.Readlink(filepath.Join(f.root, dir, name)) + if err == nil { + return filepath.Join(dir, link), nil + } + return name, nil +} diff --git a/pkg/fanal/analyzer/config/terraform/terraform.go b/pkg/fanal/analyzer/config/terraform/terraform.go index e684ca1017c0..96fb3bb47a07 100644 --- a/pkg/fanal/analyzer/config/terraform/terraform.go +++ b/pkg/fanal/analyzer/config/terraform/terraform.go @@ -3,7 +3,7 @@ package terraform import ( "os" - "github.com/aquasecurity/trivy-iac/pkg/detection" + "github.com/aquasecurity/trivy/pkg/detection" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config" "github.com/aquasecurity/trivy/pkg/misconf" diff --git a/pkg/fanal/analyzer/const.go b/pkg/fanal/analyzer/const.go index 88774ac15cbc..0cd02c4290f3 100644 --- a/pkg/fanal/analyzer/const.go +++ b/pkg/fanal/analyzer/const.go @@ -1,6 +1,6 @@ package analyzer -import "github.com/aquasecurity/trivy-iac/pkg/detection" +import "github.com/aquasecurity/trivy/pkg/detection" type Type string diff --git a/pkg/misconf/scanner.go b/pkg/misconf/scanner.go index 2891267b0ac6..3dc1c436d26b 100644 --- a/pkg/misconf/scanner.go +++ 
b/pkg/misconf/scanner.go @@ -16,19 +16,19 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy-iac/pkg/detection" - "github.com/aquasecurity/trivy-iac/pkg/scanners" - "github.com/aquasecurity/trivy-iac/pkg/scanners/azure/arm" - cfscanner "github.com/aquasecurity/trivy-iac/pkg/scanners/cloudformation" - cfparser "github.com/aquasecurity/trivy-iac/pkg/scanners/cloudformation/parser" - dfscanner "github.com/aquasecurity/trivy-iac/pkg/scanners/dockerfile" - "github.com/aquasecurity/trivy-iac/pkg/scanners/helm" - k8sscanner "github.com/aquasecurity/trivy-iac/pkg/scanners/kubernetes" - tfscanner "github.com/aquasecurity/trivy-iac/pkg/scanners/terraform" - tfpscanner "github.com/aquasecurity/trivy-iac/pkg/scanners/terraformplan" + "github.com/aquasecurity/trivy/pkg/detection" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/mapfs" + "github.com/aquasecurity/trivy/pkg/scanners" + "github.com/aquasecurity/trivy/pkg/scanners/azure/arm" + cfscanner "github.com/aquasecurity/trivy/pkg/scanners/cloudformation" + cfparser "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + dfscanner "github.com/aquasecurity/trivy/pkg/scanners/dockerfile" + "github.com/aquasecurity/trivy/pkg/scanners/helm" + k8sscanner "github.com/aquasecurity/trivy/pkg/scanners/kubernetes" + tfscanner "github.com/aquasecurity/trivy/pkg/scanners/terraform" + tfpscanner "github.com/aquasecurity/trivy/pkg/scanners/terraformplan" _ "embed" ) diff --git a/pkg/scanners/azure/arm/parser/armjson/bench_test.go b/pkg/scanners/azure/arm/parser/armjson/bench_test.go new file mode 100644 index 000000000000..f2e56f853f7b --- /dev/null +++ 
b/pkg/scanners/azure/arm/parser/armjson/bench_test.go @@ -0,0 +1,71 @@ +package armjson + +import ( + "encoding/json" + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/require" +) + +func BenchmarkUnmarshal_JFather(b *testing.B) { + target := make(map[string]interface{}) + input := []byte(`{ + "glossary": { + "title": "example glossary", + "GlossDiv": { + "title": "S", + "GlossList": { + "GlossEntry": { + "ID": "SGML", + "SortAs": "SGML", + "GlossTerm": "Standard Generalized Markup Language", + "Acronym": "SGML", + "Abbrev": "ISO 8879:1986", + "GlossDef": { + "para": "A meta-markup language, used to create markup languages such as DocBook.", + "GlossSeeAlso": ["GML", "XML"] + }, + "GlossSee": "markup" + } + } + } + } +}`) + + for n := 0; n < b.N; n++ { + metadata := types.NewTestMetadata() + require.NoError(b, Unmarshal(input, &target, &metadata)) + } +} + +func BenchmarkUnmarshal_Traditional(b *testing.B) { + target := make(map[string]interface{}) + input := []byte(`{ + "glossary": { + "title": "example glossary", + "GlossDiv": { + "title": "S", + "GlossList": { + "GlossEntry": { + "ID": "SGML", + "SortAs": "SGML", + "GlossTerm": "Standard Generalized Markup Language", + "Acronym": "SGML", + "Abbrev": "ISO 8879:1986", + "GlossDef": { + "para": "A meta-markup language, used to create markup languages such as DocBook.", + "GlossSeeAlso": ["GML", "XML"] + }, + "GlossSee": "markup" + } + } + } + } +}`) + + for n := 0; n < b.N; n++ { + require.NoError(b, json.Unmarshal(input, &target)) + } +} diff --git a/pkg/scanners/azure/arm/parser/armjson/decode.go b/pkg/scanners/azure/arm/parser/armjson/decode.go new file mode 100644 index 000000000000..5dd2f6fd3e1c --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/decode.go @@ -0,0 +1,66 @@ +package armjson + +import ( + "fmt" + "reflect" + + "github.com/aquasecurity/defsec/pkg/types" +) + +func (n *node) Decode(target interface{}) error { + v := 
reflect.ValueOf(target) + return n.decodeToValue(v) +} + +func (n *node) Metadata() types.Metadata { + return *n.metadata +} + +var unmarshaller = reflect.TypeOf((*Unmarshaller)(nil)).Elem() +var receiver = reflect.TypeOf((*MetadataReceiver)(nil)).Elem() + +func (n *node) decodeToValue(v reflect.Value) error { + + if v.Type().Implements(receiver) { + rec := v + defer func() { + rec.MethodByName("SetMetadata").Call([]reflect.Value{reflect.ValueOf(n.metadata)}) + }() + } + if v.Type().Implements(unmarshaller) { + returns := v.MethodByName("UnmarshalJSONWithMetadata").Call([]reflect.Value{reflect.ValueOf(n)}) + if err := returns[0].Interface(); err != nil { + return err.(error) + } + return nil + } + + for v.Kind() == reflect.Ptr { + v = v.Elem() + } + + if !v.CanSet() { + return fmt.Errorf("target is not settable") + } + + switch n.kind { + case KindObject: + return n.decodeObject(v) + case KindArray: + return n.decodeArray(v) + case KindString: + return n.decodeString(v) + case KindNumber: + return n.decodeNumber(v) + case KindBoolean: + return n.decodeBoolean(v) + case KindNull: + return n.decodeNull(v) + case KindComment: + return n.decodeString(v) + case KindUnknown: + return fmt.Errorf("cannot decode unknown kind") + default: + return fmt.Errorf("decoding of kind 0x%x is not supported", n.kind) + } +} diff --git a/pkg/scanners/azure/arm/parser/armjson/decode_array.go b/pkg/scanners/azure/arm/parser/armjson/decode_array.go new file mode 100644 index 000000000000..75faada57252 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/decode_array.go @@ -0,0 +1,51 @@ +package armjson + +import ( + "fmt" + "reflect" +) + +func (n *node) decodeArray(v reflect.Value) error { + + length := len(n.content) + + var original reflect.Value + + switch v.Kind() { + case reflect.Array: + if v.Len() != length { + return fmt.Errorf("invalid length") + } + case reflect.Slice: + v.Set(reflect.MakeSlice(v.Type(), length, length)) + case reflect.Interface: + original = v + slice := 
reflect.ValueOf(make([]interface{}, length)) + v = reflect.New(slice.Type()).Elem() + v.Set(slice) + default: + return fmt.Errorf("invalid target type") + } + + elementType := v.Type().Elem() + for i, nodeElement := range n.content { + node := nodeElement.(*node) + targetElement := reflect.New(elementType).Elem() + addressable := targetElement + if targetElement.Kind() == reflect.Ptr { + targetElement.Set(reflect.New(elementType.Elem())) + } else { + addressable = targetElement.Addr() + } + if err := node.decodeToValue(addressable); err != nil { + return err + } + v.Index(i).Set(targetElement) + } + + if original.IsValid() { + original.Set(v) + } + + return nil +} diff --git a/pkg/scanners/azure/arm/parser/armjson/decode_boolean.go b/pkg/scanners/azure/arm/parser/armjson/decode_boolean.go new file mode 100644 index 000000000000..dbdef3a3253d --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/decode_boolean.go @@ -0,0 +1,18 @@ +package armjson + +import ( + "fmt" + "reflect" +) + +func (n *node) decodeBoolean(v reflect.Value) error { + switch v.Kind() { + case reflect.Bool: + v.SetBool(n.raw.(bool)) + case reflect.Interface: + v.Set(reflect.ValueOf(n.raw)) + default: + return fmt.Errorf("cannot decode boolean value to %s target", v.Kind()) + } + return nil +} diff --git a/pkg/scanners/azure/arm/parser/armjson/decode_meta_test.go b/pkg/scanners/azure/arm/parser/armjson/decode_meta_test.go new file mode 100644 index 000000000000..57e657a7093a --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/decode_meta_test.go @@ -0,0 +1,40 @@ +package armjson + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +type TestParent struct { + Child *TestChild `json:"child"` +} + +type TestChild struct { + Name string + Line int + Column int +} + +func (t *TestChild) UnmarshalJSONWithMetadata(node Node) error { + t.Line = node.Range().Start.Line + 
t.Column = node.Range().Start.Column + return node.Decode(&t.Name) +} + +func Test_DecodeWithMetadata(t *testing.T) { + example := []byte(` +{ + "child": "secret" +} +`) + var parent TestParent + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &parent, &metadata)) + assert.Equal(t, 3, parent.Child.Line) + assert.Equal(t, 12, parent.Child.Column) + assert.Equal(t, "secret", parent.Child.Name) +} diff --git a/pkg/scanners/azure/arm/parser/armjson/decode_null.go b/pkg/scanners/azure/arm/parser/armjson/decode_null.go new file mode 100644 index 000000000000..2cc86b3c1bb7 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/decode_null.go @@ -0,0 +1,10 @@ +package armjson + +import ( + "reflect" +) + +func (n *node) decodeNull(v reflect.Value) error { + v.Set(reflect.Zero(v.Type())) + return nil +} diff --git a/pkg/scanners/azure/arm/parser/armjson/decode_number.go b/pkg/scanners/azure/arm/parser/armjson/decode_number.go new file mode 100644 index 000000000000..653f6f1fbe06 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/decode_number.go @@ -0,0 +1,46 @@ +package armjson + +import ( + "fmt" + "reflect" +) + +func (n *node) decodeNumber(v reflect.Value) error { + + switch v.Kind() { + case reflect.Int64, reflect.Int32, reflect.Int16, reflect.Int8, reflect.Int: + if i64, ok := n.raw.(int64); ok { + v.SetInt(i64) + return nil + } + if f64, ok := n.raw.(float64); ok { + v.SetInt(int64(f64)) + return nil + } + case reflect.Uint64, reflect.Uint32, reflect.Uint16, reflect.Uint8, reflect.Uint: + if i64, ok := n.raw.(int64); ok { + v.SetUint(uint64(i64)) + return nil + } + if f64, ok := n.raw.(float64); ok { + v.SetUint(uint64(f64)) + return nil + } + case reflect.Float32, reflect.Float64: + if i64, ok := n.raw.(int64); ok { + v.SetFloat(float64(i64)) + return nil + } + if f64, ok := n.raw.(float64); ok { + v.SetFloat(f64) + return nil + } + case reflect.Interface: + v.Set(reflect.ValueOf(n.raw)) + return nil + default: + return 
fmt.Errorf("cannot decode number value to %s target", v.Kind()) + } + + return fmt.Errorf("internal value is not numeric") +} diff --git a/pkg/scanners/azure/arm/parser/armjson/decode_object.go b/pkg/scanners/azure/arm/parser/armjson/decode_object.go new file mode 100644 index 000000000000..516029b55deb --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/decode_object.go @@ -0,0 +1,122 @@ +package armjson + +import ( + "fmt" + "reflect" + "strings" +) + +func (n *node) decodeObject(v reflect.Value) error { + switch v.Kind() { + case reflect.Struct: + return n.decodeObjectToStruct(v) + case reflect.Map: + return n.decodeObjectToMap(v) + case reflect.Interface: + target := reflect.New(reflect.TypeOf(make(map[string]interface{}, len(n.Content())))).Elem() + if err := n.decodeObjectToMap(target); err != nil { + return err + } + v.Set(target) + return nil + default: + return fmt.Errorf("cannot set object value to target of type %s", v.Kind()) + } +} + +func (n *node) decodeObjectToMap(v reflect.Value) error { + properties, err := n.objectAsMap() + if err != nil { + return err + } + + newMap := reflect.MakeMap(v.Type()) + valueType := v.Type().Elem() + + for key, value := range properties { + target := reflect.New(valueType).Elem() + addressable := target + if target.Kind() == reflect.Ptr { + target.Set(reflect.New(valueType.Elem())) + } else { + addressable = target.Addr() + } + if err := value.(*node).decodeToValue(addressable); err != nil { + return err + } + newMap.SetMapIndex(reflect.ValueOf(key), target) + } + + v.Set(newMap) + return nil + +} + +func (n *node) objectAsMap() (map[string]Node, error) { + if n.kind != KindObject { + return nil, fmt.Errorf("not an object") + } + properties := make(map[string]Node) + contents := n.content + for i := 0; i < len(contents); i += 2 { + key := contents[i] + if key.Kind() != KindString { + return nil, fmt.Errorf("invalid object key - please report this bug") + } + keyStr := key.(*node).raw.(string) + + if i+1 >= 
len(contents) { + return nil, fmt.Errorf("missing object value - please report this bug") + } + properties[keyStr] = contents[i+1] + } + return properties, nil +} + +func (n *node) decodeObjectToStruct(v reflect.Value) error { + + temp := reflect.New(v.Type()).Elem() + v.Set(temp) + + properties, err := n.objectAsMap() + if err != nil { + return err + } + + t := v.Type() + for i := 0; i < t.NumField(); i++ { + fv := t.Field(i) + tags := strings.Split(fv.Tag.Get("json"), ",") + var tagName string + for _, tag := range tags { + if tag != "omitempty" && tag != "-" { + tagName = tag + } + } + if tagName == "" { + tagName = fv.Name + } + + value, ok := properties[tagName] + if !ok { + // TODO: should we zero this value? + continue + } + + subject := v.Field(i) + + // if fields are nil pointers, initialise them with values of the correct type + if subject.Kind() == reflect.Ptr { + if subject.IsNil() { + subject.Set(reflect.New(subject.Type().Elem())) + } + } else { + subject = subject.Addr() + } + + if err := value.(*node).decodeToValue(subject); err != nil { + return err + } + } + return nil +} diff --git a/pkg/scanners/azure/arm/parser/armjson/decode_string.go b/pkg/scanners/azure/arm/parser/armjson/decode_string.go new file mode 100644 index 000000000000..c8f734b57024 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/decode_string.go @@ -0,0 +1,19 @@ +package armjson + +import ( + "fmt" + "reflect" +) + +func (n *node) decodeString(v reflect.Value) error { + + switch v.Kind() { + case reflect.String: + v.SetString(n.raw.(string)) + case reflect.Interface: + v.Set(reflect.ValueOf(n.raw)) + default: + return fmt.Errorf("cannot decode string value to non-string target: %s", v.Kind()) + } + return nil +} diff --git a/pkg/scanners/azure/arm/parser/armjson/kind.go b/pkg/scanners/azure/arm/parser/armjson/kind.go new file mode 100644 index 000000000000..82712cc89225 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/kind.go @@ -0,0 +1,14 @@ +package armjson + 
+type Kind uint8 + +const ( + KindUnknown Kind = iota + KindNull + KindNumber + KindString + KindBoolean + KindArray + KindObject + KindComment +) diff --git a/pkg/scanners/azure/arm/parser/armjson/node.go b/pkg/scanners/azure/arm/parser/armjson/node.go new file mode 100644 index 000000000000..3c398d6ed29c --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/node.go @@ -0,0 +1,59 @@ +package armjson + +import "github.com/aquasecurity/defsec/pkg/types" + +type Node interface { + Comments() []Node + Range() Range + Decode(target interface{}) error + Kind() Kind + Content() []Node + Metadata() types.Metadata +} + +type Range struct { + Start Position + End Position +} + +type Position struct { + Line int + Column int +} + +type node struct { + raw interface{} + start Position + end Position + kind Kind + content []Node + comments []Node + metadata *types.Metadata + ref string +} + +func (n *node) Range() Range { + return Range{ + Start: n.start, + End: Position{ + Column: n.end.Column - 1, + Line: n.end.Line, + }, + } +} + +func (n *node) Comments() []Node { + return n.comments +} + +func (n *node) End() Position { + return n.end +} + +func (n *node) Kind() Kind { + return n.kind +} + +func (n *node) Content() []Node { + return n.content +} diff --git a/pkg/scanners/azure/arm/parser/armjson/parse.go b/pkg/scanners/azure/arm/parser/armjson/parse.go new file mode 100644 index 000000000000..ac86f459fced --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/parse.go @@ -0,0 +1,150 @@ +package armjson + +import ( + "fmt" + "strings" + + "github.com/aquasecurity/defsec/pkg/types" +) + +type parser struct { + position Position + size int + peeker *PeekReader +} + +func newParser(p *PeekReader, pos Position) *parser { + return &parser{ + position: pos, + peeker: p, + } +} + +func (p *parser) parse(rootMetadata *types.Metadata) (Node, error) { + root, err := p.parseElement(rootMetadata) + if err != nil { + return nil, err + } + 
root.(*node).updateMetadata("") + return root, nil +} + +func (p *parser) parseElement(parentMetadata *types.Metadata) (Node, error) { + if err := p.parseWhitespace(); err != nil { + return nil, err + } + n, err := p.parseValue(parentMetadata) + if err != nil { + return nil, err + } + if err := p.parseWhitespace(); err != nil { + return nil, err + } + return n, nil +} + +func (p *parser) parseValue(parentMetadata *types.Metadata) (Node, error) { + c, err := p.peeker.Peek() + if err != nil { + return nil, err + } + + switch c { + case '/': + return p.parseComment(parentMetadata) + case '"': + return p.parseString(parentMetadata) + case '{': + return p.parseObject(parentMetadata) + case '[': + return p.parseArray(parentMetadata) + case 'n': + return p.parseNull(parentMetadata) + case 't', 'f': + return p.parseBoolean(parentMetadata) + default: + if c == '-' || (c >= '0' && c <= '9') { + return p.parseNumber(parentMetadata) + } + return nil, fmt.Errorf("unexpected character '%c'", c) + } +} + +func (p *parser) next() (rune, error) { + b, err := p.peeker.Next() + if err != nil { + return 0, err + } + p.position.Column++ + p.size++ + return b, nil +} + +func (p *parser) undo() error { + if err := p.peeker.Undo(); err != nil { + return err + } + p.position.Column-- + p.size-- + return nil +} + +func (p *parser) makeError(format string, args ...interface{}) error { + return fmt.Errorf( + "error at line %d, column %d: %s", + p.position.Line, + p.position.Column, + fmt.Sprintf(format, args...), + ) +} + +func (p *parser) newNode(k Kind, parentMetadata *types.Metadata) (*node, *types.Metadata) { + n := &node{ + start: p.position, + kind: k, + } + metadata := types.NewMetadata( + types.NewRange(parentMetadata.Range().GetFilename(), n.start.Line, n.end.Line, "", parentMetadata.Range().GetFS()), + n.ref, + ) + metadata.SetParentPtr(parentMetadata) + n.metadata = &metadata + return n, n.metadata +} + +func (n *node) updateMetadata(prefix string) { + + var full string + // 
nolint:gocritic + if strings.HasPrefix(n.ref, "[") { + full = prefix + n.ref + } else if prefix != "" { + full = prefix + "." + n.ref + } else { + full = n.ref + } + + n.metadata.SetRange(types.NewRange(n.metadata.Range().GetFilename(), + n.start.Line, + n.end.Line, + "", + n.metadata.Range().GetFS())) + + n.metadata.SetReference(full) + + for i := range n.content { + n.content[i].(*node).updateMetadata(full) + } +} + +func (p *parser) swallowIfEqual(r rune) bool { + c, err := p.peeker.Peek() + if err != nil { + return false + } + if c != r { + return false + } + _, _ = p.next() + return true +} diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_array.go b/pkg/scanners/azure/arm/parser/armjson/parse_array.go new file mode 100644 index 000000000000..795d69460253 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/parse_array.go @@ -0,0 +1,54 @@ +package armjson + +import ( + "fmt" + + "github.com/aquasecurity/defsec/pkg/types" +) + +func (p *parser) parseArray(parentMetadata *types.Metadata) (Node, error) { + n, metadata := p.newNode(KindArray, parentMetadata) + + c, err := p.next() + if err != nil { + return nil, err + } + + if c != '[' { + return nil, p.makeError("expecting object delimiter") + } + if err := p.parseWhitespace(); err != nil { + return nil, err + } + // we've hit the end of the object + if p.swallowIfEqual(']') { + n.end = p.position + return n, nil + } + + // for each element + for { + + if err := p.parseWhitespace(); err != nil { + return nil, err + } + + val, err := p.parseElement(metadata) + if err != nil { + return nil, err + } + val.(*node).ref = fmt.Sprintf("[%d]", len(n.content)) + + n.content = append(n.content, val) + + // we've hit the end of the array + if p.swallowIfEqual(']') { + n.end = p.position + return n, nil + } + + if !p.swallowIfEqual(',') { + return nil, p.makeError("unexpected character - expecting , or ]") + } + } +} diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_array_test.go 
b/pkg/scanners/azure/arm/parser/armjson/parse_array_test.go new file mode 100644 index 000000000000..f1146ab08d87 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/parse_array_test.go @@ -0,0 +1,46 @@ +package armjson + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Array_Empty(t *testing.T) { + example := []byte(`[]`) + target := []int{} + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &target, &metadata)) + assert.Len(t, target, 0) +} + +func Test_Array_ToSlice(t *testing.T) { + example := []byte(`[1, 2, 3]`) + target := []int{} + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &target, &metadata)) + assert.Len(t, target, 3) + assert.EqualValues(t, []int{1, 2, 3}, target) +} + +func Test_Array_ToArray(t *testing.T) { + example := []byte(`[3, 2, 1]`) + target := [3]int{6, 6, 6} + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &target, &metadata)) + assert.Len(t, target, 3) + assert.EqualValues(t, [3]int{3, 2, 1}, target) +} + +func Test_Array_ToInterface(t *testing.T) { + example := []byte(`{ "List": [1, 2, 3] }`) + target := struct { + List interface{} + }{} + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &target, &metadata)) + assert.Len(t, target.List, 3) +} diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_boolean.go b/pkg/scanners/azure/arm/parser/armjson/parse_boolean.go new file mode 100644 index 000000000000..3d97589acdcb --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/parse_boolean.go @@ -0,0 +1,40 @@ +package armjson + +import ( + "fmt" + + "github.com/aquasecurity/defsec/pkg/types" +) + +var trueRunes = []rune("true") +var falseRunes = []rune("false") + +func (p *parser) parseBoolean(parentMetadata *types.Metadata) (Node, error) { + + n, _ := p.newNode(KindBoolean, 
parentMetadata) + + r, err := p.peeker.Peek() + if err != nil { + return nil, err + } + + if r == 't' { + for _, expected := range trueRunes { + if !p.swallowIfEqual(expected) { + return nil, fmt.Errorf("unexpected character in boolean value") + } + } + n.raw = true + n.end = p.position + return n, err + } + + for _, expected := range falseRunes { + if !p.swallowIfEqual(expected) { + return nil, fmt.Errorf("unexpected character in boolean value") + } + } + n.raw = false + n.end = p.position + return n, nil +} diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_boolean_test.go b/pkg/scanners/azure/arm/parser/armjson/parse_boolean_test.go new file mode 100644 index 000000000000..e1d44db6119c --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/parse_boolean_test.go @@ -0,0 +1,54 @@ +package armjson + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Boolean_True(t *testing.T) { + example := []byte(`true`) + var output bool + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.True(t, output) +} + +func Test_Boolean_False(t *testing.T) { + example := []byte(`false`) + var output bool + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.False(t, output) +} + +func Test_Boolean_ToNonBoolPointer(t *testing.T) { + example := []byte(`false`) + var output string + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.Error(t, err) +} + +func Test_Bool_ToUninitialisedPointer(t *testing.T) { + example := []byte(`true`) + var str *string + metadata := types.NewTestMetadata() + err := Unmarshal(example, str, &metadata) + require.Error(t, err) + assert.Nil(t, str) +} + +func Test_Bool_ToInterface(t *testing.T) { + example := []byte(`true`) + var output 
interface{} + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.True(t, output.(bool)) +} diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_comment.go b/pkg/scanners/azure/arm/parser/armjson/parse_comment.go new file mode 100644 index 000000000000..7f35078ae84e --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/parse_comment.go @@ -0,0 +1,98 @@ +package armjson + +import ( + "strings" + + "github.com/aquasecurity/defsec/pkg/types" +) + +func (p *parser) parseComment(parentMetadata *types.Metadata) (Node, error) { + + if err := p.parseWhitespace(); err != nil { + return nil, err + } + + _, err := p.next() + if err != nil { + return nil, err + } + + b, err := p.next() + if err != nil { + return nil, err + } + + switch b { + case '/': + return p.parseLineComment(parentMetadata) + case '*': + return p.parseBlockComment(parentMetadata) + default: + return nil, p.makeError("expecting comment delimiter") + } +} + +func (p *parser) parseLineComment(parentMetadata *types.Metadata) (Node, error) { + + n, _ := p.newNode(KindComment, parentMetadata) + + var sb strings.Builder + for { + c, err := p.next() + if err != nil { + return nil, err + } + if c == '\n' { + p.position.Column = 1 + p.position.Line++ + break + } + sb.WriteRune(c) + } + + n.raw = sb.String() + n.end = p.position + + if err := p.parseWhitespace(); err != nil { + return nil, err + } + return n, nil +} + +func (p *parser) parseBlockComment(parentMetadata *types.Metadata) (Node, error) { + + n, _ := p.newNode(KindComment, parentMetadata) + + var sb strings.Builder + + for { + c, err := p.next() + if err != nil { + return nil, err + } + if c == '*' { + c, err := p.peeker.Peek() + if err != nil { + return nil, err + } + if c == '/' { + break + } + sb.WriteRune('*') + } else { + if c == '\n' { + p.position.Column = 1 + p.position.Line++ + } + sb.WriteRune(c) + } + } + + n.raw = sb.String() + + if err := p.parseWhitespace(); 
err != nil { + return nil, err + } + + return n, nil +} diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_complex_test.go b/pkg/scanners/azure/arm/parser/armjson/parse_complex_test.go new file mode 100644 index 000000000000..17c4014b83a3 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/parse_complex_test.go @@ -0,0 +1,131 @@ +package armjson + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/require" +) + +func Test_Complex(t *testing.T) { + target := make(map[string]interface{}) + input := `{ + "glossary": { + "title": "example glossary", + "GlossDiv": { + "title": "S", + "GlossList": { + "GlossEntry": { + "ID": "SGML", + "SortAs": "SGML", + "GlossTerm": "Standard Generalized Markup Language", + "Acronym": "SGML", + "Abbrev": "ISO 8879:1986", + "GlossDef": { + "para": "A meta-markup language, used to create markup languages such as DocBook.", + "GlossSeeAlso": ["GML", "XML"] + }, + "GlossSee": "markup" + } + } + } + } +}` + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal([]byte(input), &target, &metadata)) +} + +type Resource struct { + Line int + inner resourceInner +} + +type resourceInner struct { + Type string `json:"Type" yaml:"Type"` + Properties map[string]*Property `json:"Properties" yaml:"Properties"` +} + +func (r *Resource) UnmarshalJSONWithMetadata(node Node) error { + r.Line = node.Range().Start.Line + return node.Decode(&r.inner) +} + +type Parameter struct { + inner parameterInner +} + +type parameterInner struct { + Type string `json:"Type" yaml:"Type"` + Default interface{} `yaml:"Default"` +} + +func (p *Parameter) UnmarshalJSONWithMetadata(node Node) error { + return node.Decode(&p.inner) +} + +type Property struct { + Line int + inner propertyInner +} + +type CFType string + +type propertyInner struct { + Type CFType + Value interface{} `json:"Value" yaml:"Value"` +} + +func (p *Property) UnmarshalJSONWithMetadata(node Node) error { + p.Line 
= node.Range().Start.Line + return node.Decode(&p.inner) +} + +type Temp struct { + BucketName *Parameter + BucketKeyEnabled *Parameter +} + +type FileContext struct { + Parameters map[string]*Parameter `json:"Parameters" yaml:"Parameters"` + Resources map[string]*Resource `json:"Resources" yaml:"Resources"` +} + +func Test_CloudFormation(t *testing.T) { + var target FileContext + input := ` +{ + "Parameters": { + "BucketName": { + "Type": "String", + "Default": "naughty" + }, + "BucketKeyEnabled": { + "Type": "Boolean", + "Default": false + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "Properties": { + "BucketName": { + "Ref": "BucketName" + }, + "BucketEncryption": { + "ServerSideEncryptionConfiguration": [ + { + "BucketKeyEnabled": { + "Ref": "BucketKeyEnabled" + } + } + ] + } + } + } + } +} +` + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal([]byte(input), &target, &metadata)) +} diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_null.go b/pkg/scanners/azure/arm/parser/armjson/parse_null.go new file mode 100644 index 000000000000..36fa5d7370e1 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/parse_null.go @@ -0,0 +1,23 @@ +package armjson + +import ( + "fmt" + + "github.com/aquasecurity/defsec/pkg/types" +) + +var nullRunes = []rune("null") + +func (p *parser) parseNull(parentMetadata *types.Metadata) (Node, error) { + + n, _ := p.newNode(KindNull, parentMetadata) + + for _, expected := range nullRunes { + if !p.swallowIfEqual(expected) { + return nil, fmt.Errorf("unexpected character") + } + } + n.raw = nil + n.end = p.position + return n, nil +} diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_null_test.go b/pkg/scanners/azure/arm/parser/armjson/parse_null_test.go new file mode 100644 index 000000000000..5fd343479c82 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/parse_null_test.go @@ -0,0 +1,18 @@ +package armjson + +import ( + "testing" + + 
"github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/require" +) + +func Test_Null(t *testing.T) { + example := []byte(`null`) + var output string + ref := &output + metadata := types.NewTestMetadata() + err := Unmarshal(example, &ref, &metadata) + require.NoError(t, err) +} diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_number.go b/pkg/scanners/azure/arm/parser/armjson/parse_number.go new file mode 100644 index 000000000000..ca544cecce35 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/parse_number.go @@ -0,0 +1,163 @@ +package armjson + +import ( + "fmt" + "strconv" + "strings" + + "github.com/aquasecurity/defsec/pkg/types" +) + +func (p *parser) parseNumber(parentMetadata *types.Metadata) (Node, error) { + + n, _ := p.newNode(KindNumber, parentMetadata) + + var str string + + if p.swallowIfEqual('-') { + str = "-" + } + + integral, err := p.parseIntegral() + if err != nil { + return nil, err + } + fraction, err := p.parseFraction() + if err != nil { + return nil, err + } + exponent, err := p.parseExponent() + if err != nil { + return nil, err + } + + str = fmt.Sprintf("%s%s%s%s", str, integral, fraction, exponent) + n.end = p.position + + if fraction != "" || exponent != "" { + f, err := strconv.ParseFloat(str, 64) + if err != nil { + return nil, p.makeError("%s", err) + } + n.raw = f + return n, nil + } + + i, err := strconv.ParseInt(str, 10, 64) + if err != nil { + return nil, p.makeError("%s", err) + } + n.raw = i + + return n, nil +} + +func (p *parser) parseIntegral() (string, error) { + r, err := p.next() + if err != nil { + return "", err + } + if r == '0' { + r, _ := p.peeker.Peek() + if r >= '0' && r <= '9' { + return "", p.makeError("invalid number") + } + return "0", nil + } + + var sb strings.Builder + if r < '1' || r > '9' { + return "", p.makeError("invalid number") + } + sb.WriteRune(r) + + for { + r, err := p.next() + if err != nil { + return sb.String(), nil + } + if r < '0' || r 
> '9' { + return sb.String(), p.undo() + } + sb.WriteRune(r) + } +} + +func (p *parser) parseFraction() (string, error) { + r, err := p.next() + if err != nil { + return "", nil + } + if r != '.' { + return "", p.undo() + } + + var sb strings.Builder + sb.WriteRune('.') + + for { + r, err := p.next() + if err != nil { + break + } + if r < '0' || r > '9' { + if err := p.undo(); err != nil { + return "", err + } + break + } + sb.WriteRune(r) + } + + str := sb.String() + if str == "." { + return "", p.makeError("invalid number - missing digits after decimal point") + } + + return str, nil +} + +func (p *parser) parseExponent() (string, error) { + r, err := p.next() + if err != nil { + return "", nil + } + if r != 'e' && r != 'E' { + return "", p.undo() + } + + var sb strings.Builder + sb.WriteRune(r) + + r, err = p.next() + if err != nil { + return "", nil + } + hasDigits := r >= '0' && r <= '9' + if r != '-' && r != '+' && !hasDigits { + return "", p.undo() + } + + sb.WriteRune(r) + + for { + r, err := p.next() + if err != nil { + break + } + if r < '0' || r > '9' { + if err := p.undo(); err != nil { + return "", err + } + break + } + hasDigits = true + sb.WriteRune(r) + } + + if !hasDigits { + return "", p.makeError("invalid number - no digits in exponent") + } + + return sb.String(), nil +} diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_number_test.go b/pkg/scanners/azure/arm/parser/armjson/parse_number_test.go new file mode 100644 index 000000000000..237c3b918fe9 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/parse_number_test.go @@ -0,0 +1,178 @@ +package armjson + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Number_IntToInt(t *testing.T) { + example := []byte(`123`) + var output int + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + 
assert.Equal(t, 123, output) +} + +func Test_Number_IntToFloat(t *testing.T) { + example := []byte(`123`) + var output float64 + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.Equal(t, 123.0, output) +} + +func Test_Number_FloatToFloat(t *testing.T) { + example := []byte(`123.456`) + var output float64 + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.Equal(t, 123.456, output) +} + +func Test_Number_FloatToInt(t *testing.T) { + example := []byte(`123.456`) + var output int + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.Equal(t, 123, output) +} + +func Test_Number_FloatWithExponent(t *testing.T) { + cases := []struct { + in string + out float64 + }{ + { + in: `123.456e10`, + out: 123.456e+10, + }, + { + in: `123e+1`, + out: 123e+1, + }, + { + in: `123e-2`, + out: 123e-2, + }, + } + for _, test := range cases { + t.Run(test.in, func(t *testing.T) { + example := []byte(test.in) + var output float64 + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.Equal(t, test.out, output) + + }) + } +} + +func Test_Number_IntWithExponent(t *testing.T) { + cases := []struct { + in string + out int64 + }{ + { + in: `123e10`, + out: 123e+10, + }, + { + in: `123e+1`, + out: 123e+1, + }, + } + for _, test := range cases { + t.Run(test.in, func(t *testing.T) { + example := []byte(test.in) + var output int64 + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.Equal(t, test.out, output) + + }) + } +} + +func Test_Number_Ints(t *testing.T) { + cases := []struct { + in string + out int64 + err bool + }{ + { + in: `123e10`, + out: 123e+10, + }, + { + in: `-1`, + out: -1, + }, + { + in: `1.0123`, + out: 1, + }, + { + in: `0`, + out: 0, + }, + { 
+ in: `01`, + err: true, + }, + { + in: ``, + err: true, + }, + { + in: `+1`, + err: true, + }, + { + in: `e`, + err: true, + }, + + { + in: `.123`, + err: true, + }, + + { + in: `.`, + err: true, + }, + + { + in: `00`, + err: true, + }, + { + in: `-`, + err: true, + }, + } + for _, test := range cases { + t.Run(test.in, func(t *testing.T) { + example := []byte(test.in) + var output int64 + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + if test.err { + require.Error(t, err) + return + } + require.NoError(t, err) + assert.Equal(t, test.out, output) + }) + } +} diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_object.go b/pkg/scanners/azure/arm/parser/armjson/parse_object.go new file mode 100644 index 000000000000..f87ffc439ac9 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/parse_object.go @@ -0,0 +1,143 @@ +package armjson + +import ( + "github.com/aquasecurity/defsec/pkg/types" +) + +func (p *parser) parseObject(parentMetadata *types.Metadata) (Node, error) { + + n, metadata := p.newNode(KindObject, parentMetadata) + + c, err := p.next() + if err != nil { + return nil, err + } + + if c != '{' { + return nil, p.makeError("expecting object delimiter") + } + + if err := p.parseWhitespace(); err != nil { + return nil, err + } + + // we've hit the end of the object + if p.swallowIfEqual('}') { + n.end = p.position + return n, nil + } + + var nextComments []Node + return p.iterateObject(nextComments, metadata, n) + +} + +// nolint: cyclop +func (p *parser) iterateObject(nextComments []Node, metadata *types.Metadata, n *node) (Node, error) { + for { + + if err := p.parseWhitespace(); err != nil { + return nil, err + } + + comments := make([]Node, len(nextComments)) + copy(comments, nextComments) + nextComments = nil + for { + peeked, err := p.peeker.Peek() + if err != nil { + return nil, err + } + if peeked != '/' { + break + } + comment, err := p.parseComment(metadata) + if err != nil { + return nil, err + 
} + comments = append(comments, comment) + } + + if comments != nil { + if err := p.parseWhitespace(); err != nil { + return nil, err + } + } + + key, err := p.parseString(metadata) + if err != nil { + return nil, err + } + + if err := p.parseWhitespace(); err != nil { + return nil, err + } + + if !p.swallowIfEqual(':') { + return nil, p.makeError("invalid character, expecting ':'") + } + + val, err := p.parseElement(metadata) + if err != nil { + return nil, err + } + ref := key.(*node).raw.(string) + key.(*node).ref = ref + val.(*node).ref = ref + + for { + peeked, err := p.peeker.Peek() + if err != nil { + return nil, err + } + if peeked != '/' { + break + } + comment, err := p.parseComment(metadata) + if err != nil { + return nil, err + } + comments = append(comments, comment) + } + + // we've hit the end of the object + if p.swallowIfEqual('}') { + key.(*node).comments = comments + val.(*node).comments = comments + n.content = append(n.content, key, val) + n.end = p.position + return n, nil + } + + if !p.swallowIfEqual(',') { + return nil, p.makeError("unexpected character - expecting , or }") + } + + for { + if err := p.parseWhitespace(); err != nil { + return nil, err + } + peeked, err := p.peeker.Peek() + if err != nil { + return nil, err + } + if peeked != '/' { + break + } + comment, err := p.parseComment(metadata) + if err != nil { + return nil, err + } + if comment.Range().Start.Line > val.Range().End.Line { + nextComments = append(nextComments, comment) + } else { + comments = append(comments, comment) + } + } + + key.(*node).comments = comments + val.(*node).comments = comments + n.content = append(n.content, key, val) + + } +} diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_object_test.go b/pkg/scanners/azure/arm/parser/armjson/parse_object_test.go new file mode 100644 index 000000000000..56985ecbc805 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/parse_object_test.go @@ -0,0 +1,115 @@ +package armjson + +import ( + "testing" + + 
"github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Object(t *testing.T) { + example := []byte(`{ + "name": "testing", + "balance": 3.14 +}`) + target := struct { + Name string `json:"name"` + Balance float64 `json:"balance"` + }{} + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &target, &metadata)) + assert.Equal(t, "testing", target.Name) + assert.Equal(t, 3.14, target.Balance) +} + +func Test_ObjectWithPointers(t *testing.T) { + example := []byte(`{ + "name": "testing", + "balance": 3.14 +}`) + target := struct { + Name *string `json:"name"` + Balance *float64 `json:"balance"` + }{} + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &target, &metadata)) + assert.Equal(t, "testing", *target.Name) + assert.Equal(t, 3.14, *target.Balance) +} + +type nestedParent struct { + Child *nestedChild + Name string +} + +type nestedChild struct { + Blah string `json:"secret"` +} + +func Test_ObjectWithPointerToNestedStruct(t *testing.T) { + example := []byte(`{ + "Child": { + "secret": "password" + }, + "Name": "testing" +}`) + + var parent nestedParent + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &parent, &metadata)) + assert.Equal(t, "testing", parent.Name) + assert.Equal(t, "password", parent.Child.Blah) +} + +func Test_Object_ToMapStringInterface(t *testing.T) { + example := []byte(`{ + "Name": "testing" +}`) + + parent := make(map[string]interface{}) + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &parent, &metadata)) + assert.Equal(t, "testing", parent["Name"]) +} + +func Test_Object_ToNestedMapStringInterfaceFromIAM(t *testing.T) { + example := []byte(` +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "", + "Effect": "Allow", + "Action": "ec2:*", + "Resource": "*", + "Condition": { + "Bool": { + "aws:MultiFactorAuthPresent": 
["true"] + } + } + } + ] +}`) + + parent := make(map[string]interface{}) + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &parent, &metadata)) +} + +func Test_Object_ToNestedMapStringInterface(t *testing.T) { + example := []byte(`{ + "Child": { + "secret": "password" + }, + "Name": "testing" +}`) + + parent := make(map[string]interface{}) + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &parent, &metadata)) + assert.Equal(t, "testing", parent["Name"]) + child := parent["Child"].(map[string]interface{}) + assert.Equal(t, "password", child["secret"]) +} diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_string.go b/pkg/scanners/azure/arm/parser/armjson/parse_string.go new file mode 100644 index 000000000000..7f4740cd459b --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/parse_string.go @@ -0,0 +1,91 @@ +package armjson + +import ( + "strconv" + "strings" + + "github.com/aquasecurity/defsec/pkg/types" +) + +var escapes = map[rune]string{ + '\\': "\\", + '/': "/", + '"': "\"", + 'n': "\n", + 'r': "\r", + 'b': "\b", + 'f': "\f", + 't': "\t", +} + +// nolint: cyclop +func (p *parser) parseString(parentMetadata *types.Metadata) (Node, error) { + + n, _ := p.newNode(KindString, parentMetadata) + + b, err := p.next() + if err != nil { + return nil, err + } + + if b != '"' { + return nil, p.makeError("expecting string delimiter") + } + + var sb strings.Builder + + var inEscape bool + var inHex bool + var hex []rune + + for { + c, err := p.next() + if err != nil { + return nil, err + } + // nolint: gocritic + if inHex { + switch { + case c >= 'a' && c <= 'f', c >= 'A' && c <= 'F', c >= '0' && c <= '9': + hex = append(hex, c) + if len(hex) == 4 { + inHex = false + char, err := strconv.Unquote("\\u" + string(hex)) + if err != nil { + return nil, p.makeError("invalid unicode character '%s'", err) + } + sb.WriteString(char) + hex = nil + } + default: + return nil, p.makeError("invalid hexedecimal 
escape sequence '\\%s%c'", string(hex), c) + } + } else if inEscape { + inEscape = false + if c == 'u' { + inHex = true + continue + } + seq, ok := escapes[c] + if !ok { + return nil, p.makeError("invalid escape sequence '\\%c'", c) + } + sb.WriteString(seq) + } else { + switch c { + case '\\': + inEscape = true + case '"': + n.raw = sb.String() + n.end = p.position + return n, nil + default: + if c < 0x20 || c > 0x10FFFF { + return nil, p.makeError("invalid unescaped character '0x%X'", c) + } + sb.WriteRune(c) + } + } + + } +} diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_string_test.go b/pkg/scanners/azure/arm/parser/armjson/parse_string_test.go new file mode 100644 index 000000000000..83c98cd859fc --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/parse_string_test.go @@ -0,0 +1,37 @@ +package armjson + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_String(t *testing.T) { + example := []byte(`"hello"`) + var output string + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.Equal(t, "hello", output) +} + +func Test_StringToUninitialisedPointer(t *testing.T) { + example := []byte(`"hello"`) + var str *string + metadata := types.NewTestMetadata() + err := Unmarshal(example, str, &metadata) + require.Error(t, err) + assert.Nil(t, str) +} + +func Test_String_ToInterface(t *testing.T) { + example := []byte(`"hello"`) + var output interface{} + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.Equal(t, "hello", output) +} diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_whitespace.go b/pkg/scanners/azure/arm/parser/armjson/parse_whitespace.go new file mode 100644 index 000000000000..ad5751147d3e --- /dev/null +++ 
b/pkg/scanners/azure/arm/parser/armjson/parse_whitespace.go @@ -0,0 +1,29 @@ +package armjson + +import ( + "errors" + "io" +) + +func (p *parser) parseWhitespace() error { + for { + b, err := p.peeker.Peek() + if err != nil { + if errors.Is(err, io.EOF) { + return nil + } + return err + } + switch b { + case 0x0d, 0x20, 0x09: + case 0x0a: + p.position.Column = 1 + p.position.Line++ + default: + return nil + } + if _, err := p.next(); err != nil { + return err + } + } +} diff --git a/pkg/scanners/azure/arm/parser/armjson/reader.go b/pkg/scanners/azure/arm/parser/armjson/reader.go new file mode 100644 index 000000000000..e05769f02da9 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/reader.go @@ -0,0 +1,36 @@ +package armjson + +import ( + "bufio" + "io" +) + +type PeekReader struct { + underlying *bufio.Reader +} + +func NewPeekReader(reader io.Reader) *PeekReader { + return &PeekReader{ + underlying: bufio.NewReader(reader), + } +} + +func (r *PeekReader) Next() (rune, error) { + c, _, err := r.underlying.ReadRune() + return c, err +} + +func (r *PeekReader) Undo() error { + return r.underlying.UnreadRune() +} + +func (r *PeekReader) Peek() (rune, error) { + c, _, err := r.underlying.ReadRune() + if err != nil { + return 0, err + } + if err := r.underlying.UnreadRune(); err != nil { + return 0, err + } + return c, nil +} diff --git a/pkg/scanners/azure/arm/parser/armjson/reader_test.go b/pkg/scanners/azure/arm/parser/armjson/reader_test.go new file mode 100644 index 000000000000..8017f30f9f98 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/reader_test.go @@ -0,0 +1,62 @@ +package armjson + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +var input = `abcdefghijklmnopqrstuvwxyz` + +func Test_Peeker(t *testing.T) { + peeker := NewPeekReader(strings.NewReader(input)) + + var b rune + var err error + + for i := 0; i < 30; i++ { + b, err = peeker.Peek() + 
require.NoError(t, err) + assert.Equal(t, ('a'), b) + } + + b, err = peeker.Next() + require.NoError(t, err) + assert.Equal(t, ('a'), b) + + b, err = peeker.Next() + require.NoError(t, err) + assert.Equal(t, ('b'), b) + + b, err = peeker.Peek() + require.NoError(t, err) + assert.Equal(t, ('c'), b) + + for i := 0; i < 5; i++ { + b, err = peeker.Next() + require.NoError(t, err) + assert.Equal(t, []rune(input)[2+i], b) + } + + b, err = peeker.Peek() + require.NoError(t, err) + assert.Equal(t, ('h'), b) + + b, err = peeker.Next() + require.NoError(t, err) + assert.Equal(t, ('h'), b) + for i := 0; i < 18; i++ { + b, err = peeker.Next() + require.NoError(t, err) + assert.Equal(t, []rune(input)[8+i], b) + } + + _, err = peeker.Peek() + require.Error(t, err) + + _, err = peeker.Next() + require.Error(t, err) + +} diff --git a/pkg/scanners/azure/arm/parser/armjson/unmarshal.go b/pkg/scanners/azure/arm/parser/armjson/unmarshal.go new file mode 100644 index 000000000000..6e096a694d8a --- /dev/null +++ b/pkg/scanners/azure/arm/parser/armjson/unmarshal.go @@ -0,0 +1,40 @@ +package armjson + +import ( + "bytes" + "io" + + "github.com/aquasecurity/defsec/pkg/types" +) + +type Unmarshaller interface { + UnmarshalJSONWithMetadata(node Node) error +} + +type MetadataReceiver interface { + SetMetadata(m *types.Metadata) +} + +func Unmarshal(data []byte, target interface{}, metadata *types.Metadata) error { + node, err := newParser(NewPeekReader(bytes.NewReader(data)), Position{1, 1}).parse(metadata) + if err != nil { + return err + } + if err := node.Decode(target); err != nil { + return err + } + + return nil +} + +func UnmarshalFromReader(r io.ReadSeeker, target interface{}, metadata *types.Metadata) error { + node, err := newParser(NewPeekReader(r), Position{1, 1}).parse(metadata) + if err != nil { + return err + } + if err := node.Decode(target); err != nil { + return err + } + + return nil +} diff --git a/pkg/scanners/azure/arm/parser/parser.go 
b/pkg/scanners/azure/arm/parser/parser.go new file mode 100644 index 000000000000..a692fd93b438 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/parser.go @@ -0,0 +1,194 @@ +package parser + +import ( + "context" + "fmt" + "io" + "io/fs" + "path/filepath" + "strings" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/scanners/azure/arm/parser/armjson" + "github.com/aquasecurity/trivy/pkg/scanners/azure/resolver" +) + +type Parser struct { + targetFS fs.FS + skipRequired bool + debug debug.Logger +} + +func (p *Parser) SetDebugWriter(writer io.Writer) { + p.debug = debug.New(writer, "azure", "arm") +} + +func (p *Parser) SetSkipRequiredCheck(b bool) { + p.skipRequired = b +} + +func New(targetFS fs.FS, opts ...options.ParserOption) *Parser { + p := &Parser{ + targetFS: targetFS, + } + for _, opt := range opts { + opt(p) + } + return p +} + +func (p *Parser) ParseFS(ctx context.Context, dir string) ([]azure.Deployment, error) { + + var deployments []azure.Deployment + + if err := fs.WalkDir(p.targetFS, dir, func(path string, entry fs.DirEntry, err error) error { + if err != nil { + return err + } + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + if entry.IsDir() { + return nil + } + if !p.Required(path) { + return nil + } + f, err := p.targetFS.Open(path) + if err != nil { + return err + } + defer f.Close() + deployment, err := p.parseFile(f, path) + if err != nil { + return err + } + deployments = append(deployments, *deployment) + return nil + }); err != nil { + return nil, err + } + + return deployments, nil +} + +func (p *Parser) Required(path string) bool { + if p.skipRequired { + return true + } + if !strings.HasSuffix(path, ".json") { + return false + } + data, err := fs.ReadFile(p.targetFS, path) + if err != 
nil { + return false + } + var template Template + root := types.NewMetadata( + types.NewRange(filepath.Base(path), 0, 0, "", p.targetFS), + "", + ) + if err := armjson.Unmarshal(data, &template, &root); err != nil { + p.debug.Log("Error scanning %s: %s", path, err) + return false + } + + if template.Schema.Kind != azure.KindString { + return false + } + + return strings.HasPrefix(template.Schema.AsString(), "https://schema.management.azure.com") +} + +func (p *Parser) parseFile(r io.Reader, filename string) (*azure.Deployment, error) { + var template Template + data, err := io.ReadAll(r) + if err != nil { + return nil, err + } + root := types.NewMetadata( + types.NewRange(filename, 0, 0, "", p.targetFS), + "", + ).WithInternal(resolver.NewResolver()) + + if err := armjson.Unmarshal(data, &template, &root); err != nil { + return nil, fmt.Errorf("failed to parse template: %w", err) + } + return p.convertTemplate(template), nil +} + +func (p *Parser) convertTemplate(template Template) *azure.Deployment { + + deployment := azure.Deployment{ + Metadata: template.Metadata, + TargetScope: azure.ScopeResourceGroup, // TODO: override from --resource-group? + Parameters: nil, + Variables: nil, + Resources: nil, + Outputs: nil, + } + + if r, ok := template.Metadata.Internal().(resolver.Resolver); ok { + r.SetDeployment(&deployment) + } + + // TODO: the references passed here should probably not be the name - maybe params.NAME.DefaultValue? 
+ for name, param := range template.Parameters { + deployment.Parameters = append(deployment.Parameters, azure.Parameter{ + Variable: azure.Variable{ + Name: name, + Value: param.DefaultValue, + }, + Default: param.DefaultValue, + Decorators: nil, + }) + } + + for name, variable := range template.Variables { + deployment.Variables = append(deployment.Variables, azure.Variable{ + Name: name, + Value: variable, + }) + } + + for name, output := range template.Outputs { + deployment.Outputs = append(deployment.Outputs, azure.Output{ + Name: name, + Value: output, + }) + } + + for _, resource := range template.Resources { + deployment.Resources = append(deployment.Resources, p.convertResource(resource)) + } + + return &deployment +} + +func (p *Parser) convertResource(input Resource) azure.Resource { + + var children []azure.Resource + + for _, child := range input.Resources { + children = append(children, p.convertResource(child)) + } + + resource := azure.Resource{ + Metadata: input.Metadata, + APIVersion: input.APIVersion, + Type: input.Type, + Kind: input.Kind, + Name: input.Name, + Location: input.Location, + Properties: input.Properties, + Resources: children, + } + + return resource +} diff --git a/pkg/scanners/azure/arm/parser/parser_test.go b/pkg/scanners/azure/arm/parser/parser_test.go new file mode 100644 index 000000000000..edcec5dc2a29 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/parser_test.go @@ -0,0 +1,338 @@ +package parser + +import ( + "context" + "io/fs" + "os" + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + "github.com/liamg/memoryfs" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/scanners/azure/resolver" +) + +func createMetadata(targetFS fs.FS, filename string, start, end int, ref string, parent 
*types.Metadata) types.Metadata { + child := types.NewMetadata(types.NewRange(filename, start, end, "", targetFS), ref) + if parent != nil { + child.SetParentPtr(parent) + } + return child +} + +func TestParser_Parse(t *testing.T) { + + filename := "example.json" + + targetFS := memoryfs.New() + + tests := []struct { + name string + input string + want func() azure.Deployment + wantDeployment bool + }{ + { + name: "invalid code", + input: `blah`, + wantDeployment: false, + }, + { + name: "basic param", + input: `{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", // another one + "contentVersion": "1.0.0.0", + "parameters": { + "storagePrefix": { + "type": "string", + "defaultValue": "x", + "maxLength": 11, + "minLength": 3 + } + }, + "resources": [] +}`, + want: func() azure.Deployment { + + root := createMetadata(targetFS, filename, 0, 0, "", nil).WithInternal(resolver.NewResolver()) + metadata := createMetadata(targetFS, filename, 1, 13, "", &root) + parametersMetadata := createMetadata(targetFS, filename, 4, 11, "parameters", &metadata) + storageMetadata := createMetadata(targetFS, filename, 5, 10, "parameters.storagePrefix", ¶metersMetadata) + + return azure.Deployment{ + Metadata: metadata, + TargetScope: azure.ScopeResourceGroup, + Parameters: []azure.Parameter{ + { + Variable: azure.Variable{ + Name: "storagePrefix", + Value: azure.NewValue("x", createMetadata(targetFS, filename, 7, 7, "parameters.storagePrefix.defaultValue", &storageMetadata)), + }, + Default: azure.NewValue("x", createMetadata(targetFS, filename, 7, 7, "parameters.storagePrefix.defaultValue", &storageMetadata)), + Decorators: nil, + }, + }, + } + }, + wantDeployment: true, + }, + { + name: "storageAccount", + input: `{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", // another one + "contentVersion": "1.0.0.0", + "parameters": {}, + "resources": [ +{ + "type": "Microsoft.Storage/storageAccounts", 
+ "apiVersion": "2022-05-01", + "name": "myResource", + "location": "string", + "tags": { + "tagName1": "tagValue1", + "tagName2": "tagValue2" + }, + "sku": { + "name": "string" + }, + "kind": "string", + "extendedLocation": { + "name": "string", + "type": "EdgeZone" + }, + "identity": { + "type": "string", + "userAssignedIdentities": {} + }, + "properties": { + "allowSharedKeyAccess":false, + "customDomain": { + "name": "string", + "useSubDomainName":false, + "number": 123 + }, + "networkAcls": [ + { + "bypass": "AzureServices1" + }, + { + "bypass": "AzureServices2" + } + ] + } +} +] +}`, + want: func() azure.Deployment { + + rootMetadata := createMetadata(targetFS, filename, 0, 0, "", nil).WithInternal(resolver.NewResolver()) + fileMetadata := createMetadata(targetFS, filename, 1, 45, "", &rootMetadata) + resourcesMetadata := createMetadata(targetFS, filename, 5, 44, "resources", &fileMetadata) + + resourceMetadata := createMetadata(targetFS, filename, 6, 43, "resources[0]", &resourcesMetadata) + + propertiesMetadata := createMetadata(targetFS, filename, 27, 42, "resources[0].properties", &resourceMetadata) + + customDomainMetadata := createMetadata(targetFS, filename, 29, 33, "resources[0].properties.customDomain", &propertiesMetadata) + networkACLListMetadata := createMetadata(targetFS, filename, 34, 41, "resources[0].properties.networkAcls", &propertiesMetadata) + + networkACL0Metadata := createMetadata(targetFS, filename, 35, 37, "resources[0].properties.networkAcls[0]", &networkACLListMetadata) + networkACL1Metadata := createMetadata(targetFS, filename, 38, 40, "resources[0].properties.networkAcls[1]", &networkACLListMetadata) + + return azure.Deployment{ + Metadata: fileMetadata, + TargetScope: azure.ScopeResourceGroup, + Resources: []azure.Resource{ + { + Metadata: resourceMetadata, + APIVersion: azure.NewValue( + "2022-05-01", + createMetadata(targetFS, filename, 8, 8, "resources[0].apiVersion", &resourceMetadata), + ), + Type: azure.NewValue( + 
"Microsoft.Storage/storageAccounts", + createMetadata(targetFS, filename, 7, 7, "resources[0].type", &resourceMetadata), + ), + Kind: azure.NewValue( + "string", + createMetadata(targetFS, filename, 18, 18, "resources[0].kind", &resourceMetadata), + ), + Name: azure.NewValue( + "myResource", + createMetadata(targetFS, filename, 9, 9, "resources[0].name", &resourceMetadata), + ), + Location: azure.NewValue( + "string", + createMetadata(targetFS, filename, 10, 10, "resources[0].location", &resourceMetadata), + ), + Properties: azure.NewValue( + map[string]azure.Value{ + "allowSharedKeyAccess": azure.NewValue(false, createMetadata(targetFS, filename, 28, 28, "resources[0].properties.allowSharedKeyAccess", &propertiesMetadata)), + "customDomain": azure.NewValue( + map[string]azure.Value{ + "name": azure.NewValue("string", createMetadata(targetFS, filename, 30, 30, "resources[0].properties.customDomain.name", &customDomainMetadata)), + "useSubDomainName": azure.NewValue(false, createMetadata(targetFS, filename, 31, 31, "resources[0].properties.customDomain.useSubDomainName", &customDomainMetadata)), + "number": azure.NewValue(int64(123), createMetadata(targetFS, filename, 32, 32, "resources[0].properties.customDomain.number", &customDomainMetadata)), + }, customDomainMetadata), + "networkAcls": azure.NewValue( + []azure.Value{ + azure.NewValue( + map[string]azure.Value{ + "bypass": azure.NewValue("AzureServices1", createMetadata(targetFS, filename, 36, 36, "resources[0].properties.networkAcls[0].bypass", &networkACL0Metadata)), + }, + networkACL0Metadata, + ), + azure.NewValue( + map[string]azure.Value{ + "bypass": azure.NewValue("AzureServices2", createMetadata(targetFS, filename, 39, 39, "resources[0].properties.networkAcls[1].bypass", &networkACL1Metadata)), + }, + networkACL1Metadata, + ), + }, networkACLListMetadata), + }, + propertiesMetadata, + ), + }, + }, + } + }, + + wantDeployment: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t 
*testing.T) { + + require.NoError(t, targetFS.WriteFile(filename, []byte(tt.input), 0644)) + + p := New(targetFS, options.ParserWithDebug(os.Stderr)) + got, err := p.ParseFS(context.Background(), ".") + require.NoError(t, err) + + if !tt.wantDeployment { + assert.Len(t, got, 0) + return + } + + require.Len(t, got, 1) + want := tt.want() + g := got[0] + + require.Equal(t, want, g) + }) + } +} + +func Test_NestedResourceParsing(t *testing.T) { + + input := ` +{ + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "environment": { + "type": "string", + "allowedValues": [ + "dev", + "test", + "prod" + ] + }, + "location": { + "type": "string", + "defaultValue": "[resourceGroup().location]", + "metadata": { + "description": "Location for all resources." + } + }, + "storageAccountSkuName": { + "type": "string", + "defaultValue": "Standard_LRS" + }, + "storageAccountSkuTier": { + "type": "string", + "defaultValue": "Standard" + } + }, + "variables": { + "uniquePart": "[take(uniqueString(resourceGroup().id), 4)]", + "storageAccountName": "[concat('mystorageaccount', variables('uniquePart'), parameters('environment'))]", + "queueName": "myqueue" + }, + "resources": [ + { + "type": "Microsoft.Storage/storageAccounts", + "name": "[variables('storageAccountName')]", + "location": "[parameters('location')]", + "apiVersion": "2019-06-01", + "sku": { + "name": "[parameters('storageAccountSkuName')]", + "tier": "[parameters('storageAccountSkuTier')]" + }, + "kind": "StorageV2", + "properties": {}, + "resources": [ + { + "name": "[concat('default/', variables('queueName'))]", + "type": "queueServices/queues", + "apiVersion": "2019-06-01", + "dependsOn": [ + "[variables('storageAccountName')]" + ], + "properties": { + "metadata": {} + } + } + ] + } + ] +} +` + + targetFS := memoryfs.New() + + require.NoError(t, targetFS.WriteFile("nested.json", []byte(input), 0644)) + + p := New(targetFS, 
options.ParserWithDebug(os.Stderr)) + got, err := p.ParseFS(context.Background(), ".") + require.NoError(t, err) + require.Len(t, got, 1) + + deployment := got[0] + + require.Len(t, deployment.Resources, 1) + + storageAccountResource := deployment.Resources[0] + + require.Len(t, storageAccountResource.Resources, 1) + + queue := storageAccountResource.Resources[0] + + assert.Equal(t, "queueServices/queues", queue.Type.AsString()) +} + +// +// func Test_JsonFile(t *testing.T) { +// +// input, err := os.ReadFile("testdata/postgres.json") +// require.NoError(t, err) +// +// targetFS := memoryfs.New() +// +// require.NoError(t, targetFS.WriteFile("postgres.json", input, 0644)) +// +// p := New(targetFS, options.ParserWithDebug(os.Stderr)) +// got, err := p.ParseFS(context.Background(), ".") +// require.NoError(t, err) +// +// got[0].Resources[3].Name.Resolve() +// +// name := got[0].Resources[3].Name.AsString() +// assert.Equal(t, "myserver", name) +// +// } diff --git a/pkg/scanners/azure/arm/parser/template.go b/pkg/scanners/azure/arm/parser/template.go new file mode 100644 index 000000000000..9ece4297149e --- /dev/null +++ b/pkg/scanners/azure/arm/parser/template.go @@ -0,0 +1,78 @@ +package parser + +import ( + "github.com/aquasecurity/defsec/pkg/types" + types2 "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/scanners/azure/arm/parser/armjson" +) + +type Template struct { + Metadata types.Metadata `json:"-"` + Schema types2.Value `json:"$schema"` + ContentVersion types2.Value `json:"contentVersion"` + APIProfile types2.Value `json:"apiProfile"` + Parameters map[string]Parameter `json:"parameters"` + Variables map[string]types2.Value `json:"variables"` + Functions []Function `json:"functions"` + Resources []Resource `json:"resources"` + Outputs map[string]types2.Value `json:"outputs"` +} + +type Parameter struct { + Metadata types.Metadata + Type types2.Value `json:"type"` + DefaultValue types2.Value 
`json:"defaultValue"` + MaxLength types2.Value `json:"maxLength"` + MinLength types2.Value `json:"minLength"` +} + +type Function struct{} + +type Resource struct { + Metadata types.Metadata `json:"-"` + innerResource +} + +func (t *Template) SetMetadata(m *types.Metadata) { + t.Metadata = *m +} + +func (r *Resource) SetMetadata(m *types.Metadata) { + r.Metadata = *m +} + +func (p *Parameter) SetMetadata(m *types.Metadata) { + p.Metadata = *m +} + +type innerResource struct { + APIVersion types2.Value `json:"apiVersion"` + Type types2.Value `json:"type"` + Kind types2.Value `json:"kind"` + Name types2.Value `json:"name"` + Location types2.Value `json:"location"` + Tags types2.Value `json:"tags"` + Sku types2.Value `json:"sku"` + Properties types2.Value `json:"properties"` + Resources []Resource `json:"resources"` +} + +func (v *Resource) UnmarshalJSONWithMetadata(node armjson.Node) error { + + if err := node.Decode(&v.innerResource); err != nil { + return err + } + + v.Metadata = node.Metadata() + + for _, comment := range node.Comments() { + var str string + if err := comment.Decode(&str); err != nil { + return err + } + // TODO + // v.Metadata.Comments = append(v.Metadata.Comments, str) + } + + return nil +} diff --git a/pkg/scanners/azure/arm/parser/template_test.go b/pkg/scanners/azure/arm/parser/template_test.go new file mode 100644 index 000000000000..130b513319cd --- /dev/null +++ b/pkg/scanners/azure/arm/parser/template_test.go @@ -0,0 +1,60 @@ +package parser + +import ( + "os" + "path/filepath" + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + types2 "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/scanners/azure/arm/parser/armjson" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_JSONUnmarshal(t *testing.T) { + data, err := os.ReadFile(filepath.Join("testdata", "example.json")) + require.NoError(t, err) + var target 
Template + metadata := types.NewTestMetadata() + require.NoError(t, armjson.Unmarshal(data, &target, &metadata)) + assert.Equal(t, + "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + target.Schema.AsString(), + ) + require.Len(t, target.Schema.Comments, 2) + assert.Equal(t, " wow this is a comment", target.Schema.Comments[0]) + assert.Equal(t, " another one", target.Schema.Comments[1]) + + assert.Equal(t, "1.0.0.0", target.ContentVersion.Raw()) + require.Len(t, target.ContentVersion.Comments, 1) + assert.Equal(t, " this version is great", target.ContentVersion.Comments[0]) + + require.Contains(t, target.Parameters, "storagePrefix") + prefix := target.Parameters["storagePrefix"] + /* + "type": "string", + "defaultValue": "x", + "maxLength": 11, + "minLength": 3 + */ + assert.Equal(t, "string", prefix.Type.Raw()) + assert.Equal(t, types2.KindString, prefix.Type.Kind) + assert.Equal(t, 8, prefix.Type.Metadata.Range().GetStartLine()) + assert.Equal(t, 8, prefix.Type.Metadata.Range().GetEndLine()) + + assert.Equal(t, "x", prefix.DefaultValue.Raw()) + assert.Equal(t, types2.KindString, prefix.DefaultValue.Kind) + assert.Equal(t, 9, prefix.DefaultValue.Metadata.Range().GetStartLine()) + assert.Equal(t, 9, prefix.DefaultValue.Metadata.Range().GetEndLine()) + + assert.Equal(t, int64(11), prefix.MaxLength.Raw()) + assert.Equal(t, types2.KindNumber, prefix.MaxLength.Kind) + assert.Equal(t, 10, prefix.MaxLength.Metadata.Range().GetStartLine()) + assert.Equal(t, 10, prefix.MaxLength.Metadata.Range().GetEndLine()) + + assert.Equal(t, int64(3), prefix.MinLength.Raw()) + assert.Equal(t, types2.KindNumber, prefix.MinLength.Kind) + assert.Equal(t, 11, prefix.MinLength.Metadata.Range().GetStartLine()) + assert.Equal(t, 11, prefix.MinLength.Metadata.Range().GetEndLine()) +} diff --git a/pkg/scanners/azure/arm/parser/testdata/example.json b/pkg/scanners/azure/arm/parser/testdata/example.json new file mode 100644 index 000000000000..9698ed1a0583 --- 
/dev/null +++ b/pkg/scanners/azure/arm/parser/testdata/example.json @@ -0,0 +1,15 @@ +{ + // wow this is a comment + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", // another one + // this version is great + "contentVersion": "1.0.0.0", + "parameters": { + "storagePrefix": { + "type": "string", + "defaultValue": "x", + "maxLength": 11, + "minLength": 3 + } + }, + "resources": [] +} \ No newline at end of file diff --git a/pkg/scanners/azure/arm/parser/testdata/postgres.json b/pkg/scanners/azure/arm/parser/testdata/postgres.json new file mode 100644 index 000000000000..670733fdd308 --- /dev/null +++ b/pkg/scanners/azure/arm/parser/testdata/postgres.json @@ -0,0 +1,73 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "_generator": { + "name": "bicep", + "version": "0.10.61.36676", + "templateHash": "8074447630975889785" + } + }, + "resources": [ + { + "type": "Microsoft.DBforPostgreSQL/servers", + "apiVersion": "2017-12-01", + "name": "myPostgreSQLServer", + "location": "westus", + "identity": { + "type": "SystemAssigned" + }, + "properties": { + "administratorLogin": "myadmin", + "administratorLoginPassword": "myadminpassword", + "version": "9.6", + "sslEnforcement": "Enabled", + "storageProfile": { + "storageMB": 5120 + }, + "createMode": "Default", + "minimalTlsVersion": "1.2", + "publicNetworkAccess": "Enabled", + "FirewallRules": [ + { + "name": "AllowAllAzureIps", + "startIpAddress": "0.0.0.0/0" + } + ] + } + }, + { + "type": "Microsoft.DBforPostgreSQL/servers/configurations", + "apiVersion": "2017-12-01", + "name": "[format('{0}/{1}', 'myPostgreSQLServer', 'connection_throttling')]", + "properties": { + "value": "OFF" + }, + "dependsOn": [ + "[resourceId('Microsoft.DBforPostgreSQL/servers', 'myPostgreSQLServer')]" + ] + }, + { + "type": "Microsoft.DBforPostgreSQL/servers/configurations", + "apiVersion": 
"2017-12-01", + "name": "[format('{0}/{1}', 'myPostgreSQLServer', 'log_checkpoints')]", + "properties": { + "value": "OFF" + }, + "dependsOn": [ + "[resourceId('Microsoft.DBforPostgreSQL/servers', 'myPostgreSQLServer')]" + ] + }, + { + "type": "Microsoft.DBforPostgreSQL/servers/configurations", + "apiVersion": "2017-12-01", + "name": "[format('{0}/{1}', 'myPostgreSQLServer', 'log_connections')]", + "properties": { + "value": "OFF" + }, + "dependsOn": [ + "[resourceId('Microsoft.DBforPostgreSQL/servers', 'myPostgreSQLServer')]" + ] + } + ] +} \ No newline at end of file diff --git a/pkg/scanners/azure/arm/scanner.go b/pkg/scanners/azure/arm/scanner.go new file mode 100644 index 000000000000..c988ba9311c3 --- /dev/null +++ b/pkg/scanners/azure/arm/scanner.go @@ -0,0 +1,187 @@ +package arm + +import ( + "context" + "fmt" + + "io" + "io/fs" + "sync" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/rego" + "github.com/aquasecurity/defsec/pkg/rules" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/state" + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/internal/adapters/arm" + "github.com/aquasecurity/trivy/pkg/scanners" + "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/scanners/azure/arm/parser" +) + +var _ scanners.FSScanner = (*Scanner)(nil) +var _ options.ConfigurableScanner = (*Scanner)(nil) + +type Scanner struct { + scannerOptions []options.ScannerOption + parserOptions []options.ParserOption + debug debug.Logger + frameworks []framework.Framework + skipRequired bool + regoOnly bool + loadEmbeddedPolicies bool + loadEmbeddedLibraries bool + policyDirs []string + policyReaders []io.Reader + regoScanner *rego.Scanner + spec string 
+ sync.Mutex +} + +func (s *Scanner) SetSpec(spec string) { + s.spec = spec +} + +func (s *Scanner) SetRegoOnly(regoOnly bool) { + s.regoOnly = regoOnly +} + +func New(opts ...options.ScannerOption) *Scanner { + scanner := &Scanner{ + scannerOptions: opts, + } + for _, opt := range opts { + opt(scanner) + } + return scanner +} + +func (s *Scanner) Name() string { + return "Azure ARM" +} + +func (s *Scanner) SetDebugWriter(writer io.Writer) { + s.debug = debug.New(writer, "azure", "arm") + s.parserOptions = append(s.parserOptions, options.ParserWithDebug(writer)) +} + +func (s *Scanner) SetPolicyDirs(dirs ...string) { + s.policyDirs = dirs +} + +func (s *Scanner) SetSkipRequiredCheck(skipRequired bool) { + s.skipRequired = skipRequired +} +func (s *Scanner) SetPolicyReaders(readers []io.Reader) { + s.policyReaders = readers +} + +func (s *Scanner) SetPolicyFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} +func (s *Scanner) SetDataFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} + +func (s *Scanner) SetUseEmbeddedPolicies(b bool) { + s.loadEmbeddedPolicies = b +} + +func (s *Scanner) SetUseEmbeddedLibraries(b bool) { + s.loadEmbeddedLibraries = b +} + +func (s *Scanner) SetFrameworks(frameworks []framework.Framework) { + s.frameworks = frameworks +} + +func (s *Scanner) SetTraceWriter(io.Writer) {} +func (s *Scanner) SetPerResultTracingEnabled(bool) {} +func (s *Scanner) SetDataDirs(...string) {} +func (s *Scanner) SetPolicyNamespaces(...string) {} +func (s *Scanner) SetRegoErrorLimit(_ int) {} + +func (s *Scanner) initRegoScanner(srcFS fs.FS) error { + s.Lock() + defer s.Unlock() + if s.regoScanner != nil { + return nil + } + regoScanner := rego.NewScanner(types.SourceCloud, s.scannerOptions...) 
+ regoScanner.SetParentDebugLogger(s.debug) + if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, srcFS, s.policyDirs, s.policyReaders); err != nil { + return err + } + s.regoScanner = regoScanner + return nil +} + +func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, dir string) (scan.Results, error) { + p := parser.New(fs, s.parserOptions...) + deployments, err := p.ParseFS(ctx, dir) + if err != nil { + return nil, err + } + if err := s.initRegoScanner(fs); err != nil { + return nil, err + } + + return s.scanDeployments(ctx, deployments, fs) +} + +func (s *Scanner) scanDeployments(ctx context.Context, deployments []azure.Deployment, f fs.FS) (scan.Results, error) { + + var results scan.Results + + for _, deployment := range deployments { + + result, err := s.scanDeployment(ctx, deployment, f) + if err != nil { + return nil, err + } + results = append(results, result...) + } + + return results, nil +} + +func (s *Scanner) scanDeployment(ctx context.Context, deployment azure.Deployment, fs fs.FS) (scan.Results, error) { + var results scan.Results + deploymentState := s.adaptDeployment(ctx, deployment) + if !s.regoOnly { + for _, rule := range rules.GetRegistered(s.frameworks...) { + select { + case <-ctx.Done(): + return nil, ctx.Err() + default: + } + if rule.GetRule().RegoPackage != "" { + continue + } + ruleResults := rule.Evaluate(deploymentState) + s.debug.Log("Found %d results for %s", len(ruleResults), rule.GetRule().AVDID) + if len(ruleResults) > 0 { + results = append(results, ruleResults...) 
+ } + } + } + + regoResults, err := s.regoScanner.ScanInput(ctx, rego.Input{ + Path: deployment.Metadata.Range().GetFilename(), + FS: fs, + Contents: deploymentState.ToRego(), + }) + if err != nil { + return nil, fmt.Errorf("rego scan error: %w", err) + } + + return append(results, regoResults...), nil +} + +func (s *Scanner) adaptDeployment(ctx context.Context, deployment azure.Deployment) *state.State { + return arm.Adapt(ctx, deployment) +} diff --git a/pkg/scanners/azure/deployment.go b/pkg/scanners/azure/deployment.go new file mode 100644 index 000000000000..6df8b48d6b6a --- /dev/null +++ b/pkg/scanners/azure/deployment.go @@ -0,0 +1,179 @@ +package azure + +import ( + "os" + + "github.com/aquasecurity/defsec/pkg/types" +) + +type Deployment struct { + Metadata types.Metadata + TargetScope Scope + Parameters []Parameter + Variables []Variable + Resources []Resource + Outputs []Output +} + +type Parameter struct { + Variable + Default Value + Decorators []Decorator +} + +type Variable struct { + Name string + Value Value +} + +type Output Variable + +type Resource struct { + Metadata types.Metadata + APIVersion Value + Type Value + Kind Value + Name Value + Location Value + Tags Value + Sku Value + Properties Value + Resources []Resource +} + +type PropertyBag struct { + Metadata types.Metadata + Data map[string]Value +} + +type Decorator struct { + Name string + Args []Value +} + +type Scope string + +const ( + ScopeResourceGroup Scope = "resourceGroup" +) + +func (d *Deployment) GetResourcesByType(t string) []Resource { + var resources []Resource + for _, r := range d.Resources { + if r.Type.AsString() == t { + resources = append(resources, r) + } + } + return resources +} + +func (r *Resource) GetResourcesByType(t string) []Resource { + var resources []Resource + for _, res := range r.Resources { + if res.Type.AsString() == t { + resources = append(resources, res) + } + } + return resources +} + +func (d *Deployment) GetParameter(parameterName 
string) interface{} { + + for _, parameter := range d.Parameters { + if parameter.Name == parameterName { + return parameter.Value.Raw() + } + } + return nil +} + +func (d *Deployment) GetVariable(variableName string) interface{} { + + for _, variable := range d.Variables { + if variable.Name == variableName { + return variable.Value.Raw() + } + } + return nil +} + +func (d *Deployment) GetEnvVariable(envVariableName string) interface{} { + + if envVariable, exists := os.LookupEnv(envVariableName); exists { + return envVariable + } + return nil +} + +func (d *Deployment) GetOutput(outputName string) interface{} { + + for _, output := range d.Outputs { + if output.Name == outputName { + return output.Value.Raw() + } + } + return nil +} + +func (d *Deployment) GetDeployment() interface{} { + + type template struct { + Schema string `json:"$schema"` + ContentVersion string `json:"contentVersion"` + Parameters map[string]interface{} `json:"parameters"` + Variables map[string]interface{} `json:"variables"` + Resources []interface{} `json:"resources"` + Outputs map[string]interface{} `json:"outputs"` + } + + type templateLink struct { + URI string `json:"uri"` + } + + type properties struct { + TemplateLink templateLink `json:"templateLink"` + Template template `json:"template"` + TemplateHash string `json:"templateHash"` + Parameters map[string]interface{} `json:"parameters"` + Mode string `json:"mode"` + ProvisioningState string `json:"provisioningState"` + } + + deploymentShell := struct { + Name string `json:"name"` + Properties properties `json:"properties"` + }{ + Name: "Placeholder Deployment", + Properties: properties{ + TemplateLink: templateLink{ + URI: "https://placeholder.com", + }, + Template: template{ + Schema: "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + ContentVersion: "", + Parameters: make(map[string]interface{}), + Variables: make(map[string]interface{}), + Resources: make([]interface{}, 0), + Outputs: 
make(map[string]interface{}), + }, + }, + } + + for _, parameter := range d.Parameters { + deploymentShell.Properties.Template.Parameters[parameter.Name] = parameter.Value.Raw() + } + + for _, variable := range d.Variables { + deploymentShell.Properties.Template.Variables[variable.Name] = variable.Value.Raw() + } + + for _, resource := range d.Resources { + deploymentShell.Properties.Template.Resources = append(deploymentShell.Properties.Template.Resources, resource) + } + + for _, output := range d.Outputs { + deploymentShell.Properties.Template.Outputs[output.Name] = output.Value.Raw() + } + + return deploymentShell +} diff --git a/pkg/scanners/azure/expressions/lex.go b/pkg/scanners/azure/expressions/lex.go new file mode 100644 index 000000000000..09eb7b819eff --- /dev/null +++ b/pkg/scanners/azure/expressions/lex.go @@ -0,0 +1,203 @@ +package expressions + +import ( + "bufio" + "fmt" + "strconv" + "strings" +) + +type TokenType uint16 + +const ( + TokenName TokenType = iota + TokenOpenParen + TokenCloseParen + TokenComma + TokenDot + TokenLiteralString + TokenLiteralInteger + TokenLiteralFloat + TokenNewLine +) + +type Token struct { + Type TokenType + Data interface{} +} + +type lexer struct { + reader *bufio.Reader +} + +func lex(expression string) ([]Token, error) { + lexer := &lexer{ + reader: bufio.NewReader(strings.NewReader(expression)), + } + return lexer.Lex() +} + +func (l *lexer) unread() { + _ = l.reader.UnreadRune() +} + +func (l *lexer) read() (rune, error) { + r, _, err := l.reader.ReadRune() + return r, err +} + +func (l *lexer) Lex() ([]Token, error) { + var tokens []Token + + for { + r, err := l.read() + if err != nil { + break + } + + switch r { + case ' ', '\t', '\r': + continue + case '\n': + tokens = append(tokens, Token{Type: TokenNewLine}) + case '(': + tokens = append(tokens, Token{Type: TokenOpenParen}) + case ')': + tokens = append(tokens, Token{Type: TokenCloseParen}) + case ',': + tokens = append(tokens, Token{Type: TokenComma}) + 
case '.': + tokens = append(tokens, Token{Type: TokenDot}) + case '"', '\'': + token, err := l.lexString(r) + if err != nil { + return nil, fmt.Errorf("string parse error: %w", err) + } + tokens = append(tokens, token) + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + l.unread() + token, err := l.lexNumber() + if err != nil { + return nil, fmt.Errorf("number parse error: %w", err) + } + tokens = append(tokens, token) + default: + l.unread() + tokens = append(tokens, l.lexKeyword()) + } + } + + return tokens, nil +} + +func (l *lexer) lexString(terminator rune) (Token, error) { + var sb strings.Builder + for { + r, err := l.read() + if err != nil { + break + } + if r == '\\' { + r, err := l.readEscapedChar() + if err != nil { + return Token{}, fmt.Errorf("bad escape: %w", err) + } + sb.WriteRune(r) + continue + } + if r == terminator { + break + } + sb.WriteRune(r) + } + return Token{ + Type: TokenLiteralString, + Data: sb.String(), + }, nil +} + +func (l *lexer) readEscapedChar() (rune, error) { + r, err := l.read() + if err != nil { + return 0, fmt.Errorf("unexpected EOF") + } + switch r { + case 'n': + return '\n', nil + case 'r': + return '\r', nil + case 't': + return '\t', nil + case '"', '\'': + return r, nil + default: + return 0, fmt.Errorf("'%c' is not a supported escape sequence", r) + } +} + +func (l *lexer) lexNumber() (Token, error) { + + var sb strings.Builder + var decimal bool + +LOOP: + for { + r, err := l.read() + if err != nil { + break + } + switch r { + case '.': + decimal = true + sb.WriteRune('.') + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + sb.WriteRune(r) + default: + l.unread() + break LOOP + } + } + + raw := sb.String() + if decimal { + fl, err := strconv.ParseFloat(raw, 64) + if err != nil { + return Token{}, err + } + return Token{ + Type: TokenLiteralFloat, + Data: fl, + }, nil + } + + i, err := strconv.ParseInt(raw, 10, 64) + if err != nil { + return Token{}, err + } + return Token{ + Type: TokenLiteralInteger, 
+ Data: i, + }, nil +} + +func (l *lexer) lexKeyword() Token { + var sb strings.Builder +LOOP: + for { + r, err := l.read() + if err != nil { + break + } + switch { + case r >= 'a' && r <= 'z', r >= 'A' && r <= 'Z', r >= '0' && r <= '9', r == '_': + sb.WriteRune(r) + default: + l.unread() + break LOOP + } + } + return Token{ + Type: TokenName, + Data: sb.String(), + } +} diff --git a/pkg/scanners/azure/expressions/node.go b/pkg/scanners/azure/expressions/node.go new file mode 100644 index 000000000000..3257e127033d --- /dev/null +++ b/pkg/scanners/azure/expressions/node.go @@ -0,0 +1,75 @@ +package expressions + +import ( + "github.com/aquasecurity/trivy/pkg/scanners/azure/functions" +) + +type Node interface { + Evaluate(deploymentProvider functions.DeploymentData) interface{} +} + +type expressionValue struct { + val interface{} +} + +func (e expressionValue) Evaluate(deploymentProvider functions.DeploymentData) interface{} { + if f, ok := e.val.(expression); ok { + return f.Evaluate(deploymentProvider) + } + return e.val +} + +type expression struct { + name string + args []Node +} + +func (f expression) Evaluate(deploymentProvider functions.DeploymentData) interface{} { + args := make([]interface{}, len(f.args)) + for i, arg := range f.args { + args[i] = arg.Evaluate(deploymentProvider) + } + + return functions.Evaluate(deploymentProvider, f.name, args...) 
+} + +func NewExpressionTree(code string) (Node, error) { + tokens, err := lex(code) + if err != nil { + return nil, err + } + + // create a walker for the nodes + tw := newTokenWalker(tokens) + + // generate the root function + return newFunctionNode(tw), nil +} + +func newFunctionNode(tw *tokenWalker) Node { + funcNode := &expression{ + name: tw.pop().Data.(string), + } + + for tw.hasNext() { + token := tw.pop() + if token == nil { + break + } + + switch token.Type { + case TokenCloseParen: + return funcNode + case TokenName: + if tw.peek().Type == TokenOpenParen { + // this is a function, unwind 1 + tw.unPop() + funcNode.args = append(funcNode.args, newFunctionNode(tw)) + } + case TokenLiteralString, TokenLiteralInteger, TokenLiteralFloat: + funcNode.args = append(funcNode.args, expressionValue{token.Data}) + } + + } + return funcNode +} diff --git a/pkg/scanners/azure/expressions/token_walker.go b/pkg/scanners/azure/expressions/token_walker.go new file mode 100644 index 000000000000..d07a238d1bd9 --- /dev/null +++ b/pkg/scanners/azure/expressions/token_walker.go @@ -0,0 +1,40 @@ +package expressions + +type tokenWalker struct { + tokens []Token + currentPosition int +} + +func newTokenWalker(tokens []Token) *tokenWalker { + return &tokenWalker{ + tokens: tokens, + currentPosition: 0, + } +} + +func (t *tokenWalker) peek() Token { + if t.currentPosition >= len(t.tokens) { + return Token{} + } + return t.tokens[t.currentPosition] +} + +func (t *tokenWalker) hasNext() bool { + return t.currentPosition+1 < len(t.tokens) +} + +func (t *tokenWalker) unPop() { + if t.currentPosition > 0 { + t.currentPosition-- + } +} + +func (t *tokenWalker) pop() *Token { + if !t.hasNext() { + return nil + } + + token := t.tokens[t.currentPosition] + t.currentPosition++ + return &token +} diff --git a/pkg/scanners/azure/functions/add.go b/pkg/scanners/azure/functions/add.go new file mode 100644 index 000000000000..9eb699e2eb9b --- /dev/null +++ b/pkg/scanners/azure/functions/add.go 
// Add implements the ARM template add() function: it returns the sum of
// its two integer arguments, or nil when the argument count is wrong or
// either argument is not an int.
func Add(args ...interface{}) interface{} {
	if len(args) != 2 {
		return nil
	}

	lhs, lhsOK := args[0].(int)
	rhs, rhsOK := args[1].(int)
	if !lhsOK || !rhsOK {
		return nil
	}
	return lhs + rhs
}
// And implements the ARM template and() function: it returns true only
// when every argument is the boolean true. It returns false for fewer
// than two arguments or any non-boolean argument.
//
// BUG FIX: the previous implementation returned true whenever all
// arguments were *equal to the first*, so And(false, false) incorrectly
// evaluated to true. A logical AND over all-false inputs must be false.
func And(args ...interface{}) interface{} {

	if len(args) <= 1 {
		return false
	}

	for _, arg := range args {
		b, ok := arg.(bool)
		if !ok || !b {
			return false
		}
	}
	return true
}
// Array implements the ARM template array() function: it wraps its single
// argument in an array. Ints and strings become single-element typed
// slices, maps are flattened into alternating key/value entries, and
// slices pass through unchanged. Anything else yields an empty array;
// a wrong argument count yields "".
func Array(args ...interface{}) interface{} {

	if len(args) != 1 {
		return ""
	}

	switch v := args[0].(type) {
	case int:
		return []int{v}
	case string:
		return []string{v}
	case map[string]interface{}:
		// NOTE: map iteration order is unspecified, so the pairing order
		// of the flattened result is nondeterministic across entries.
		var flattened []interface{}
		for key, value := range v {
			flattened = append(flattened, key, value)
		}
		return flattened
	case []string:
		return v
	case []interface{}:
		return v
	}
	return []interface{}{}
}
// Base64 implements the ARM template base64() function: it returns the
// standard base64 encoding of its string argument. It returns nil when
// no argument is given, and "" when the argument is not a string.
//
// BUG FIX: all three functions here previously used an unchecked type
// assertion on args[0] and panicked on non-string input.
func Base64(args ...interface{}) interface{} {

	if len(args) == 0 {
		return nil
	}

	input, ok := args[0].(string)
	if !ok {
		return ""
	}

	return base64.StdEncoding.EncodeToString([]byte(input))
}

// Base64ToString decodes a standard-base64 string argument. Decoding
// failures and non-string input yield ""; no argument yields nil.
func Base64ToString(args ...interface{}) interface{} {
	if len(args) == 0 {
		return nil
	}

	input, ok := args[0].(string)
	if !ok {
		return ""
	}

	result, err := base64.StdEncoding.DecodeString(input)
	if err != nil {
		return ""
	}
	return string(result)
}

// Base64ToJson decodes a standard-base64 string argument and unmarshals
// the payload as a JSON object. Any failure (missing/non-string argument,
// bad base64, bad JSON) yields nil.
func Base64ToJson(args ...interface{}) interface{} {

	if len(args) == 0 {
		return nil
	}

	input, ok := args[0].(string)
	if !ok {
		return nil
	}

	decoded, err := base64.StdEncoding.DecodeString(input)
	if err != nil {
		return nil
	}

	var result map[string]interface{}

	if err := json.Unmarshal(decoded, &result); err != nil {
		return nil
	}
	return result
}
// Bool implements the ARM template bool() function: it coerces its single
// argument to a boolean. Strings accept (case-insensitively) "true", "1",
// "yes" and "on"; the int 1 is true; everything else — including a wrong
// argument count — is false.
func Bool(args ...interface{}) interface{} {
	if len(args) != 1 {
		return false
	}

	switch v := args[0].(type) {
	case bool:
		return v
	case string:
		switch strings.ToLower(v) {
		case "true", "1", "yes", "on":
			return true
		}
		return false
	case int:
		return v == 1
	}
	return false
}
// ToLower implements the ARM template toLower() function: it lowercases
// its single string argument, returning "" for any invalid input.
func ToLower(args ...interface{}) interface{} {
	return recase(strings.ToLower, args...)
}

// ToUpper implements the ARM template toUpper() function: it uppercases
// its single string argument, returning "" for any invalid input.
func ToUpper(args ...interface{}) interface{} {
	return recase(strings.ToUpper, args...)
}

// recase applies convert to a single string argument; a wrong argument
// count or a non-string argument yields "".
func recase(convert func(string) string, args ...interface{}) interface{} {
	if len(args) != 1 {
		return ""
	}

	s, ok := args[0].(string)
	if !ok {
		return ""
	}

	return convert(s)
}
// Coalesce implements the ARM template coalesce() function: it returns
// the first non-nil argument, or nil when every argument is nil (or no
// arguments are given).
func Coalesce(args ...interface{}) interface{} {
	for i := range args {
		if candidate := args[i]; candidate != nil {
			return candidate
		}
	}
	return nil
}
// Concat implements the ARM template concat() function. When the first
// argument is a string, every argument is formatted and joined into one
// string; otherwise the []interface{} arguments are flattened into one
// array (non-array arguments are skipped). A nil first argument — or,
// after this fix, an empty argument list — yields "".
//
// BUG FIX: the previous implementation indexed args[0] unconditionally
// and panicked when called with no arguments.
func Concat(args ...interface{}) interface{} {

	if len(args) == 0 {
		return ""
	}

	switch args[0].(type) {
	case string:
		var result string
		for _, arg := range args {
			// non-string pieces (ints, floats, bools) are rendered with %v
			result += fmt.Sprintf("%v", arg)
		}
		return result
	case interface{}:
		var result []interface{}
		for _, arg := range args {
			argArr, ok := arg.([]interface{})
			if !ok {
				continue
			}
			result = append(result, argArr...)
		}
		return result
	}
	// a nil first argument matches neither case above
	return ""
}
// Contains implements the ARM template contains() function. For a string
// haystack it does a case-insensitive substring search (integer needles
// are matched by their decimal rendering); for an array it checks element
// equality; for an object it checks key membership. Anything else, or a
// wrong argument count, is false.
func Contains(args ...interface{}) interface{} {

	if len(args) != 2 {
		return false
	}

	haystack, needle := args[0], args[1]

	switch h := haystack.(type) {
	case string:
		lowered := strings.ToLower(h)
		switch n := needle.(type) {
		case string:
			return strings.Contains(lowered, strings.ToLower(n))
		case int, int32, int64, uint, uint32, uint64:
			return strings.Contains(lowered, fmt.Sprintf("%d", n))
		}
	case []interface{}:
		for _, element := range h {
			if element == needle {
				return true
			}
		}
	case map[string]interface{}:
		if key, ok := needle.(string); ok {
			_, found := h[key]
			return found
		}
	}

	return false
}
b/pkg/scanners/azure/functions/contains_test.go new file mode 100644 index 000000000000..e92f08fd5462 --- /dev/null +++ b/pkg/scanners/azure/functions/contains_test.go @@ -0,0 +1,95 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func Test_Contains(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected bool + }{ + { + name: "simple true string contains", + args: []interface{}{ + "hello, world", + "hell", + }, + expected: true, + }, + { + name: "simple false string contains", + args: []interface{}{ + "hello, world", + "help", + }, + expected: false, + }, + { + name: "simple true string contains with case sensitivity", + args: []interface{}{ + "hello, world", + "HELL", + }, + expected: true, + }, + { + name: "simple true string contains with number", + args: []interface{}{ + "You're my number 1", + 1, + }, + expected: true, + }, + { + name: "true object contains key", + args: []interface{}{ + map[string]interface{}{ + "hello": "world", + }, + "hello", + }, + expected: true, + }, + { + name: "false object contains key", + args: []interface{}{ + map[string]interface{}{ + "hello": "world", + }, + "world", + }, + expected: false, + }, + { + name: "true array contains value", + args: []interface{}{ + []interface{}{ + "hello", "world", + }, + "hello", + }, + expected: true, + }, + { + name: "false array contains value", + args: []interface{}{ + []interface{}{ + "hello", "world", + }, + "help", + }, + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + doesContain := Contains(tt.args...) 
// loopCounter tracks the current iteration per named copy loop.
// NOTE(review): this is package-level mutable state — counters persist
// across scans within one process and access is not synchronized; confirm
// scans are single-threaded before relying on it concurrently.
var loopCounter = map[string]int{}

// CopyIndex implements the ARM template copyIndex() function: each call
// advances the named loop's counter (default loop "default") by the given
// offset (default 1) and returns the new value.
func CopyIndex(args ...interface{}) interface{} {
	loopName, offset := "default", 1

	if len(args) > 0 {
		if name, ok := args[0].(string); ok {
			loopName = name
		}
	}
	if len(args) > 1 {
		if step, ok := args[1].(int); ok {
			offset = step
		}
	}

	// a missing key reads as zero, so no explicit initialization is needed
	loopCounter[loopName] += offset
	return loopCounter[loopName]
}
// CreateArray implements the ARM template createArray() function: it
// collects all of its arguments into a single []interface{}. With no
// arguments it returns a nil slice.
func CreateArray(args ...interface{}) interface{} {
	var result []interface{}
	if len(args) == 0 {
		return result
	}
	return append(result, args...)
}
// CreateObject implements the ARM template createObject() function: it
// builds a map from alternating key/value arguments. An odd number of
// arguments — and, after this fix, a non-string key — yields an empty
// object instead of a partial one.
//
// BUG FIX: the previous implementation used an unchecked type assertion
// args[i].(string) and panicked when a key was not a string.
func CreateObject(args ...interface{}) interface{} {
	obj := map[string]interface{}{}
	if len(args) == 0 {
		return obj
	}

	// if there aren't even pairs then return an empty object
	if len(args)%2 != 0 {
		return obj
	}

	for i := 0; i < len(args); i += 2 {
		key, ok := args[i].(string)
		if !ok {
			// invalid key type: treat the whole call as invalid input
			return map[string]interface{}{}
		}
		obj[key] = args[i+1]
	}

	return obj
}
"World"}}, + expected: map[string]interface{}{ + "foo": "bar", + "baz": []string{ + "Hello", "World", + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := CreateObject(tt.args...) + assert.Equal(t, tt.expected, got) + }) + } + +} diff --git a/pkg/scanners/azure/functions/data_uri.go b/pkg/scanners/azure/functions/data_uri.go new file mode 100644 index 000000000000..50f0835ee6ad --- /dev/null +++ b/pkg/scanners/azure/functions/data_uri.go @@ -0,0 +1,36 @@ +package functions + +import ( + "fmt" + "strings" +) + +func DataUri(args ...interface{}) interface{} { + if len(args) == 0 { + return "" + } + + input, ok := args[0].(string) + if !ok { + return "" + } + + return fmt.Sprintf("data:text/plain;charset=utf8;base64,%s", Base64(input)) +} + +func DataUriToString(args ...interface{}) interface{} { + if len(args) == 0 { + return "" + } + + input, ok := args[0].(string) + if !ok { + return "" + } + parts := strings.Split(input, "base64,") + if len(parts) != 2 { + return "" + } + + return Base64ToString(parts[1]) +} diff --git a/pkg/scanners/azure/functions/data_uri_test.go b/pkg/scanners/azure/functions/data_uri_test.go new file mode 100644 index 000000000000..04f92249e093 --- /dev/null +++ b/pkg/scanners/azure/functions/data_uri_test.go @@ -0,0 +1,53 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func Test_data_uri_from_string(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "data uri from string", + args: []interface{}{ + "Hello", + }, + expected: "data:text/plain;charset=utf8;base64,SGVsbG8=", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + dataUri := DataUri(tt.args...) 
+ require.Equal(t, tt.expected, dataUri) + }) + } +} + +func Test_string_from_data_uri(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "data uri to string", + args: []interface{}{ + "data:;base64,SGVsbG8sIFdvcmxkIQ==", + }, + expected: "Hello, World!", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + dataUri := DataUriToString(tt.args...) + require.Equal(t, tt.expected, dataUri) + }) + } +} diff --git a/pkg/scanners/azure/functions/date_time_add.go b/pkg/scanners/azure/functions/date_time_add.go new file mode 100644 index 000000000000..c3b902b08965 --- /dev/null +++ b/pkg/scanners/azure/functions/date_time_add.go @@ -0,0 +1,115 @@ +package functions + +import ( + "fmt" + "regexp" + "strconv" + "time" +) + +var pattern = regexp.MustCompile(`^P((?P\d+)Y)?((?P\d+)M)?((?P\d+)W)?((?P\d+)D)?(T((?P\d+)H)?((?P\d+)M)?((?P\d+)S)?)?$`) + +func DateTimeAdd(args ...interface{}) interface{} { + if len(args) < 2 { + return nil + } + + base, ok := args[0].(string) + if !ok { + return nil + } + + format := time.RFC3339 + if len(args) == 3 { + if providedFormat, ok := args[2].(string); ok { + format = convertFormat(providedFormat) + } + + } + + baseTime, err := time.Parse(format, base) + if err != nil { + return nil + } + + duration, err := parseISO8601(args[1].(string)) + if err != nil { + return nil + } + + timeDuration := duration.timeDuration() + baseTime = baseTime.Add(timeDuration) + + if ok { + return baseTime.Format(format) + } + + return baseTime.Format(time.RFC3339) +} + +type Iso8601Duration struct { + Y int + M int + W int + D int + // Time Component + TH int + TM int + TS int +} + +func parseISO8601(from string) (Iso8601Duration, error) { + var match []string + var d Iso8601Duration + + if pattern.MatchString(from) { + match = pattern.FindStringSubmatch(from) + } else { + return d, fmt.Errorf("could not parse duration string") + } + + for i, name := range pattern.SubexpNames() { + 
part := match[i] + if i == 0 || name == "" || part == "" { + continue + } + + val, err := strconv.Atoi(part) + if err != nil { + return d, err + } + switch name { + case "year": + d.Y = val + case "month": + d.M = val + case "week": + d.W = val + case "day": + d.D = val + case "hour": + d.TH = val + case "minute": + d.TM = val + case "second": + d.TS = val + default: + return d, fmt.Errorf("unknown field %s", name) + } + } + + return d, nil +} + +func (d Iso8601Duration) timeDuration() time.Duration { + var dur time.Duration + dur += time.Duration(d.TH) * time.Hour + dur += time.Duration(d.TM) * time.Minute + dur += time.Duration(d.TS) * time.Second + dur += time.Duration(d.D) * 24 * time.Hour + dur += time.Duration(d.W) * 7 * 24 * time.Hour + dur += time.Duration(d.M) * 30 * 24 * time.Hour + dur += time.Duration(d.Y) * 365 * 24 * time.Hour + + return dur +} diff --git a/pkg/scanners/azure/functions/date_time_epoch.go b/pkg/scanners/azure/functions/date_time_epoch.go new file mode 100644 index 000000000000..9b1802573269 --- /dev/null +++ b/pkg/scanners/azure/functions/date_time_epoch.go @@ -0,0 +1,38 @@ +package functions + +import ( + "time" + + smithyTime "github.com/aws/smithy-go/time" +) + +func DateTimeFromEpoch(args ...interface{}) interface{} { + if len(args) != 1 { + return nil + } + + epoch, ok := args[0].(int) + if !ok { + return nil + } + + return smithyTime.ParseEpochSeconds(float64(epoch)).Format(time.RFC3339) +} + +func DateTimeToEpoch(args ...interface{}) interface{} { + if len(args) != 1 { + return nil + } + + dateTime, ok := args[0].(string) + if !ok { + return nil + } + + parsed, err := time.Parse(time.RFC3339, dateTime) + if err != nil { + return nil + } + + return int(parsed.Unix()) +} diff --git a/pkg/scanners/azure/functions/date_time_epoch_test.go b/pkg/scanners/azure/functions/date_time_epoch_test.go new file mode 100644 index 000000000000..6cdf7a0442bd --- /dev/null +++ b/pkg/scanners/azure/functions/date_time_epoch_test.go @@ -0,0 
+1,51 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_DateTimeFromEpoch(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "datetime from epoch", + args: []interface{}{ + 1683040573, + }, + expected: "2023-05-02T15:16:13Z", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := DateTimeFromEpoch(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } +} + +func Test_DateTimeToEpoch(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "datetime to epoch", + args: []interface{}{ + "2023-05-02T15:16:13Z", + }, + expected: 1683040573, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := DateTimeToEpoch(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/datetime_add_test.go b/pkg/scanners/azure/functions/datetime_add_test.go new file mode 100644 index 000000000000..b5c09d04a742 --- /dev/null +++ b/pkg/scanners/azure/functions/datetime_add_test.go @@ -0,0 +1,72 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_DateTimeAdd(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + + { + name: "datetime add 1 years", + args: []interface{}{ + "2010-01-01T00:00:00Z", + "P1Y", + }, + expected: "2011-01-01T00:00:00Z", + }, + { + name: "datetime add 3 months", + args: []interface{}{ + "2010-01-01T00:00:00Z", + "P3M", + }, + expected: "2010-04-01T00:00:00Z", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := DateTimeAdd(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } +} + +func Test_ISO8601DurationParse(t *testing.T) { + tests := []struct { + name string + args string + expected Iso8601Duration + }{ + + { + name: "parse 1 year", + args: "P1Y", + expected: Iso8601Duration{Y: 1}, + }, + { + name: "parse 3 months", + args: "P3M", + expected: Iso8601Duration{M: 3}, + }, + { + name: "parse 12 hours", + args: "PT12H", + expected: Iso8601Duration{TH: 12}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual, err := parseISO8601(tt.args) + require.NoError(t, err) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/deployment.go b/pkg/scanners/azure/functions/deployment.go new file mode 100644 index 000000000000..afafb2b3587c --- /dev/null +++ b/pkg/scanners/azure/functions/deployment.go @@ -0,0 +1,75 @@ +package functions + +type DeploymentData interface { + GetParameter(name string) interface{} + GetVariable(variableName string) interface{} + GetEnvVariable(envVariableName string) interface{} +} + +func Deployment(deploymentProvider DeploymentData, args ...interface{}) interface{} { + + /* + + { + "name": "", + "properties": { + "templateLink": { + "uri": "" + }, + "template": { + "$schema": "", + "contentVersion": "", + "parameters": {}, + "variables": {}, + "resources": [], + "outputs": {} + }, + "templateHash": "", + "parameters": {}, + "mode": "", + "provisioningState": "" + } + } + + */ + + return nil +} + +func Environment(envProvider DeploymentData, args ...interface{}) interface{} { + if len(args) == 0 { + return nil + } + + envVarName, ok := args[0].(string) + if !ok { + return nil + } + return envProvider.GetEnvVariable(envVarName) +} + +func Variables(varProvider DeploymentData, args ...interface{}) interface{} { + if len(args) == 0 { + return nil + } + + varName, ok := args[0].(string) + if !ok { + return nil + } + return varProvider.GetVariable(varName) +} + +func Parameters(paramProvider 
DeploymentData, args ...interface{}) interface{} { + if len(args) == 0 { + return nil + } + + paramName, ok := args[0].(string) + if !ok { + return nil + } + + return paramProvider.GetParameter(paramName) + +} diff --git a/pkg/scanners/azure/functions/div.go b/pkg/scanners/azure/functions/div.go new file mode 100644 index 000000000000..9de0dfb05f73 --- /dev/null +++ b/pkg/scanners/azure/functions/div.go @@ -0,0 +1,15 @@ +package functions + +func Div(args ...interface{}) interface{} { + + if len(args) != 2 { + return nil + } + + if a, ok := args[0].(int); ok { + if b, ok := args[1].(int); ok { + return a / b + } + } + return nil +} diff --git a/pkg/scanners/azure/functions/div_test.go b/pkg/scanners/azure/functions/div_test.go new file mode 100644 index 000000000000..49166190fb5d --- /dev/null +++ b/pkg/scanners/azure/functions/div_test.go @@ -0,0 +1,38 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Div(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected int + }{ + { + name: "Div 2 by 1", + args: []interface{}{2, 1}, + expected: 2, + }, + { + name: "Div 4 by 2", + args: []interface{}{4, 2}, + expected: 2, + }, + { + name: "Div 6 by 2", + args: []interface{}{6, 2}, + expected: 3, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := Div(tt.args...) 
+ assert.Equal(t, tt.expected, got) + }) + } +} diff --git a/pkg/scanners/azure/functions/empty.go b/pkg/scanners/azure/functions/empty.go new file mode 100644 index 000000000000..1dbe8396f7c3 --- /dev/null +++ b/pkg/scanners/azure/functions/empty.go @@ -0,0 +1,33 @@ +package functions + +func Empty(args ...interface{}) interface{} { + + if len(args) != 1 { + return false + } + + container := args[0] + + switch cType := container.(type) { + case string: + return cType == "" + case map[string]interface{}: + return len(cType) == 0 + case interface{}: + switch iType := cType.(type) { + case []string: + return len(iType) == 0 + case []bool: + return len(iType) == 0 + case []int: + return len(iType) == 0 + case []float64: + return len(iType) == 0 + case map[string]interface{}: + return len(iType) == 0 + } + + } + + return false +} diff --git a/pkg/scanners/azure/functions/empty_test.go b/pkg/scanners/azure/functions/empty_test.go new file mode 100644 index 000000000000..a21fb96cd8cd --- /dev/null +++ b/pkg/scanners/azure/functions/empty_test.go @@ -0,0 +1,68 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func Test_Empty(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected bool + }{ + { + name: "string is empty", + args: []interface{}{ + "", + }, + expected: true, + }, + { + name: "string is not empty", + args: []interface{}{ + "hello, world", + }, + expected: false, + }, + { + name: "array is empty", + args: []interface{}{ + []string{}, + }, + expected: true, + }, + { + name: "array is not empty", + args: []interface{}{ + []string{"Hello", "World"}, + }, + expected: false, + }, + { + name: "map is empty", + args: []interface{}{ + map[string]interface{}{}, + }, + expected: true, + }, + { + name: "map is not empty", + args: []interface{}{ + map[string]interface{}{ + "hello": "world", + }, + "world", + }, + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t 
*testing.T) { + doesContain := Empty(tt.args...) + require.Equal(t, tt.expected, doesContain) + }) + } +} diff --git a/pkg/scanners/azure/functions/ends_with.go b/pkg/scanners/azure/functions/ends_with.go new file mode 100644 index 000000000000..2bcd66217ecb --- /dev/null +++ b/pkg/scanners/azure/functions/ends_with.go @@ -0,0 +1,22 @@ +package functions + +import "strings" + +func EndsWith(args ...interface{}) interface{} { + + if len(args) != 2 { + return false + } + + stringToSearch, ok := args[0].(string) + if !ok { + return false + } + + stringToFind, ok := args[1].(string) + if !ok { + return false + } + + return strings.HasSuffix(stringToSearch, stringToFind) +} diff --git a/pkg/scanners/azure/functions/ends_with_test.go b/pkg/scanners/azure/functions/ends_with_test.go new file mode 100644 index 000000000000..b1d1900ba0d2 --- /dev/null +++ b/pkg/scanners/azure/functions/ends_with_test.go @@ -0,0 +1,41 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_EndsWith(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected bool + }{ + { + name: "string ends with", + args: []interface{}{ + "Hello world!", + "world!", + }, + expected: true, + }, + { + name: "string does not end with", + args: []interface{}{ + "Hello world!", + "world", + }, + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := EndsWith(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } + +} diff --git a/pkg/scanners/azure/functions/equals.go b/pkg/scanners/azure/functions/equals.go new file mode 100644 index 000000000000..ca5174144cb8 --- /dev/null +++ b/pkg/scanners/azure/functions/equals.go @@ -0,0 +1,25 @@ +package functions + +func Equals(args ...interface{}) interface{} { + if len(args) != 2 { + return false + } + + slice1, ok := args[0].([]interface{}) + if ok { + slice2, ok := args[1].([]interface{}) + if ok { + if len(slice1) != len(slice2) { + return false + } + for i := 0; i < len(slice1); i++ { + if slice1[i] != slice2[i] { + return false + } + } + return true + } + } + + return args[0] == args[1] +} diff --git a/pkg/scanners/azure/functions/equals_test.go b/pkg/scanners/azure/functions/equals_test.go new file mode 100644 index 000000000000..e9ad7f03f7c7 --- /dev/null +++ b/pkg/scanners/azure/functions/equals_test.go @@ -0,0 +1,111 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Equals(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "equals with nil", + args: []interface{}{ + nil, + }, + expected: false, + }, + { + name: "equals with nil and string", + args: []interface{}{ + nil, + "test", + }, + expected: false, + }, + { + name: "equals with nil and string and int", + args: []interface{}{ + nil, + "test", + 1, + }, + expected: false, + }, + { + name: "equals with nil and nil and array", + args: []interface{}{ + nil, + nil, + []interface{}{"a", "b", "c"}, + }, + expected: false, + }, + { + name: "equals with nil and nil", + args: []interface{}{ + nil, + nil, + }, + expected: true, + }, + { + name: "equals with string and string", + args: []interface{}{ + "test", + "test", + }, + expected: true, + }, + { + name: "equals with string and string", + args: []interface{}{ + "test", + "test1", + }, + expected: false, + }, + { + name: "equals with int and int", + args: 
[]interface{}{ + 1, + 1, + }, + expected: true, + }, + { + name: "equals with int and int", + args: []interface{}{ + 1, + 2, + }, + expected: false, + }, + { + name: "equals with array and array", + args: []interface{}{ + []interface{}{"a", "b", "c"}, + []interface{}{"a", "b", "c"}, + }, + expected: true, + }, + { + name: "equals with array and array", + args: []interface{}{ + []interface{}{"a", "b", "c"}, + []interface{}{"a", "b", "d"}, + }, + expected: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Equals(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/false.go b/pkg/scanners/azure/functions/false.go new file mode 100644 index 000000000000..26309e333812 --- /dev/null +++ b/pkg/scanners/azure/functions/false.go @@ -0,0 +1,5 @@ +package functions + +func False(args ...interface{}) interface{} { + return false +} diff --git a/pkg/scanners/azure/functions/first.go b/pkg/scanners/azure/functions/first.go new file mode 100644 index 000000000000..3415b453ffe3 --- /dev/null +++ b/pkg/scanners/azure/functions/first.go @@ -0,0 +1,37 @@ +package functions + +func First(args ...interface{}) interface{} { + if len(args) != 1 { + return "" + } + + container := args[0] + + switch cType := container.(type) { + case string: + if len(cType) > 0 { + return string(cType[0]) + } + case interface{}: + switch iType := cType.(type) { + case []string: + if len(iType) > 0 { + return iType[0] + } + case []bool: + if len(iType) > 0 { + return iType[0] + } + case []int: + if len(iType) > 0 { + return iType[0] + } + case []float64: + if len(iType) > 0 { + return iType[0] + } + } + } + + return "" +} diff --git a/pkg/scanners/azure/functions/first_test.go b/pkg/scanners/azure/functions/first_test.go new file mode 100644 index 000000000000..5ce059750184 --- /dev/null +++ b/pkg/scanners/azure/functions/first_test.go @@ -0,0 +1,51 @@ +package functions + +import ( + "testing" + + 
"github.com/stretchr/testify/require" +) + +func Test_First(t *testing.T) { + test := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "first in empty string", + args: []interface{}{ + "", + }, + expected: "", + }, + { + name: "first in string", + args: []interface{}{ + "Hello", + }, + expected: "H", + }, + { + name: "first in empty slice", + args: []interface{}{ + []string{}, + }, + expected: "", + }, + { + name: "first in slice", + args: []interface{}{ + []string{"Hello", "World"}, + }, + expected: "Hello", + }, + } + + for _, tt := range test { + t.Run(tt.name, func(t *testing.T) { + actual := First(tt.args...) + require.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/float.go b/pkg/scanners/azure/functions/float.go new file mode 100644 index 000000000000..512b471b9421 --- /dev/null +++ b/pkg/scanners/azure/functions/float.go @@ -0,0 +1,20 @@ +package functions + +import "strconv" + +func Float(args ...interface{}) interface{} { + if len(args) != 1 { + return 0.0 + } + if a, ok := args[0].(int); ok { + return float64(a) + } + if a, ok := args[0].(string); ok { + f, err := strconv.ParseFloat(a, 64) + if err != nil { + return 0.0 + } + return f + } + return 0.0 +} diff --git a/pkg/scanners/azure/functions/float_test.go b/pkg/scanners/azure/functions/float_test.go new file mode 100644 index 000000000000..a7f5f84a8c20 --- /dev/null +++ b/pkg/scanners/azure/functions/float_test.go @@ -0,0 +1,36 @@ +package functions + +import "testing" + +func Test_Float(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected float64 + }{ + { + name: "Float with 1", + args: []interface{}{1}, + expected: 1.0, + }, + { + name: "Float with 2", + args: []interface{}{"2"}, + expected: 2.0, + }, + { + name: "Float with 3", + args: []interface{}{"2.3"}, + expected: 2.3, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := Float(tt.args...) 
+ if got != tt.expected { + t.Errorf("Float() = %v, want %v", got, tt.expected) + } + }) + } +} diff --git a/pkg/scanners/azure/functions/format.go b/pkg/scanners/azure/functions/format.go new file mode 100644 index 000000000000..207b9ebfdda7 --- /dev/null +++ b/pkg/scanners/azure/functions/format.go @@ -0,0 +1,31 @@ +package functions + +import ( + "fmt" + "strings" +) + +func Format(args ...interface{}) interface{} { + formatter := generateFormatterString(args...) + + return fmt.Sprintf(formatter, args[1:]...) +} + +func generateFormatterString(args ...interface{}) string { + + formatter, ok := args[0].(string) + if !ok { + return "" + } + for i, arg := range args[1:] { + switch arg.(type) { + case string: + formatter = strings.ReplaceAll(formatter, fmt.Sprintf("{%d}", i), "%s") + case int, int32, int64, uint, uint32, uint64: + formatter = strings.ReplaceAll(formatter, fmt.Sprintf("{%d}", i), "%d") + case float64, float32: + formatter = strings.ReplaceAll(formatter, fmt.Sprintf("{%d}", i), "%f") + } + } + return formatter +} diff --git a/pkg/scanners/azure/functions/format_test.go b/pkg/scanners/azure/functions/format_test.go new file mode 100644 index 000000000000..8d5e840c61a6 --- /dev/null +++ b/pkg/scanners/azure/functions/format_test.go @@ -0,0 +1,42 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_FormatCall(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "simple format call", + args: []interface{}{ + "{0}/{1}", + "myPostgreSQLServer", + "log_checkpoints", + }, + expected: "myPostgreSQLServer/log_checkpoints", + }, + { + name: "complex format call", + args: []interface{}{ + "{0} + {1} = {2}", + 1, 2, 3, + }, + expected: "1 + 2 = 3", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Format(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } + +} diff --git a/pkg/scanners/azure/functions/functions.go b/pkg/scanners/azure/functions/functions.go new file mode 100644 index 000000000000..f4ed7815f485 --- /dev/null +++ b/pkg/scanners/azure/functions/functions.go @@ -0,0 +1,99 @@ +package functions + +var deploymentFuncs = map[string]func(dp DeploymentData, args ...interface{}) interface{}{ + "parameters": Parameters, + "deployment": Deployment, + "environment": Environment, + "variables": Variables, +} +var generalFuncs = map[string]func(...interface{}) interface{}{ + + "add": Add, + "and": And, + "array": Array, + "base64": Base64, + "base64ToJson": Base64ToJson, + "bool": Bool, + "coalesce": Coalesce, + "concat": Concat, + "contains": Contains, + "copyIndex": CopyIndex, + "createArray": CreateArray, + "createObject": CreateObject, + "dataUri": DataUri, + "dataUriToString": DataUriToString, + "dateTimeAdd": DateTimeAdd, + "dateTimeFromEpoch": DateTimeFromEpoch, + "dateTimeToEpoch": DateTimeToEpoch, + "div": Div, + "empty": Empty, + "endsWith": EndsWith, + "equals": Equals, + "extensionResourceId": ExtensionResourceID, + "false": False, + "float": Float, + "format": Format, + "greater": Greater, + "greaterOrEquals": GreaterOrEquals, + "guid": Guid, + "if": If, + "indexOf": IndexOf, + "int": Int, + "intersection": Intersection, + "items": Items, + "join": Join, + "lastIndexOf": LastIndexOf, + "length": Length, + "less": Less, + "lessOrEquals": LessOrEquals, + // "list": List, + "managementGroup": ManagementGroup, + "managementGroupResourceId": ManagementGroupResourceID, + "max": Max, + "min": Min, + "mod": Mod, + "mul": Mul, + "newGuid": NewGuid, + "not": Not, + "null": Null, + "or": Or, + "padLeft": PadLeft, + "pickZones": PickZones, + "range": Range, + "reference": Reference, + "replace": Replace, + "resourceGroup": ResourceGroup, + "resourceId": ResourceID, + "skip": Skip, + "split": Split, + "startsWith": StartsWith, + "string": String, + "sub": Sub, + 
"subscription": Subscription, + "subscriptionResourceId": SubscriptionResourceID, + "substring": SubString, + "tenant": Tenant, + "tenantResourceId": TenantResourceID, + "toLower": ToLower, + "toUpper": ToUpper, + "trim": Trim, + "true": True, + "union": Union, + "union:": Union, + "uniqueString": UniqueString, + "uri": Uri, + "utcNow": UTCNow, +} + +func Evaluate(deploymentProvider DeploymentData, name string, args ...interface{}) interface{} { + + if f, ok := deploymentFuncs[name]; ok { + return f(deploymentProvider, args...) + } + + if f, ok := generalFuncs[name]; ok { + return f(args...) + } + + return nil +} diff --git a/pkg/scanners/azure/functions/greater.go b/pkg/scanners/azure/functions/greater.go new file mode 100644 index 000000000000..24bf79834641 --- /dev/null +++ b/pkg/scanners/azure/functions/greater.go @@ -0,0 +1,47 @@ +package functions + +func Greater(args ...interface{}) interface{} { + + if len(args) != 2 { + return false + } + + switch arg0 := args[0].(type) { + case int: + arg1, ok := args[1].(int) + if ok { + return arg0 > arg1 + } + case string: + arg1, ok := args[1].(string) + if ok { + return arg0 > arg1 + } + } + + return false +} + +func GreaterOrEquals(args ...interface{}) interface{} { + + if len(args) != 2 { + return false + } + + switch arg0 := args[0].(type) { + case nil: + return args[1] == nil + case int: + arg1, ok := args[1].(int) + if ok { + return arg0 >= arg1 + } + case string: + arg1, ok := args[1].(string) + if ok { + return arg0 >= arg1 + } + } + + return false +} diff --git a/pkg/scanners/azure/functions/greater_test.go b/pkg/scanners/azure/functions/greater_test.go new file mode 100644 index 000000000000..8d3e1b21b25e --- /dev/null +++ b/pkg/scanners/azure/functions/greater_test.go @@ -0,0 +1,119 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Greater(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + + { + 
name: "greater with nil and string", + args: []interface{}{ + nil, + "test", + }, + expected: false, + }, + { + name: "greater with nil and nil", + args: []interface{}{ + nil, + nil, + }, + expected: false, + }, + { + name: "greater with string and string", + args: []interface{}{ + "test", + "test", + }, + expected: false, + }, + { + name: "greater with string and int", + args: []interface{}{ + "test", + 1, + }, + expected: false, + }, + { + name: "greater with int and int", + args: []interface{}{ + 1, + 1, + }, + expected: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Greater(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } +} + +func Test_GreaterThanOrEqual(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + + { + name: "greater with nil and string", + args: []interface{}{ + nil, + "test", + }, + expected: false, + }, + { + name: "greater with nil and nil", + args: []interface{}{ + nil, + nil, + }, + expected: true, + }, + { + name: "greater with string and string", + args: []interface{}{ + "test", + "test", + }, + expected: true, + }, + { + name: "greater with string and int", + args: []interface{}{ + "test", + 1, + }, + expected: false, + }, + { + name: "greater with int and int", + args: []interface{}{ + 1, + 1, + }, + expected: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := GreaterOrEquals(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/guid.go b/pkg/scanners/azure/functions/guid.go new file mode 100644 index 000000000000..d54bbacb1beb --- /dev/null +++ b/pkg/scanners/azure/functions/guid.go @@ -0,0 +1,44 @@ +package functions + +import ( + "crypto/sha256" + "strings" + + "github.com/google/uuid" +) + +func Guid(args ...interface{}) interface{} { + + if len(args) == 0 { + return "" + } + + hashParts := make([]string, len(args)) + for i, str := range args { + hashParts[i] = str.(string) + } + + guid, err := generateSeededGUID(hashParts...) + if err != nil { + return "" + } + + return guid.String() +} + +func generateSeededGUID(seedParts ...string) (uuid.UUID, error) { + var id uuid.UUID + + stringToHash := strings.Join(seedParts, "") + + hsha2 := sha256.Sum256([]byte(stringToHash)) + + copy(id[:], hsha2[:16]) + id[6] = (id[6] & 0x0f) | 0x40 // Version 4 + id[8] = (id[8] & 0x3f) | 0x80 // Variant is 10 + return id, nil +} + +func NewGuid(args ...interface{}) interface{} { + return uuid.NewString() +} diff --git a/pkg/scanners/azure/functions/guid_test.go b/pkg/scanners/azure/functions/guid_test.go new file mode 100644 index 000000000000..0e47e5383a54 --- /dev/null +++ b/pkg/scanners/azure/functions/guid_test.go @@ -0,0 +1,35 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func Test_Guid(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "guid from a string", + args: []interface{}{ + "hello", + }, + expected: "2cf24dba-5fb0-430e-a6e8-3b2ac5b9e29e", + }, + { + name: "guid from an string", + args: []interface{}{}, + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + guid := Guid(tt.args...) 
+ require.Equal(t, tt.expected, guid) + }) + } +} diff --git a/pkg/scanners/azure/functions/if.go b/pkg/scanners/azure/functions/if.go new file mode 100644 index 000000000000..03fd35e360ff --- /dev/null +++ b/pkg/scanners/azure/functions/if.go @@ -0,0 +1,15 @@ +package functions + +func If(args ...interface{}) interface{} { + + if len(args) != 3 { + return nil + } + + if condition, ok := args[0].(bool); ok { + if condition { + return args[1] + } + } + return args[2] +} diff --git a/pkg/scanners/azure/functions/if_test.go b/pkg/scanners/azure/functions/if_test.go new file mode 100644 index 000000000000..52c645fb30aa --- /dev/null +++ b/pkg/scanners/azure/functions/if_test.go @@ -0,0 +1,44 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_If(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "If with true", + args: []interface{}{true, "true", "false"}, + expected: "true", + }, + { + name: "If with false", + args: []interface{}{false, "true", "false"}, + expected: "false", + }, + { + name: "If with true and slice returned", + args: []interface{}{ + true, + []interface{}{"Hello", "World"}, + []interface{}{"Goodbye", "World"}, + }, + expected: []interface{}{"Hello", "World"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := If(tt.args...) 
+ assert.Equal(t, tt.expected, got) + }) + } + +} diff --git a/pkg/scanners/azure/functions/index_of.go b/pkg/scanners/azure/functions/index_of.go new file mode 100644 index 000000000000..93896e21e897 --- /dev/null +++ b/pkg/scanners/azure/functions/index_of.go @@ -0,0 +1,22 @@ +package functions + +import "strings" + +func IndexOf(args ...interface{}) interface{} { + + if len(args) != 2 { + return -1 + } + + stringToSearch, ok := args[0].(string) + if !ok { + return -1 + } + + stringToFind, ok := args[1].(string) + if !ok { + return -1 + } + + return strings.Index(stringToSearch, stringToFind) +} diff --git a/pkg/scanners/azure/functions/index_of_test.go b/pkg/scanners/azure/functions/index_of_test.go new file mode 100644 index 000000000000..c35d59279942 --- /dev/null +++ b/pkg/scanners/azure/functions/index_of_test.go @@ -0,0 +1,48 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_IndexOf(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected int + }{ + { + name: "get index of string that is there", + args: []interface{}{ + "Hello world!", + "Hell", + }, + expected: 0, + }, + { + name: "get index of string that is there as well", + args: []interface{}{ + "Hello world!", + "world", + }, + expected: 6, + }, + { + name: "get index of string that isn't there", + args: []interface{}{ + "Hello world!", + "planet!", + }, + expected: -1, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := IndexOf(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/int.go b/pkg/scanners/azure/functions/int.go new file mode 100644 index 000000000000..f873a29fb0bf --- /dev/null +++ b/pkg/scanners/azure/functions/int.go @@ -0,0 +1,20 @@ +package functions + +import "strconv" + +func Int(args ...interface{}) interface{} { + if len(args) != 1 { + return 0 + } + if a, ok := args[0].(int); ok { + return a + } + if a, ok := args[0].(string); ok { + i, err := strconv.Atoi(a) + if err != nil { + return 0 + } + return i + } + return 0 +} diff --git a/pkg/scanners/azure/functions/int_test.go b/pkg/scanners/azure/functions/int_test.go new file mode 100644 index 000000000000..0834ecdd6fc2 --- /dev/null +++ b/pkg/scanners/azure/functions/int_test.go @@ -0,0 +1,36 @@ +package functions + +import "testing" + +func Test_Int(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected int + }{ + { + name: "Int with 1", + args: []interface{}{1}, + expected: 1, + }, + { + name: "Int with 2", + args: []interface{}{"2"}, + expected: 2, + }, + { + name: "Int with 2.3", + args: []interface{}{"2.3"}, + expected: 0, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := Int(tt.args...) + if got != tt.expected { + t.Errorf("Int() = %v, want %v", got, tt.expected) + } + }) + } +} diff --git a/pkg/scanners/azure/functions/intersection.go b/pkg/scanners/azure/functions/intersection.go new file mode 100644 index 000000000000..5eace2fe0bc7 --- /dev/null +++ b/pkg/scanners/azure/functions/intersection.go @@ -0,0 +1,76 @@ +package functions + +import "sort" + +func Intersection(args ...interface{}) interface{} { + + if args == nil || len(args) < 2 { + return []interface{}{} + } + + switch args[0].(type) { + case map[string]interface{}: + return intersectionMap(args...) + case interface{}: + return intersectionArray(args...) 
+ } + + return []interface{}{} +} + +func intersectionArray(args ...interface{}) interface{} { + result := []interface{}{} + hash := make(map[interface{}]bool) + + for _, arg := range args[0].([]interface{}) { + hash[arg] = true + } + + for i := 1; i < len(args); i++ { + workingHash := make(map[interface{}]bool) + argArr, ok := args[i].([]interface{}) + if !ok { + continue + } + for _, item := range argArr { + if _, ok := hash[item]; ok { + workingHash[item] = true + } + } + hash = workingHash + } + + for k := range hash { + result = append(result, k) + } + + sort.Slice(result, func(i, j int) bool { + return result[i].(string) < result[j].(string) + }) + + return result +} + +func intersectionMap(args ...interface{}) interface{} { + hash := make(map[string]interface{}) + + for k, v := range args[0].(map[string]interface{}) { + hash[k] = v + } + + for i := 1; i < len(args); i++ { + workingHash := make(map[string]interface{}) + argArr, ok := args[i].(map[string]interface{}) + if !ok { + continue + } + for k, v := range argArr { + if ev, ok := hash[k]; ok && ev == v { + workingHash[k] = v + } + } + hash = workingHash + } + + return hash +} diff --git a/pkg/scanners/azure/functions/intersection_test.go b/pkg/scanners/azure/functions/intersection_test.go new file mode 100644 index 000000000000..98630fa9687c --- /dev/null +++ b/pkg/scanners/azure/functions/intersection_test.go @@ -0,0 +1,106 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Intersect(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "intersect two arrays", + args: []interface{}{ + []interface{}{"a", "b", "c"}, + []interface{}{"b", "c", "d"}, + }, + expected: []interface{}{"b", "c"}, + }, + { + name: "intersect three arrays", + args: []interface{}{ + []interface{}{"a", "b", "c", "d"}, + []interface{}{"b", "c", "d"}, + []interface{}{"b", "c"}, + }, + expected: []interface{}{"b", 
"c"}, + }, + { + name: "intersect two arrays with one empty", + args: []interface{}{ + []interface{}{"a", "b", "c"}, + []interface{}{}, + }, + expected: []interface{}{}, + }, + { + name: "intersect two arrays with both empty", + args: []interface{}{ + []interface{}{}, + []interface{}{}, + }, + expected: []interface{}{}, + }, + { + name: "intersect two arrays with both nil", + args: []interface{}{ + nil, + nil, + }, + expected: []interface{}{}, + }, + { + name: "intersect two maps", + args: []interface{}{ + map[string]interface{}{ + "a": "a", + "b": "b", + "c": "c", + }, + map[string]interface{}{ + "b": "b", + "c": "c", + "d": "d", + }, + }, + expected: map[string]interface{}{ + "b": "b", + "c": "c", + }, + }, + { + name: "intersect three maps", + args: []interface{}{ + map[string]interface{}{ + "a": "a", + "b": "b", + "c": "c", + }, + map[string]interface{}{ + "b": "b", + "c": "c", + "d": "d", + }, + map[string]interface{}{ + "b": "b", + "d": "d", + }, + }, + expected: map[string]interface{}{ + "b": "b", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Intersection(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/items.go b/pkg/scanners/azure/functions/items.go new file mode 100644 index 000000000000..2b40a369ea46 --- /dev/null +++ b/pkg/scanners/azure/functions/items.go @@ -0,0 +1,6 @@ +package functions + +func Items(args ...interface{}) interface{} { + + return nil +} diff --git a/pkg/scanners/azure/functions/join.go b/pkg/scanners/azure/functions/join.go new file mode 100644 index 000000000000..cdefa43fdad0 --- /dev/null +++ b/pkg/scanners/azure/functions/join.go @@ -0,0 +1,22 @@ +package functions + +import "strings" + +func Join(args ...interface{}) interface{} { + + if len(args) != 2 { + return "" + } + + container, ok := args[0].([]string) + if !ok { + return "" + } + + separator, ok := args[1].(string) + if !ok { + return "" + } + + return strings.Join(container, separator) +} diff --git a/pkg/scanners/azure/functions/join_test.go b/pkg/scanners/azure/functions/join_test.go new file mode 100644 index 000000000000..fab50a4e1e90 --- /dev/null +++ b/pkg/scanners/azure/functions/join_test.go @@ -0,0 +1,39 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Join(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "join strings with no items", + args: []interface{}{ + []string{}, + " ", + }, + expected: "", + }, + { + name: "join strings", + args: []interface{}{ + []string{"Hello", "World"}, + " ", + }, + expected: "Hello World", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Join(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/json.go b/pkg/scanners/azure/functions/json.go new file mode 100644 index 000000000000..7694b358737b --- /dev/null +++ b/pkg/scanners/azure/functions/json.go @@ -0,0 +1,20 @@ +package functions + +import "encoding/json" + +func JSON(args ...interface{}) interface{} { + if len(args) != 1 { + return "" + } + + value, ok := args[0].(string) + if !ok { + return "" + } + + var jsonType map[string]interface{} + if err := json.Unmarshal([]byte(value), &jsonType); err != nil { + return "" + } + return jsonType +} diff --git a/pkg/scanners/azure/functions/json_test.go b/pkg/scanners/azure/functions/json_test.go new file mode 100644 index 000000000000..1f04cd65026f --- /dev/null +++ b/pkg/scanners/azure/functions/json_test.go @@ -0,0 +1,42 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_JSON(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected map[string]interface{} + }{ + { + name: "simple json string to json type", + args: []interface{}{ + `{"hello": "world"}`, + }, + expected: map[string]interface{}{ + "hello": "world", + }, + }, + { + name: "more complex json string to json type", + args: []interface{}{ + `{"hello": ["world", "world2"]}`, + }, + expected: map[string]interface{}{ + "hello": []interface{}{"world", "world2"}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := JSON(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/last.go b/pkg/scanners/azure/functions/last.go new file mode 100644 index 000000000000..8466ec6b669f --- /dev/null +++ b/pkg/scanners/azure/functions/last.go @@ -0,0 +1,37 @@ +package functions + +func Last(args ...interface{}) interface{} { + if len(args) != 1 { + return "" + } + + container := args[0] + + switch cType := container.(type) { + case string: + if len(cType) > 0 { + return string(cType[len(cType)-1]) + } + case interface{}: + switch iType := cType.(type) { + case []string: + if len(iType) > 0 { + return iType[len(iType)-1] + } + case []bool: + if len(iType) > 0 { + return iType[len(iType)-1] + } + case []int: + if len(iType) > 0 { + return iType[len(iType)-1] + } + case []float64: + if len(iType) > 0 { + return iType[len(iType)-1] + } + } + } + + return "" +} diff --git a/pkg/scanners/azure/functions/last_index_of.go b/pkg/scanners/azure/functions/last_index_of.go new file mode 100644 index 000000000000..7dce6320d8fb --- /dev/null +++ b/pkg/scanners/azure/functions/last_index_of.go @@ -0,0 +1,22 @@ +package functions + +import "strings" + +func LastIndexOf(args ...interface{}) interface{} { + + if len(args) != 2 { + return -1 + } + + stringToSearch, ok := args[0].(string) + if !ok { + return -1 + } + + stringToFind, ok := args[1].(string) + if !ok { + return -1 + } + + return strings.LastIndex(stringToSearch, stringToFind) +} diff --git a/pkg/scanners/azure/functions/last_index_of_test.go b/pkg/scanners/azure/functions/last_index_of_test.go new file mode 100644 index 000000000000..96b78d72dc5f --- /dev/null +++ b/pkg/scanners/azure/functions/last_index_of_test.go @@ -0,0 +1,48 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_LastIndexOf(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected int + }{ + { + name: "get last index of string that is there", + args: 
[]interface{}{ + "Hello world!", + "l", + }, + expected: 9, + }, + { + name: "get last index of string that is there as well", + args: []interface{}{ + "Hello world!", + "world", + }, + expected: 6, + }, + { + name: "get last index of string that isn't there", + args: []interface{}{ + "Hello world!", + "planet!", + }, + expected: -1, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := LastIndexOf(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/last_test.go b/pkg/scanners/azure/functions/last_test.go new file mode 100644 index 000000000000..2ceafbf8a69a --- /dev/null +++ b/pkg/scanners/azure/functions/last_test.go @@ -0,0 +1,51 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func Test_Last(t *testing.T) { + test := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "last in empty string", + args: []interface{}{ + "", + }, + expected: "", + }, + { + name: "last in string", + args: []interface{}{ + "Hello", + }, + expected: "o", + }, + { + name: "last in empty slice", + args: []interface{}{ + []string{}, + }, + expected: "", + }, + { + name: "last in slice", + args: []interface{}{ + []string{"Hello", "World"}, + }, + expected: "World", + }, + } + + for _, tt := range test { + t.Run(tt.name, func(t *testing.T) { + actual := Last(tt.args...) 
// Length returns the number of bytes in a string, entries in a map, or
// elements in a supported slice type; 0 for anything else.
func Length(args ...interface{}) interface{} {

	if len(args) != 1 {
		return 0
	}

	// A direct type switch covers the same concrete types the nested
	// switch did, one case per supported collection.
	switch coll := args[0].(type) {
	case string:
		return len(coll)
	case map[string]interface{}:
		return len(coll)
	case []string:
		return len(coll)
	case []bool:
		return len(coll)
	case []int:
		return len(coll)
	case []float64:
		return len(coll)
	case []interface{}:
		return len(coll)
	}
	return 0
}
// Less reports whether args[0] < args[1]. Only int/int and
// string/string pairs are comparable; every other combination yields
// false.
func Less(args ...interface{}) interface{} {

	if len(args) != 2 {
		return false
	}

	switch lhs := args[0].(type) {
	case int:
		if rhs, ok := args[1].(int); ok {
			return lhs < rhs
		}
	case string:
		if rhs, ok := args[1].(string); ok {
			return lhs < rhs
		}
	}

	return false
}

// LessOrEquals reports whether args[0] <= args[1]. Two nils are
// considered equal; otherwise only int/int and string/string pairs
// compare, and any other combination yields false.
func LessOrEquals(args ...interface{}) interface{} {

	if len(args) != 2 {
		return false
	}

	switch lhs := args[0].(type) {
	case nil:
		return args[1] == nil
	case int:
		if rhs, ok := args[1].(int); ok {
			return lhs <= rhs
		}
	case string:
		if rhs, ok := args[1].(string); ok {
			return lhs <= rhs
		}
	}

	return false
}
// Max returns the largest of its integer arguments. It accepts either a
// single []int argument or a variadic list of ints, and returns 0 for
// empty or unsupported input.
//
// Fixes: calling Max() with no arguments previously indexed args[0]
// and panicked; a non-int value in a variadic int list previously hit
// an unchecked type assertion and panicked (non-ints are now skipped).
func Max(args ...interface{}) interface{} {
	if len(args) == 0 {
		return 0
	}
	switch first := args[0].(type) {
	case int:
		ints := make([]int, 0, len(args))
		ints = append(ints, first)
		for _, arg := range args[1:] {
			if v, ok := arg.(int); ok {
				ints = append(ints, v)
			}
		}
		return maxInt(ints)
	case []int:
		return maxInt(first)
	}
	return 0
}

// maxInt returns the maximum of nums, or 0 when nums is empty.
func maxInt(nums []int) int {
	if len(nums) == 0 {
		return 0
	}

	max := nums[0]
	for _, n := range nums[1:] {
		if n > max {
			max = n
		}
	}
	return max
}
// Min returns the smallest of its integer arguments. It accepts either
// a single []int argument or a variadic list of ints, and returns 0 for
// empty or unsupported input.
//
// Fixes: calling Min() with no arguments previously indexed args[0]
// and panicked; a non-int value in a variadic int list previously hit
// an unchecked type assertion and panicked (non-ints are now skipped).
func Min(args ...interface{}) interface{} {
	if len(args) == 0 {
		return 0
	}
	switch first := args[0].(type) {
	case int:
		ints := make([]int, 0, len(args))
		ints = append(ints, first)
		for _, arg := range args[1:] {
			if v, ok := arg.(int); ok {
				ints = append(ints, v)
			}
		}
		return minInt(ints)
	case []int:
		return minInt(first)
	}
	return 0
}

// minInt returns the minimum of nums, or 0 when nums is empty.
func minInt(nums []int) int {
	if len(nums) == 0 {
		return 0
	}

	min := nums[0]
	for _, n := range nums[1:] {
		if n < min {
			min = n
		}
	}
	return min
}
// Mod returns args[0] % args[1] for two int arguments. It returns 0 on
// a wrong argument count, non-int arguments, or a zero divisor.
//
// Fix: a zero divisor previously caused an integer-divide-by-zero
// panic; it now returns 0, consistent with the function's existing
// bad-input convention.
func Mod(args ...interface{}) interface{} {
	if len(args) != 2 {
		return 0
	}

	a, aok := args[0].(int)
	b, bok := args[1].(int)
	if !aok || !bok || b == 0 {
		return 0
	}
	return a % b
}
// Mul multiplies its two int arguments, returning nil on a wrong
// argument count or non-int operands.
func Mul(args ...interface{}) interface{} {

	if len(args) != 2 {
		return nil
	}

	multiplicand, firstOK := args[0].(int)
	multiplier, secondOK := args[1].(int)
	if !firstOK || !secondOK {
		return nil
	}
	return multiplicand * multiplier
}
// Not negates its single bool argument; any other input yields false.
func Not(args ...interface{}) interface{} {

	if len(args) != 1 {
		return false
	}

	condition, isBool := args[0].(bool)
	return isBool && !condition
}
// Null always evaluates to nil, regardless of arguments.
func Null(args ...interface{}) interface{} {
	return nil
}

// Or returns true as soon as any argument is the bool true. It requires
// at least two arguments, and scanning stops with false at the first
// non-bool argument encountered (earlier trues have already returned).
func Or(args ...interface{}) interface{} {

	if len(args) <= 1 {
		return false
	}

	for _, raw := range args {
		value, isBool := raw.(bool)
		if !isBool {
			return false
		}
		if value {
			return true
		}
	}
	return false
}
// PadLeft left-pads input (args[0]) with the pad string (args[2]) until
// the result is at least `length` (args[1]) characters. Inputs already
// at or over the target length, and empty pad strings, are returned
// unchanged; bad arguments yield "".
//
// Fixes: an empty pad string previously caused a division-by-zero
// panic, and a multi-character pad could under-fill the target length
// because the repeat count used floor division. The pad is now repeated
// with ceiling division and truncated to exactly the needed width.
func PadLeft(args ...interface{}) interface{} {
	if len(args) != 3 {
		return ""
	}

	input, ok := args[0].(string)
	if !ok {
		return ""
	}

	length, ok := args[1].(int)
	if !ok {
		return ""
	}

	pad, ok := args[2].(string)
	if !ok {
		return ""
	}

	if len(input) >= length || pad == "" {
		return input
	}

	need := length - len(input)
	reps := (need + len(pad) - 1) / len(pad) // ceiling division
	return strings.Repeat(pad, reps)[:need] + input
}
// PickZones returns the zone numbers 1..n for a resource. The first
// three arguments (provider, resource type, location) are required but
// only counted; an optional fourth int argument selects how many zones
// to return (default 1, capped at 3). Returns nil with fewer than three
// arguments.
//
// Fix: the fourth argument previously went through an unchecked
// args[3].(int) assertion, so a non-int value panicked; it is now
// ignored and the default of 1 zone is used.
func PickZones(args ...interface{}) interface{} {
	if len(args) < 3 {
		return nil
	}
	numOfZones := 1

	if len(args) > 3 {
		if n, ok := args[3].(int); ok {
			numOfZones = n
			if numOfZones > 3 {
				numOfZones = 3
			}
		}
	}

	var zones []int

	for i := 1; i <= numOfZones; i++ {
		zones = append(zones, i)
	}

	return zones
}
start, ok := args[0].(int) + if !ok { + return []int{} + } + + count, ok := args[1].(int) + if !ok { + return []int{} + } + + if count > 10000 { + count = 10000 + } + + result := make([]int, count) + + for i := 0; i < count; i++ { + result[i] = start + i + } + + return result +} diff --git a/pkg/scanners/azure/functions/range_test.go b/pkg/scanners/azure/functions/range_test.go new file mode 100644 index 000000000000..9c0c6a084b6b --- /dev/null +++ b/pkg/scanners/azure/functions/range_test.go @@ -0,0 +1,47 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Range(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "range for 3 from 1", + args: []interface{}{ + 1, + 3, + }, + expected: []int{1, 2, 3}, + }, + { + name: "range with for 10 from 3", + args: []interface{}{ + 3, + 10, + }, + expected: []int{3, 4, 5, 6, 7, 8, 9, 10, 11, 12}, + }, + { + name: "range with for 10 from -10", + args: []interface{}{ + -10, + 10, + }, + expected: []int{-10, -9, -8, -7, -6, -5, -4, -3, -2, -1}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Range(tt.args...) 
// Reference cannot reproduce Azure's runtime reference() semantics,
// which need live ARM state; as a best effort it returns the first
// argument with a "-reference" suffix to keep the value unique-ish.
func Reference(args ...interface{}) interface{} {
	if len(args) < 1 {
		return nil
	}
	return fmt.Sprintf("%v-reference", args[0])
}

// Replace substitutes every occurrence of args[1] within args[0] with
// args[2]. All three arguments must be strings; otherwise "" is
// returned.
func Replace(args ...interface{}) interface{} {
	if len(args) != 3 {
		return ""
	}

	subject, ok := args[0].(string)
	if !ok {
		return ""
	}

	needle, ok := args[1].(string)
	if !ok {
		return ""
	}

	replacement, ok := args[2].(string)
	if !ok {
		return ""
	}

	return strings.ReplaceAll(subject, needle, replacement)
}
"github.com/stretchr/testify/assert" +) + +func Test_Replace(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "replace a string", + args: []interface{}{ + "hello", + "l", + "p", + }, + expected: "heppo", + }, + { + name: "replace a string with invalid replacement", + args: []interface{}{ + "hello", + "q", + "p", + }, + expected: "hello", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Replace(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/resource.go b/pkg/scanners/azure/functions/resource.go new file mode 100644 index 000000000000..7eacfaeccff1 --- /dev/null +++ b/pkg/scanners/azure/functions/resource.go @@ -0,0 +1,48 @@ +package functions + +import ( + "fmt" +) + +func ResourceID(args ...interface{}) interface{} { + if len(args) < 2 { + return nil + } + + var resourceID string + + for _, arg := range args { + resourceID += "/" + fmt.Sprintf("%v", arg) + } + + return resourceID +} + +func ExtensionResourceID(args ...interface{}) interface{} { + if len(args) < 3 { + return nil + } + + var resourceID string + + for _, arg := range args { + resourceID += "/" + fmt.Sprintf("%v", arg) + } + + return resourceID +} + +func ResourceGroup(args ...interface{}) interface{} { + return fmt.Sprintf(`{ +"id": "/subscriptions/%s/resourceGroups/PlaceHolderResourceGroup", +"name": "Placeholder Resource Group", +"type":"Microsoft.Resources/resourceGroups", +"location": "westus", +"managedBy": "%s", +"tags": { +}, +"properties": { + "provisioningState": "Succeeded +} +}`, subscriptionID, managingResourceID) +} diff --git a/pkg/scanners/azure/functions/resource_test.go b/pkg/scanners/azure/functions/resource_test.go new file mode 100644 index 000000000000..d6dac14b4184 --- /dev/null +++ b/pkg/scanners/azure/functions/resource_test.go @@ -0,0 +1,12 @@ +package functions + +import ( + "testing" + + 
"github.com/stretchr/testify/assert" +) + +func Test_ResourceID(t *testing.T) { + assert.Equal(t, "/test1/test2", ResourceID("test1", "test2")) + assert.Equal(t, "/test1/123", ResourceID("test1", 123)) +} diff --git a/pkg/scanners/azure/functions/scope.go b/pkg/scanners/azure/functions/scope.go new file mode 100644 index 000000000000..dcd1676b1945 --- /dev/null +++ b/pkg/scanners/azure/functions/scope.go @@ -0,0 +1,106 @@ +package functions + +import ( + "fmt" + + "github.com/google/uuid" +) + +var ( + tenantID = uuid.NewString() + groupID = uuid.NewString() + updaterID = uuid.NewString() + subscriptionID = uuid.NewString() + managingResourceID = uuid.NewString() +) + +func ManagementGroup(_ ...interface{}) interface{} { + + return fmt.Sprintf(`{ + "id": "/providers/Microsoft.Management/managementGroups/mgPlaceholder", + "name": "mgPlaceholder", + "properties": { + "details": { + "parent": { + "displayName": "Tenant Root Group", + "id": "/providers/Microsoft.Management/managementGroups/%[1]s", + "name": "%[1]s" + }, + "updatedBy": "%[2]s", + "updatedTime": "2020-07-23T21:05:52.661306Z", + "version": "1" + }, + "displayName": "Management PlaceHolder Group", + "tenantId": "%[3]s" + }, + "type": "/providers/Microsoft.Management/managementGroups" + } +`, groupID, updaterID, tenantID) +} + +func ManagementGroupResourceID(args ...interface{}) interface{} { + if len(args) < 2 { + return "" + } + + switch len(args) { + case 3: + return fmt.Sprintf("/providers/Microsoft.Management/managementGroups/%s/providers/%s/%s/%s", groupID, args[0], args[1], args[2]) + case 4: + return fmt.Sprintf("/providers/Microsoft.Management/managementGroups/%s/providers/%s/%s/%s", args[0], args[1], args[2], args[3]) + default: + return fmt.Sprintf("/providers/Microsoft.Management/managementGroups/%s/providers/%s/%s", groupID, args[0], args[1]) + } + +} + +func Subscription(_ ...interface{}) interface{} { + return fmt.Sprintf(`{ + "id": "/subscriptions/%[1]s", + "subscriptionId": 
// TenantResourceID builds a tenant-scoped resource ID from a provider
// namespace plus type and name segments. Exactly three arguments emit
// a four-segment path; any other count of two or more emits the
// two-segment form. Fewer than two arguments yields nil.
func TenantResourceID(args ...interface{}) interface{} {
	if len(args) < 2 {
		return nil
	}

	if len(args) == 3 {
		return fmt.Sprintf("/providers/%s/%s/%s", args[0], args[1], args[2])
	}
	return fmt.Sprintf("/providers/%s/%s", args[0], args[1])
}
// Skip removes the first `count` (args[1]) characters of a string or
// elements of a slice (args[0]) and returns the remainder; bad
// arguments or unsupported types yield "".
//
// Fix: a count greater than the slice length, or a negative count,
// previously caused a slice-bounds panic for every slice type (only
// the string case was range-checked). Counts are now clamped to the
// valid [0, len] range, so over-skipping returns an empty slice.
func Skip(args ...interface{}) interface{} {
	if len(args) != 2 {
		return ""
	}

	count, ok := args[1].(int)
	if !ok {
		return ""
	}
	if count < 0 {
		count = 0
	}
	switch input := args[0].(type) {
	case string:
		if count > len(input) {
			return ""
		}
		return input[count:]
	case []int:
		return input[skipOffset(count, len(input)):]
	case []string:
		return input[skipOffset(count, len(input)):]
	case []bool:
		return input[skipOffset(count, len(input)):]
	case []float64:
		return input[skipOffset(count, len(input)):]
	case []interface{}:
		return input[skipOffset(count, len(input)):]
	}

	return ""
}

// skipOffset caps a skip count to the collection length so slicing
// cannot go out of range.
func skipOffset(count, length int) int {
	if count > length {
		return length
	}
	return count
}
// Split breaks the string args[0] on args[1], which is either a single
// separator string (strings.Split semantics) or a []string whose
// entries act as one-character separators (strings.FieldsFunc
// semantics). Bad arguments yield ""; an unsupported separator type
// yields an empty []string.
//
// Fixes: separator characters were previously extracted with r[0],
// which truncated multi-byte (non-ASCII) characters to their first
// byte and panicked on an empty separator string. The first rune of
// each separator is now taken safely via range; empty entries are
// skipped. The redundant `case interface{}` wrapper has been
// flattened.
func Split(args ...interface{}) interface{} {
	if len(args) != 2 {
		return ""
	}

	input, ok := args[0].(string)
	if !ok {
		return ""
	}

	switch separator := args[1].(type) {
	case string:
		return strings.Split(input, separator)
	case []string:
		seps := make(map[rune]struct{}, len(separator))
		for _, s := range separator {
			// Only the first character of each separator is
			// significant, matching the original intent.
			for _, r := range s {
				seps[r] = struct{}{}
				break
			}
		}

		return strings.FieldsFunc(input, func(r rune) bool {
			_, found := seps[r]
			return found
		})
	}
	return []string{}
}
+ assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/starts_with.go b/pkg/scanners/azure/functions/starts_with.go new file mode 100644 index 000000000000..a4eb398cea3d --- /dev/null +++ b/pkg/scanners/azure/functions/starts_with.go @@ -0,0 +1,22 @@ +package functions + +import "strings" + +func StartsWith(args ...interface{}) interface{} { + + if len(args) != 2 { + return false + } + + stringToSearch, ok := args[0].(string) + if !ok { + return false + } + + stringToFind, ok := args[1].(string) + if !ok { + return false + } + + return strings.HasPrefix(stringToSearch, stringToFind) +} diff --git a/pkg/scanners/azure/functions/starts_with_test.go b/pkg/scanners/azure/functions/starts_with_test.go new file mode 100644 index 000000000000..4a745478ee51 --- /dev/null +++ b/pkg/scanners/azure/functions/starts_with_test.go @@ -0,0 +1,41 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_StartsWith(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected bool + }{ + { + name: "string ends with", + args: []interface{}{ + "Hello, world!", + "Hello,", + }, + expected: true, + }, + { + name: "string does not end with", + args: []interface{}{ + "Hello world!", + "Hello,", + }, + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := StartsWith(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } + +} diff --git a/pkg/scanners/azure/functions/string.go b/pkg/scanners/azure/functions/string.go new file mode 100644 index 000000000000..cba9997d9e9c --- /dev/null +++ b/pkg/scanners/azure/functions/string.go @@ -0,0 +1,16 @@ +package functions + +import "fmt" + +func String(args ...interface{}) interface{} { + if len(args) != 1 { + return "" + } + + input, ok := args[0].(string) + if !ok { + return fmt.Sprintf("%v", args[0]) + } + + return input +} diff --git a/pkg/scanners/azure/functions/string_test.go b/pkg/scanners/azure/functions/string_test.go new file mode 100644 index 000000000000..ecab50ea8b65 --- /dev/null +++ b/pkg/scanners/azure/functions/string_test.go @@ -0,0 +1,44 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_String(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "string from a string", + args: []interface{}{ + "hello", + }, + expected: "hello", + }, + { + name: "string from a bool", + args: []interface{}{ + false, + }, + expected: "false", + }, + { + name: "string from an int", + args: []interface{}{ + 10, + }, + expected: "10", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := String(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/sub.go b/pkg/scanners/azure/functions/sub.go new file mode 100644 index 000000000000..6013a8c0d509 --- /dev/null +++ b/pkg/scanners/azure/functions/sub.go @@ -0,0 +1,15 @@ +package functions + +func Sub(args ...interface{}) interface{} { + + if len(args) != 2 { + return nil + } + + if a, ok := args[0].(int); ok { + if b, ok := args[1].(int); ok { + return a - b + } + } + return nil +} diff --git a/pkg/scanners/azure/functions/sub_test.go b/pkg/scanners/azure/functions/sub_test.go new file mode 100644 index 000000000000..a3f9308a2710 --- /dev/null +++ b/pkg/scanners/azure/functions/sub_test.go @@ -0,0 +1,43 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Sub(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected int + }{ + { + name: "subtract 2 from 5", + args: []interface{}{5, 2}, + expected: 3, + }, + { + name: "subtract 2 from 1", + args: []interface{}{1, 2}, + expected: -1, + }, + { + name: "subtract 3 from 2", + args: []interface{}{2, 3}, + expected: -1, + }, + { + name: "subtract -4 from 3", + args: []interface{}{3, -4}, + expected: 7, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := Sub(tt.args...) 
+ assert.Equal(t, tt.expected, got) + }) + } +} diff --git a/pkg/scanners/azure/functions/substring.go b/pkg/scanners/azure/functions/substring.go new file mode 100644 index 000000000000..fed22f0d14a6 --- /dev/null +++ b/pkg/scanners/azure/functions/substring.go @@ -0,0 +1,36 @@ +package functions + +func SubString(args ...interface{}) interface{} { + if len(args) < 2 { + return "" + } + + input, ok := args[0].(string) + if !ok { + return "" + } + + start, ok := args[1].(int) + if !ok { + return "" + } + + if len(args) == 2 { + args = append(args, len(input)) + } + + length, ok := args[2].(int) + if !ok { + return "" + } + + if start > len(input) { + return "" + } + + if start+length > len(input) { + return input[start:] + } + + return input[start : start+length] +} diff --git a/pkg/scanners/azure/functions/substring_test.go b/pkg/scanners/azure/functions/substring_test.go new file mode 100644 index 000000000000..56e2ea107c73 --- /dev/null +++ b/pkg/scanners/azure/functions/substring_test.go @@ -0,0 +1,49 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_SubString(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "substring a string", + args: []interface{}{ + "hello", + 1, + 3, + }, + expected: "ell", + }, + { + name: "substring a string with no upper bound", + args: []interface{}{ + "hello", + 1, + }, + expected: "ello", + }, + { + name: "substring a string with start higher than the length", + args: []interface{}{ + "hello", + 10, + }, + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := SubString(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/take.go b/pkg/scanners/azure/functions/take.go new file mode 100644 index 000000000000..738c9d7d8064 --- /dev/null +++ b/pkg/scanners/azure/functions/take.go @@ -0,0 +1,49 @@ +package functions + +func Take(args ...interface{}) interface{} { + if len(args) != 2 { + return "" + } + + count, ok := args[1].(int) + if !ok { + return "" + } + switch input := args[0].(type) { + case string: + if count > len(input) { + return input + } + return input[:count] + case interface{}: + switch iType := input.(type) { + case []int: + if count > len(iType) { + return iType + } + return iType[:count] + case []string: + if count > len(iType) { + return iType + } + return iType[:count] + case []bool: + if count > len(iType) { + return iType + } + return iType[:count] + case []float64: + if count > len(iType) { + return iType + } + return iType[:count] + case []interface{}: + if count > len(iType) { + return iType + } + return iType[:count] + } + } + + return "" +} diff --git a/pkg/scanners/azure/functions/take_test.go b/pkg/scanners/azure/functions/take_test.go new file mode 100644 index 000000000000..68c19070a6e9 --- /dev/null +++ b/pkg/scanners/azure/functions/take_test.go @@ -0,0 +1,63 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Take(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "take a string", + args: []interface{}{ + "hello", + 2, + }, + expected: "he", + }, + { + name: "take a string with invalid count", + args: []interface{}{ + "hello", + 10, + }, + expected: "hello", + }, + { + name: "take a string from slice", + args: []interface{}{ + []string{"a", "b", "c"}, + 2, + }, + expected: []string{"a", "b"}, + }, + { + name: "take a string from a slice", + args: []interface{}{ + []string{"a", "b", "c"}, + 2, + }, + expected: []string{"a", "b"}, + }, + { + 
name: "take a string from a slice with invalid count", + args: []interface{}{ + []string{"a", "b", "c"}, + 10, + }, + expected: []string{"a", "b", "c"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Take(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/trim.go b/pkg/scanners/azure/functions/trim.go new file mode 100644 index 000000000000..5215bbe7f43d --- /dev/null +++ b/pkg/scanners/azure/functions/trim.go @@ -0,0 +1,16 @@ +package functions + +import "strings" + +func Trim(args ...interface{}) interface{} { + if len(args) != 1 { + return "" + } + + input, ok := args[0].(string) + if !ok { + return "" + } + + return strings.TrimSpace(input) +} diff --git a/pkg/scanners/azure/functions/trim_test.go b/pkg/scanners/azure/functions/trim_test.go new file mode 100644 index 000000000000..44a787b0f268 --- /dev/null +++ b/pkg/scanners/azure/functions/trim_test.go @@ -0,0 +1,71 @@ +package functions + +import "testing" + +func Test_Trim(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "trim a string", + args: []interface{}{ + " hello ", + }, + expected: "hello", + }, + { + name: "trim a string with multiple spaces", + args: []interface{}{ + " hello ", + }, + expected: "hello", + }, + { + name: "trim a string with tabs", + args: []interface{}{ + " hello ", + }, + expected: "hello", + }, + { + name: "trim a string with new lines", + args: []interface{}{ + ` + +hello + +`, + }, + expected: "hello", + }, + { + name: "trim a string with tabs, spaces and new lines", + args: []interface{}{ + ` + +hello + +`, + }, + expected: "hello", + }, + { + name: "trim a string with non string input", + args: []interface{}{ + 10, + }, + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Trim(tt.args...) 
+ if actual != tt.expected { + t.Errorf("Trim(%v) = %v, expected %v", tt.args, actual, tt.expected) + } + }) + } +} diff --git a/pkg/scanners/azure/functions/true.go b/pkg/scanners/azure/functions/true.go new file mode 100644 index 000000000000..9f13af580757 --- /dev/null +++ b/pkg/scanners/azure/functions/true.go @@ -0,0 +1,5 @@ +package functions + +func True(args ...interface{}) interface{} { + return true +} diff --git a/pkg/scanners/azure/functions/union.go b/pkg/scanners/azure/functions/union.go new file mode 100644 index 000000000000..07bb98f28eeb --- /dev/null +++ b/pkg/scanners/azure/functions/union.go @@ -0,0 +1,60 @@ +package functions + +import "sort" + +func Union(args ...interface{}) interface{} { + if len(args) == 0 { + return []interface{}{} + } + if len(args) == 1 { + return args[0] + } + + switch args[0].(type) { + case map[string]interface{}: + return unionMap(args...) + case interface{}: + return unionArray(args...) + } + + return []interface{}{} + +} + +func unionMap(args ...interface{}) interface{} { + result := make(map[string]interface{}) + + for _, arg := range args { + switch iType := arg.(type) { + case map[string]interface{}: + for k, v := range iType { + result[k] = v + } + } + } + + return result +} + +func unionArray(args ...interface{}) interface{} { + result := []interface{}{} + union := make(map[interface{}]bool) + + for _, arg := range args { + switch iType := arg.(type) { + case []interface{}: + for _, item := range iType { + union[item] = true + } + } + } + + for k := range union { + result = append(result, k) + } + sort.Slice(result, func(i, j int) bool { + return result[i].(string) < result[j].(string) + }) + + return result +} diff --git a/pkg/scanners/azure/functions/union_test.go b/pkg/scanners/azure/functions/union_test.go new file mode 100644 index 000000000000..56d5bf809088 --- /dev/null +++ b/pkg/scanners/azure/functions/union_test.go @@ -0,0 +1,110 @@ +package functions + +import ( + "testing" + + 
"github.com/stretchr/testify/assert" +) + +func Test_Union(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "union single array", + args: []interface{}{ + []interface{}{"a", "b", "c"}, + }, + expected: []interface{}{"a", "b", "c"}, + }, + { + name: "union two arrays", + args: []interface{}{ + []interface{}{"a", "b", "c"}, + []interface{}{"b", "c", "d"}, + }, + expected: []interface{}{"a", "b", "c", "d"}, + }, + { + name: "union two arrays", + args: []interface{}{ + []interface{}{"a", "b", "c"}, + []interface{}{"b", "c", "d"}, + []interface{}{"b", "c", "d", "e"}, + }, + expected: []interface{}{"a", "b", "c", "d", "e"}, + }, + { + name: "union single maps", + args: []interface{}{ + map[string]interface{}{ + "a": "a", + "b": "b", + "c": "c", + }, + }, + expected: map[string]interface{}{ + "a": "a", + "b": "b", + "c": "c", + }, + }, + { + name: "union two maps", + args: []interface{}{ + map[string]interface{}{ + "a": "a", + "b": "b", + "c": "c", + }, + map[string]interface{}{ + "b": "b", + "c": "c", + "d": "d", + }, + }, + expected: map[string]interface{}{ + "a": "a", + "b": "b", + "c": "c", + "d": "d", + }, + }, + { + name: "union three maps", + args: []interface{}{ + map[string]interface{}{ + "a": "a", + "b": "b", + "c": "c", + }, + map[string]interface{}{ + "b": "b", + "c": "c", + "d": "d", + }, + map[string]interface{}{ + "b": "b", + "c": "c", + "e": "e", + }, + }, + expected: map[string]interface{}{ + "a": "a", + "b": "b", + "c": "c", + "d": "d", + "e": "e", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Union(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/unique_string.go b/pkg/scanners/azure/functions/unique_string.go new file mode 100644 index 000000000000..fba35c6459ac --- /dev/null +++ b/pkg/scanners/azure/functions/unique_string.go @@ -0,0 +1,21 @@ +package functions + +import ( + "crypto/sha256" + "fmt" + "strings" +) + +func UniqueString(args ...interface{}) interface{} { + if len(args) == 0 { + return "" + } + + hashParts := make([]string, len(args)) + for i, str := range args { + hashParts[i] = str.(string) + } + + hash := sha256.New().Sum([]byte(strings.Join(hashParts, ""))) + return fmt.Sprintf("%x", hash)[:13] +} diff --git a/pkg/scanners/azure/functions/unique_string_test.go b/pkg/scanners/azure/functions/unique_string_test.go new file mode 100644 index 000000000000..035591eb46aa --- /dev/null +++ b/pkg/scanners/azure/functions/unique_string_test.go @@ -0,0 +1,38 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_UniqueString(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "unique string from a string", + args: []interface{}{ + "hello", + }, + expected: "68656c6c6fe3b", + }, + { + name: "unique string from a string", + args: []interface{}{ + "hello", + "world", + }, + expected: "68656c6c6f776", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := UniqueString(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/functions/uri.go b/pkg/scanners/azure/functions/uri.go new file mode 100644 index 000000000000..949e12235dea --- /dev/null +++ b/pkg/scanners/azure/functions/uri.go @@ -0,0 +1,29 @@ +package functions + +import ( + "net/url" + "path" +) + +func Uri(args ...interface{}) interface{} { + if len(args) != 2 { + return "" + } + + result, err := joinPath(args[0].(string), args[1].(string)) + if err != nil { + return "" + } + return result +} + +// Backport url.JoinPath until we're ready for Go 1.19 +func joinPath(base string, elem ...string) (string, error) { + u, err := url.Parse(base) + if err != nil { + return "", err + } + elem = append([]string{u.EscapedPath()}, elem...) + u.Path = path.Join(elem...) + return u.String(), nil +} diff --git a/pkg/scanners/azure/functions/uri_test.go b/pkg/scanners/azure/functions/uri_test.go new file mode 100644 index 000000000000..1a63fe6bbd01 --- /dev/null +++ b/pkg/scanners/azure/functions/uri_test.go @@ -0,0 +1,48 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func Test_Uri(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "uri from a base and relative with no trailing slash", + args: []interface{}{ + "http://contoso.org/firstpath", + "myscript.sh", + }, + expected: "http://contoso.org/firstpath/myscript.sh", + }, + { + name: "uri from a base and relative with trailing slash", + args: []interface{}{ + "http://contoso.org/firstpath/", + "myscript.sh", + }, + expected: "http://contoso.org/firstpath/myscript.sh", + }, + { + name: "uri from a base with trailing slash and relative with ../", + args: []interface{}{ + "http://contoso.org/firstpath/", + "../myscript.sh", + }, + expected: "http://contoso.org/myscript.sh", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Uri(tt.args...) 
+ require.Equal(t, tt.expected, actual) + }) + } + +} diff --git a/pkg/scanners/azure/functions/utc_now.go b/pkg/scanners/azure/functions/utc_now.go new file mode 100644 index 000000000000..68c93bd58fee --- /dev/null +++ b/pkg/scanners/azure/functions/utc_now.go @@ -0,0 +1,47 @@ +package functions + +import ( + "strings" + "time" +) + +func UTCNow(args ...interface{}) interface{} { + if len(args) > 1 { + return nil + } + + if len(args) == 1 { + format, ok := args[0].(string) + if ok { + goFormat := convertFormat(format) + return time.Now().UTC().Format(goFormat) + } + } + + return time.Now().UTC().Format(time.RFC3339) +} + +// don't look directly at this code +func convertFormat(format string) string { + goFormat := format + goFormat = strings.ReplaceAll(goFormat, "yyyy", "2006") + goFormat = strings.ReplaceAll(goFormat, "yy", "06") + goFormat = strings.ReplaceAll(goFormat, "MMMM", "January") + goFormat = strings.ReplaceAll(goFormat, "MMM", "Jan") + goFormat = strings.ReplaceAll(goFormat, "MM", "01") + goFormat = strings.ReplaceAll(goFormat, "M", "1") + goFormat = strings.ReplaceAll(goFormat, "dd", "02") + goFormat = strings.ReplaceAll(goFormat, "d", "2") + goFormat = strings.ReplaceAll(goFormat, "HH", "15") + goFormat = strings.ReplaceAll(goFormat, "H", "3") + goFormat = strings.ReplaceAll(goFormat, "hh", "03") + goFormat = strings.ReplaceAll(goFormat, "h", "3") + goFormat = strings.ReplaceAll(goFormat, "mm", "04") + goFormat = strings.ReplaceAll(goFormat, "m", "4") + goFormat = strings.ReplaceAll(goFormat, "ss", "05") + goFormat = strings.ReplaceAll(goFormat, "s", "5") + goFormat = strings.ReplaceAll(goFormat, "tt", "PM") + goFormat = strings.ReplaceAll(goFormat, "t", "PM") + return goFormat + +} diff --git a/pkg/scanners/azure/functions/utc_now_test.go b/pkg/scanners/azure/functions/utc_now_test.go new file mode 100644 index 000000000000..c203c3e70a0a --- /dev/null +++ b/pkg/scanners/azure/functions/utc_now_test.go @@ -0,0 +1,40 @@ +package functions + +import ( 
+ "fmt" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func Test_UTCNow(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "utc now day", + args: []interface{}{ + "d", + }, + expected: fmt.Sprintf("%d", time.Now().UTC().Day()), + }, + { + name: "utc now date", + args: []interface{}{ + "yyyy-M-d", + }, + expected: fmt.Sprintf("%d-%d-%d", time.Now().UTC().Year(), time.Now().UTC().Month(), time.Now().UTC().Day()), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := UTCNow(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/scanners/azure/resolver/resolver.go b/pkg/scanners/azure/resolver/resolver.go new file mode 100644 index 000000000000..8a790815fd9c --- /dev/null +++ b/pkg/scanners/azure/resolver/resolver.go @@ -0,0 +1,51 @@ +package resolver + +import ( + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/scanners/azure/expressions" +) + +type Resolver interface { + ResolveExpression(expression azure.Value) azure.Value + SetDeployment(d *azure.Deployment) +} + +func NewResolver() Resolver { + return &resolver{} +} + +type resolver struct { + deployment *azure.Deployment +} + +func (r *resolver) SetDeployment(d *azure.Deployment) { + r.deployment = d +} + +func (r *resolver) ResolveExpression(expression azure.Value) azure.Value { + if expression.Kind != azure.KindExpression { + return expression + } + if r.deployment == nil { + panic("cannot resolve expression on nil deployment") + } + code := expression.AsString() + + resolved, err := r.resolveExpressionString(code, expression.GetMetadata()) + if err != nil { + expression.Kind = azure.KindUnresolvable + return expression + } + return resolved +} + +func (r *resolver) resolveExpressionString(code string, metadata defsecTypes.Metadata) (azure.Value, 
error) { + et, err := expressions.NewExpressionTree(code) + if err != nil { + return azure.NullValue, err + } + + evaluatedValue := et.Evaluate(r.deployment) + return azure.NewValue(evaluatedValue, metadata), nil +} diff --git a/pkg/scanners/azure/resolver/resolver_test.go b/pkg/scanners/azure/resolver/resolver_test.go new file mode 100644 index 000000000000..2a5cec4ea9e2 --- /dev/null +++ b/pkg/scanners/azure/resolver/resolver_test.go @@ -0,0 +1,101 @@ +package resolver + +import ( + "testing" + "time" + + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/stretchr/testify/require" +) + +func Test_resolveFunc(t *testing.T) { + + tests := []struct { + name string + expr string + expected string + }{ + { + name: "simple format call", + expr: "format('{0}/{1}', 'myPostgreSQLServer', 'log_checkpoints')", + expected: "myPostgreSQLServer/log_checkpoints", + }, + { + name: "simple format call with numbers", + expr: "format('{0} + {1} = {2}', 1, 2, 3)", + expected: "1 + 2 = 3", + }, + { + name: "format with nested format", + expr: "format('{0} + {1} = {2}', format('{0}', 1), 2, 3)", + expected: "1 + 2 = 3", + }, + { + name: "format with multiple nested format", + expr: "format('{0} + {1} = {2}', format('{0}', 1), 2, format('{0}', 3))", + expected: "1 + 2 = 3", + }, + { + name: "format with nested base64", + expr: "format('the base64 of \"hello, world\" is {0}', base64('hello, world'))", + expected: "the base64 of \"hello, world\" is aGVsbG8sIHdvcmxk", + }, + { + name: "dateTimeAdd with add a day", + expr: "dateTimeAdd(utcNow('yyyy-MM-dd'), 'P1D', 'yyyy-MM-dd')", + expected: time.Now().UTC().AddDate(0, 0, 1).Format("2006-01-02"), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + resolver := resolver{} + + resolvedValue, err := resolver.resolveExpressionString(tt.expr, types.NewTestMetadata()) + require.NoError(t, err) + require.Equal(t, azure.KindString, 
resolvedValue.Kind) + + require.Equal(t, tt.expected, resolvedValue.AsString()) + }) + } +} + +func Test_resolveParameter(t *testing.T) { + tests := []struct { + name string + deployment *azure.Deployment + expr string + expected string + }{ + { + name: "format call with parameter", + deployment: &azure.Deployment{ + Parameters: []azure.Parameter{ + { + Variable: azure.Variable{ + Name: "dbName", + Value: azure.NewValue("myPostgreSQLServer", types.NewTestMetadata()), + }, + }, + }, + }, + expr: "format('{0}/{1}', parameters('dbName'), 'log_checkpoints')", + expected: "myPostgreSQLServer/log_checkpoints", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + resolver := resolver{ + deployment: tt.deployment, + } + + resolvedValue, err := resolver.resolveExpressionString(tt.expr, types.NewTestMetadata()) + require.NoError(t, err) + require.Equal(t, azure.KindString, resolvedValue.Kind) + + require.Equal(t, tt.expected, resolvedValue.AsString()) + }) + } + +} diff --git a/pkg/scanners/azure/value.go b/pkg/scanners/azure/value.go new file mode 100644 index 000000000000..c9dc3316ca10 --- /dev/null +++ b/pkg/scanners/azure/value.go @@ -0,0 +1,358 @@ +package azure + +import ( + "strings" + "time" + + "golang.org/x/exp/slices" + + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/azure/arm/parser/armjson" +) + +type EvalContext struct{} + +type Kind string + +const ( + KindUnresolvable Kind = "unresolvable" + KindNull Kind = "null" + KindBoolean Kind = "boolean" + KindString Kind = "string" + KindNumber Kind = "number" + KindObject Kind = "object" + KindArray Kind = "array" + KindExpression Kind = "expression" +) + +type Value struct { + types.Metadata + rLit interface{} + rMap map[string]Value + rArr []Value + Kind Kind + Comments []string +} + +var NullValue = Value{ + Kind: KindNull, +} + +func NewValue(value interface{}, metadata types.Metadata) Value { + + v := Value{ + Metadata: metadata, 
+ } + + switch ty := value.(type) { + case []interface{}: + v.Kind = KindArray + for _, child := range ty { + if internal, ok := child.(Value); ok { + v.rArr = append(v.rArr, internal) + } else { + v.rArr = append(v.rArr, NewValue(child, metadata)) + } + } + case []Value: + v.Kind = KindArray + v.rArr = append(v.rArr, ty...) + + case map[string]interface{}: + v.Kind = KindObject + v.rMap = make(map[string]Value) + for key, val := range ty { + if internal, ok := val.(Value); ok { + v.rMap[key] = internal + } else { + v.rMap[key] = NewValue(val, metadata) + } + } + case map[string]Value: + v.Kind = KindObject + v.rMap = make(map[string]Value) + for key, val := range ty { + v.rMap[key] = val + } + case string: + v.Kind = KindString + v.rLit = ty + case int, int64, int32, float32, float64, int8, int16, uint8, uint16, uint32, uint64: + v.Kind = KindNumber + v.rLit = ty + case bool: + v.Kind = KindBoolean + v.rLit = ty + case nil: + v.Kind = KindNull + v.rLit = ty + default: + v.Kind = KindUnresolvable + v.rLit = ty + } + + return v +} + +func (v *Value) GetMetadata() types.Metadata { + return v.Metadata +} + +func (v *Value) UnmarshalJSONWithMetadata(node armjson.Node) error { + + v.updateValueKind(node) + + v.Metadata = node.Metadata() + + switch node.Kind() { + case armjson.KindArray: + err := v.unmarshallArray(node) + if err != nil { + return err + } + case armjson.KindObject: + err := v.unmarshalObject(node) + if err != nil { + return err + } + case armjson.KindString: + err := v.unmarshalString(node) + if err != nil { + return err + } + default: + if err := node.Decode(&v.rLit); err != nil { + return err + } + } + + for _, comment := range node.Comments() { + var str string + if err := comment.Decode(&str); err != nil { + return err + } + // remove `\r` from comment when running windows + str = strings.ReplaceAll(str, "\r", "") + + v.Comments = append(v.Comments, str) + } + return nil +} + +func (v *Value) unmarshalString(node armjson.Node) error { + var str string 
+ if err := node.Decode(&str); err != nil { + return err + } + if strings.HasPrefix(str, "[") && !strings.HasPrefix(str, "[[") && strings.HasSuffix(str, "]") { + // function! + v.Kind = KindExpression + v.rLit = str[1 : len(str)-1] + } else { + v.rLit = str + } + return nil +} + +func (v *Value) unmarshalObject(node armjson.Node) error { + obj := make(map[string]Value) + for i := 0; i < len(node.Content()); i += 2 { + var key string + if err := node.Content()[i].Decode(&key); err != nil { + return err + } + var val Value + if err := val.UnmarshalJSONWithMetadata(node.Content()[i+1]); err != nil { + return err + } + obj[key] = val + } + v.rMap = obj + return nil +} + +func (v *Value) unmarshallArray(node armjson.Node) error { + var arr []Value + for _, child := range node.Content() { + var val Value + if err := val.UnmarshalJSONWithMetadata(child); err != nil { + return err + } + arr = append(arr, val) + } + v.rArr = arr + return nil +} + +func (v *Value) updateValueKind(node armjson.Node) { + switch node.Kind() { + case armjson.KindString: + v.Kind = KindString + case armjson.KindNumber: + v.Kind = KindNumber + case armjson.KindBoolean: + v.Kind = KindBoolean + case armjson.KindObject: + v.Kind = KindObject + case armjson.KindNull: + v.Kind = KindNull + case armjson.KindArray: + v.Kind = KindArray + default: + panic(node.Kind()) + } +} + +func (v Value) AsString() string { + v.Resolve() + + if v.Kind != KindString { + return "" + } + + return v.rLit.(string) +} + +func (v Value) AsBool() bool { + v.Resolve() + if v.Kind != KindBoolean { + return false + } + return v.rLit.(bool) +} + +func (v Value) AsInt() int { + v.Resolve() + if v.Kind != KindNumber { + return 0 + } + return int(v.rLit.(int64)) +} + +func (v Value) AsFloat() float64 { + v.Resolve() + if v.Kind != KindNumber { + return 0 + } + return v.rLit.(float64) +} + +func (v Value) AsIntValue(defaultValue int, metadata types.Metadata) types.IntValue { + v.Resolve() + if v.Kind != KindNumber { + return 
types.Int(defaultValue, metadata) + } + return types.Int(v.AsInt(), metadata) +} + +func (v Value) AsBoolValue(defaultValue bool, metadata types.Metadata) types.BoolValue { + v.Resolve() + if v.Kind == KindString { + possibleValue := strings.ToLower(v.rLit.(string)) + if slices.Contains([]string{"true", "1", "yes", "on", "enabled"}, possibleValue) { + return types.Bool(true, metadata) + } + } + + if v.Kind != KindBoolean { + return types.Bool(defaultValue, metadata) + } + + return types.Bool(v.rLit.(bool), v.GetMetadata()) +} + +func (v Value) EqualTo(value interface{}) bool { + switch ty := value.(type) { + case string: + return v.AsString() == ty + default: + panic("not supported") + } +} + +func (v Value) AsStringValue(defaultValue string, metadata types.Metadata) types.StringValue { + v.Resolve() + if v.Kind != KindString { + return types.StringDefault(defaultValue, metadata) + } + return types.String(v.rLit.(string), v.Metadata) +} + +func (v Value) GetMapValue(key string) Value { + v.Resolve() + if v.Kind != KindObject { + return NullValue + } + return v.rMap[key] +} + +func (v Value) AsMap() map[string]Value { + v.Resolve() + if v.Kind != KindObject { + return nil + } + return v.rMap +} + +func (v Value) AsList() []Value { + v.Resolve() + if v.Kind != KindArray { + return nil + } + return v.rArr +} + +func (v Value) Raw() interface{} { + switch v.Kind { + case KindArray: + // TODO: recursively build raw array + return nil + case KindObject: + // TODO: recursively build raw object + return nil + default: + return v.rLit + } +} + +func (v *Value) Resolve() { + if v.Kind != KindExpression { + return + } + // if resolver, ok := v.Metadata.Internal().(Resolver); ok { + // *v = resolver.ResolveExpression(*v) + // } +} + +func (v Value) HasKey(key string) bool { + v.Resolve() + _, ok := v.rMap[key] + return ok +} + +func (v Value) AsTimeValue(metadata types.Metadata) types.TimeValue { + v.Resolve() + if v.Kind != KindString { + return types.Time(time.Time{}, 
metadata) + } + if v.Kind == KindNumber { + return types.Time(time.Unix(int64(v.AsFloat()), 0), metadata) + } + t, err := time.Parse(time.RFC3339, v.rLit.(string)) + if err != nil { + return types.Time(time.Time{}, metadata) + } + return types.Time(t, metadata) +} + +func (v Value) AsStringValuesList(defaultValue string) (stringValues []types.StringValue) { + v.Resolve() + if v.Kind != KindArray { + return + } + for _, item := range v.rArr { + stringValues = append(stringValues, item.AsStringValue(defaultValue, item.Metadata)) + } + + return stringValues +} diff --git a/pkg/scanners/azure/value_test.go b/pkg/scanners/azure/value_test.go new file mode 100644 index 000000000000..7b463722794e --- /dev/null +++ b/pkg/scanners/azure/value_test.go @@ -0,0 +1,13 @@ +package azure + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + "github.com/stretchr/testify/assert" +) + +func Test_ValueAsInt(t *testing.T) { + val := NewValue(int64(10), types.NewTestMetadata()) + assert.Equal(t, 10, val.AsInt()) +} diff --git a/pkg/scanners/cloudformation/cftypes/types.go b/pkg/scanners/cloudformation/cftypes/types.go new file mode 100644 index 000000000000..44d9c1fd2a93 --- /dev/null +++ b/pkg/scanners/cloudformation/cftypes/types.go @@ -0,0 +1,12 @@ +package cftypes + +type CfType string + +const ( + String CfType = "string" + Int CfType = "int" + Float64 CfType = "float64" + Bool CfType = "bool" + Map CfType = "map" + List CfType = "list" +) diff --git a/pkg/scanners/cloudformation/parser/errors.go b/pkg/scanners/cloudformation/parser/errors.go new file mode 100644 index 000000000000..655f137cd271 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/errors.go @@ -0,0 +1,24 @@ +package parser + +import ( + "fmt" +) + +type InvalidContentError struct { + source string + err error +} + +func NewErrInvalidContent(source string, err error) *InvalidContentError { + return &InvalidContentError{ + source: source, + err: err, + } +} +func (e 
*InvalidContentError) Error() string { + return fmt.Sprintf("Invalid content in file: %s. Error: %v", e.source, e.err) +} + +func (e *InvalidContentError) Reason() error { + return e.err +} diff --git a/pkg/scanners/cloudformation/parser/file_context.go b/pkg/scanners/cloudformation/parser/file_context.go new file mode 100644 index 000000000000..35f4483018f8 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/file_context.go @@ -0,0 +1,61 @@ +package parser + +import ( + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +type SourceFormat string + +const ( + YamlSourceFormat SourceFormat = "yaml" + JsonSourceFormat SourceFormat = "json" +) + +type FileContexts []*FileContext + +type FileContext struct { + filepath string + lines []string + SourceFormat SourceFormat + Parameters map[string]*Parameter `json:"Parameters" yaml:"Parameters"` + Resources map[string]*Resource `json:"Resources" yaml:"Resources"` + Globals map[string]*Resource `json:"Globals" yaml:"Globals"` + Mappings map[string]interface{} `json:"Mappings,omitempty" yaml:"Mappings"` + Conditions map[string]Property `json:"Conditions,omitempty" yaml:"Conditions"` +} + +func (t *FileContext) GetResourceByLogicalID(name string) *Resource { + for n, r := range t.Resources { + if name == n { + return r + } + } + return nil +} + +func (t *FileContext) GetResourcesByType(names ...string) []*Resource { + var resources []*Resource + for _, r := range t.Resources { + for _, name := range names { + if name == r.Type() { + // + resources = append(resources, r) + } + } + } + return resources +} + +func (t *FileContext) Metadata() defsecTypes.Metadata { + rng := defsecTypes.NewRange(t.filepath, 1, len(t.lines), "", nil) + + return defsecTypes.NewMetadata(rng, NewCFReference("Template", rng).String()) +} + +func (t *FileContext) OverrideParameters(params map[string]any) { + for key := range t.Parameters { + if val, ok := params[key]; ok { + t.Parameters[key].UpdateDefault(val) + } + } +} diff --git 
a/pkg/scanners/cloudformation/parser/file_context_test.go b/pkg/scanners/cloudformation/parser/file_context_test.go new file mode 100644 index 000000000000..bbf5db4ddc39 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/file_context_test.go @@ -0,0 +1,61 @@ +package parser + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestFileContext_OverrideParameters(t *testing.T) { + tests := []struct { + name string + ctx FileContext + arg map[string]any + expected map[string]*Parameter + }{ + { + name: "happy", + ctx: FileContext{ + Parameters: map[string]*Parameter{ + "BucketName": { + inner: parameterInner{ + Type: "String", + Default: "test", + }, + }, + "QueueName": { + inner: parameterInner{ + Type: "String", + }, + }, + }, + }, + arg: map[string]any{ + "BucketName": "test2", + "QueueName": "test", + "SomeKey": "some_value", + }, + expected: map[string]*Parameter{ + "BucketName": { + inner: parameterInner{ + Type: "String", + Default: "test2", + }, + }, + "QueueName": { + inner: parameterInner{ + Type: "String", + Default: "test", + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tt.ctx.OverrideParameters(tt.arg) + assert.Equal(t, tt.expected, tt.ctx.Parameters) + }) + } +} diff --git a/pkg/scanners/cloudformation/parser/fn_and.go b/pkg/scanners/cloudformation/parser/fn_and.go new file mode 100644 index 000000000000..82a9f7bdcb19 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_and.go @@ -0,0 +1,38 @@ +package parser + +import "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + +func ResolveAnd(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::And"].AsList() + + if len(refValue) < 2 { + return abortIntrinsic(property, "Fn::And should have at least 2 values, returning original Property") + } + + results := make([]bool, len(refValue)) + for i := 0; i < 
len(refValue); i++ { + + r := false + if refValue[i].IsBool() { + r = refValue[i].AsBool() + } + + results[i] = r + } + + theSame := allSameStrings(results) + return property.deriveResolved(cftypes.Bool, theSame), true +} + +func allSameStrings(a []bool) bool { + for i := 1; i < len(a); i++ { + if a[i] != a[0] { + return false + } + } + return true +} diff --git a/pkg/scanners/cloudformation/parser/fn_and_test.go b/pkg/scanners/cloudformation/parser/fn_and_test.go new file mode 100644 index 000000000000..6112d46fe012 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_and_test.go @@ -0,0 +1,186 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_and_value(t *testing.T) { + + property1 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + }, + }, + }, + }, + }, + } + + property2 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + }, + }, + }, + }, + }, + } + andProperty := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + 
Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::And": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + property1, + property2, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(andProperty) + require.True(t, success) + + assert.True(t, resolvedProperty.IsTrue()) +} + +func Test_resolve_and_value_not_the_same(t *testing.T) { + + property1 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "bar", + }, + }, + }, + }, + }, + }, + }, + } + + property2 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + }, + }, + }, + }, + }, + } + andProperty := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::And": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + property1, + property2, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(andProperty) + require.True(t, success) + + assert.False(t, resolvedProperty.IsTrue()) +} diff --git a/pkg/scanners/cloudformation/parser/fn_base64.go b/pkg/scanners/cloudformation/parser/fn_base64.go new file mode 100644 index 
000000000000..e1b8bcbeca6e --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_base64.go @@ -0,0 +1,19 @@ +package parser + +import ( + "encoding/base64" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +) + +func ResolveBase64(property *Property) (*Property, bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Base64"].AsString() + + retVal := base64.StdEncoding.EncodeToString([]byte(refValue)) + + return property.deriveResolved(cftypes.String, retVal), true +} diff --git a/pkg/scanners/cloudformation/parser/fn_base64_test.go b/pkg/scanners/cloudformation/parser/fn_base64_test.go new file mode 100644 index 000000000000..070648bf8f1f --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_base64_test.go @@ -0,0 +1,35 @@ +package parser + +import ( + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "testing" +) + +func Test_resolve_base64_value(t *testing.T) { + + property := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Base64": { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "HelloWorld", + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + + assert.Equal(t, "SGVsbG9Xb3JsZA==", resolvedProperty.AsString()) +} diff --git a/pkg/scanners/cloudformation/parser/fn_builtin.go b/pkg/scanners/cloudformation/parser/fn_builtin.go new file mode 100644 index 000000000000..f20011618889 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_builtin.go @@ -0,0 +1,65 @@ +package parser + +import ( + "fmt" + "net" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + + 
"github.com/apparentlymart/go-cidr/cidr" +) + +func GetAzs(property *Property) (*Property, bool) { + return property.deriveResolved(cftypes.List, []*Property{ + property.deriveResolved(cftypes.String, "us-east-1a"), + property.deriveResolved(cftypes.String, "us-east-1a"), + property.deriveResolved(cftypes.String, "us-east-1a"), + }), true +} + +func GetCidr(property *Property) (*Property, bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Cidr"] + if refValue.IsNotList() || len(refValue.AsList()) != 3 { + return abortIntrinsic(property, "Fn::Cidr expects a list of 3 attributes") + } + + listParts := refValue.AsList() + ipaddressProp := listParts[0] + ipAddress := "10.0.0.0/2" + if ipaddressProp.IsString() { + ipAddress = ipaddressProp.AsString() + } + count := listParts[1].AsInt() + bit := listParts[2].AsInt() + + ranges, err := calculateCidrs(ipAddress, count, bit, property) + if err != nil { + return abortIntrinsic(property, "Could not calculate the required ranges") + } + return property.deriveResolved(cftypes.List, ranges), true +} + +func calculateCidrs(ipaddress string, count int, bit int, original *Property) ([]*Property, error) { + + var cidrProperties []*Property + + _, network, err := net.ParseCIDR(ipaddress) + if err != nil { + return nil, err + } + + for i := 0; i < count; i++ { + next, err := cidr.Subnet(network, bit, i) + if err != nil { + return nil, fmt.Errorf("failed to create cidr blocks") + } + + cidrProperties = append(cidrProperties, original.deriveResolved(cftypes.String, next.String())) + } + + return cidrProperties, nil +} diff --git a/pkg/scanners/cloudformation/parser/fn_builtin_test.go b/pkg/scanners/cloudformation/parser/fn_builtin_test.go new file mode 100644 index 000000000000..9a14029344a8 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_builtin_test.go @@ -0,0 +1,63 @@ +package parser + +import ( + "testing" + + "github.com/stretchr/testify/assert" + 
"github.com/stretchr/testify/require" +) + +func Test_cidr_generator(t *testing.T) { + + original := &Property{ + ctx: nil, + name: "cidr", + comment: "", + Inner: PropertyInner{ + Type: "", + Value: nil, + }, + } + + ranges, err := calculateCidrs("10.1.0.0/16", 4, 4, original) + require.Nil(t, err) + require.Len(t, ranges, 4) + + results := make(map[int]string) + for i, property := range ranges { + value := property.AsString() + results[i] = value + } + + assert.Equal(t, "10.1.0.0/20", results[0]) + assert.Equal(t, "10.1.16.0/20", results[1]) + assert.Equal(t, "10.1.32.0/20", results[2]) + assert.Equal(t, "10.1.48.0/20", results[3]) +} + +func Test_cidr_generator_8_bits(t *testing.T) { + original := &Property{ + ctx: nil, + name: "cidr", + comment: "", + Inner: PropertyInner{ + Type: "", + Value: nil, + }, + } + + ranges, err := calculateCidrs("10.1.0.0/16", 4, 8, original) + require.Nil(t, err) + require.Len(t, ranges, 4) + + results := make(map[int]string) + for i, property := range ranges { + value := property.AsString() + results[i] = value + } + + assert.Equal(t, "10.1.0.0/24", results[0]) + assert.Equal(t, "10.1.1.0/24", results[1]) + assert.Equal(t, "10.1.2.0/24", results[2]) + assert.Equal(t, "10.1.3.0/24", results[3]) +} diff --git a/pkg/scanners/cloudformation/parser/fn_condition.go b/pkg/scanners/cloudformation/parser/fn_condition.go new file mode 100644 index 000000000000..8d5c923936ab --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_condition.go @@ -0,0 +1,21 @@ +package parser + +func ResolveCondition(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refProp := property.AsMap()["Condition"] + if refProp.IsNotString() { + return nil, false + } + refValue := refProp.AsString() + + for k, prop := range property.ctx.Conditions { + if k == refValue { + return prop.resolveValue() + } + } + + return nil, false +} diff --git 
a/pkg/scanners/cloudformation/parser/fn_condition_test.go b/pkg/scanners/cloudformation/parser/fn_condition_test.go new file mode 100644 index 000000000000..bb8f78e751e5 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_condition_test.go @@ -0,0 +1,98 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_condition_value(t *testing.T) { + + fctx := new(FileContext) + fctx.Conditions = map[string]Property{ + "SomeCondition": { + ctx: fctx, + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + ctx: fctx, + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "some val", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "some val", + }, + }, + }, + }, + }, + }, + }, + }, + "EnableVersioning": { + ctx: fctx, + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Condition": { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "SomeCondition", + }, + }, + }, + }, + }, + } + + property := &Property{ + ctx: fctx, + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::If": { + ctx: fctx, + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "EnableVersioning", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "Enabled", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "Suspended", + }, + }, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + + assert.Equal(t, "Enabled", resolvedProperty.AsString()) +} diff --git a/pkg/scanners/cloudformation/parser/fn_equals.go 
b/pkg/scanners/cloudformation/parser/fn_equals.go new file mode 100644 index 000000000000..b476342c9a8f --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_equals.go @@ -0,0 +1,21 @@ +package parser + +import ( + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +) + +func ResolveEquals(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Equals"].AsList() + + if len(refValue) != 2 { + return abortIntrinsic(property, "Fn::Equals should have exactly 2 values, returning original Property") + } + + propA, _ := refValue[0].resolveValue() + propB, _ := refValue[1].resolveValue() + return property.deriveResolved(cftypes.Bool, propA.EqualTo(propB.RawValue())), true +} diff --git a/pkg/scanners/cloudformation/parser/fn_equals_test.go b/pkg/scanners/cloudformation/parser/fn_equals_test.go new file mode 100644 index 000000000000..ade7a9a0a1ab --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_equals_test.go @@ -0,0 +1,180 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_equals_value(t *testing.T) { + + property := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + + assert.True(t, resolvedProperty.IsTrue()) 
+} + +func Test_resolve_equals_value_to_false(t *testing.T) { + + property := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "bar", + }, + }, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + + assert.False(t, resolvedProperty.IsTrue()) +} + +func Test_resolve_equals_value_to_true_when_boolean(t *testing.T) { + + property := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.Bool, + Value: true, + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.Bool, + Value: true, + }, + }, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + assert.True(t, resolvedProperty.IsTrue()) +} + +func Test_resolve_equals_value_when_one_is_a_reference(t *testing.T) { + + property := &Property{ + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "staging", + }, + }, + { + ctx: &FileContext{ + filepath: "", + Parameters: map[string]*Parameter{ + "Environment": { + inner: parameterInner{ + Type: "string", + Default: "staging", + }, + }, + }, + }, + Inner: PropertyInner{ + Type: cftypes.Map, + Value: 
map[string]*Property{ + "Ref": { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "Environment", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + + assert.True(t, resolvedProperty.IsTrue()) +} diff --git a/pkg/scanners/cloudformation/parser/fn_find_in_map.go b/pkg/scanners/cloudformation/parser/fn_find_in_map.go new file mode 100644 index 000000000000..3c9a0da29f7b --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_find_in_map.go @@ -0,0 +1,45 @@ +package parser + +import ( + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +) + +func ResolveFindInMap(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::FindInMap"].AsList() + + if len(refValue) != 3 { + return abortIntrinsic(property, "Fn::FindInMap should have exactly 3 values, returning original Property") + } + + mapName := refValue[0].AsString() + topLevelKey := refValue[1].AsString() + secondaryLevelKey := refValue[2].AsString() + + if property.ctx == nil { + return abortIntrinsic(property, "the property does not have an attached context, returning original Property") + } + + m, ok := property.ctx.Mappings[mapName] + if !ok { + return abortIntrinsic(property, "could not find map %s, returning original Property") + } + + mapContents := m.(map[string]interface{}) + + k, ok := mapContents[topLevelKey] + if !ok { + return abortIntrinsic(property, "could not find %s in the %s map, returning original Property", topLevelKey, mapName) + } + + mapValues := k.(map[string]interface{}) + + if prop, ok := mapValues[secondaryLevelKey]; !ok { + return abortIntrinsic(property, "could not find a value for %s in %s, returning original Property", secondaryLevelKey, topLevelKey) + } else { + return property.deriveResolved(cftypes.String, prop), true + } +} diff --git 
a/pkg/scanners/cloudformation/parser/fn_find_in_map_test.go b/pkg/scanners/cloudformation/parser/fn_find_in_map_test.go new file mode 100644 index 000000000000..bbfa372b7121 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_find_in_map_test.go @@ -0,0 +1,100 @@ +package parser + +import ( + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "testing" +) + +func Test_resolve_find_in_map_value(t *testing.T) { + + source := `--- +Parameters: + Environment: + Type: String + Default: production +Mappings: + CacheNodeTypes: + production: + NodeType: cache.t2.large + test: + NodeType: cache.t2.small + dev: + NodeType: cache.t2.micro +Resources: + ElasticacheSecurityGroup: + Type: 'AWS::EC2::SecurityGroup' + Properties: + GroupDescription: Elasticache Security Group + SecurityGroupIngress: + - IpProtocol: tcp + FromPort: 11211 + ToPort: 11211 + SourceSecurityGroupName: !Ref InstanceSecurityGroup + ElasticacheCluster: + Type: 'AWS::ElastiCache::CacheCluster' + Properties: + Engine: memcached + CacheNodeType: !FindInMap [ CacheNodeTypes, production, NodeType ] + NumCacheNodes: '1' + VpcSecurityGroupIds: + - !GetAtt + - ElasticacheSecurityGroup + - GroupId +` + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("ElasticacheCluster") + assert.NotNil(t, testRes) + + nodeTypeProp := testRes.GetStringProperty("CacheNodeType", "") + assert.Equal(t, "cache.t2.large", nodeTypeProp.Value()) +} + +func Test_resolve_find_in_map_with_nested_intrinsic_value(t *testing.T) { + + source := `--- +Parameters: + Environment: + Type: String + Default: dev +Mappings: + CacheNodeTypes: + production: + NodeType: cache.t2.large + test: + NodeType: cache.t2.small + dev: + NodeType: cache.t2.micro +Resources: + ElasticacheSecurityGroup: + Type: 'AWS::EC2::SecurityGroup' + Properties: + GroupDescription: Elasticache Security Group + SecurityGroupIngress: + - IpProtocol: tcp + FromPort: 11211 + 
ToPort: 11211 + SourceSecurityGroupName: !Ref InstanceSecurityGroup + ElasticacheCluster: + Type: 'AWS::ElastiCache::CacheCluster' + Properties: + Engine: memcached + CacheNodeType: !FindInMap [ CacheNodeTypes, !Ref Environment, NodeType ] + NumCacheNodes: '1' + VpcSecurityGroupIds: + - !GetAtt + - ElasticacheSecurityGroup + - GroupId +` + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("ElasticacheCluster") + assert.NotNil(t, testRes) + + nodeTypeProp := testRes.GetStringProperty("CacheNodeType", "") + assert.Equal(t, "cache.t2.micro", nodeTypeProp.Value()) +} diff --git a/pkg/scanners/cloudformation/parser/fn_get_attr.go b/pkg/scanners/cloudformation/parser/fn_get_attr.go new file mode 100644 index 000000000000..53a7891e0252 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_get_attr.go @@ -0,0 +1,46 @@ +package parser + +import ( + "strings" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +) + +func ResolveGetAtt(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValueProp := property.AsMap()["Fn::GetAtt"] + + var refValue []string + + if refValueProp.IsString() { + refValue = strings.Split(refValueProp.AsString(), ".") + } + + if refValueProp.IsList() { + for _, p := range refValueProp.AsList() { + refValue = append(refValue, p.AsString()) + } + } + + if len(refValue) != 2 { + return abortIntrinsic(property, "Fn::GetAtt should have exactly 2 values, returning original Property") + } + + logicalId := refValue[0] + attribute := refValue[1] + + referencedResource := property.ctx.GetResourceByLogicalID(logicalId) + if referencedResource == nil || referencedResource.IsNil() { + return property.deriveResolved(cftypes.String, ""), true + } + + referencedProperty := referencedResource.GetProperty(attribute) + if referencedProperty.IsNil() { + return property.deriveResolved(cftypes.String, 
referencedResource.ID()), true + } + + return property.deriveResolved(referencedProperty.Type(), referencedProperty.RawValue()), true +} diff --git a/pkg/scanners/cloudformation/parser/fn_get_attr_test.go b/pkg/scanners/cloudformation/parser/fn_get_attr_test.go new file mode 100644 index 000000000000..ebd52da035b0 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_get_attr_test.go @@ -0,0 +1,50 @@ +package parser + +import ( + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "testing" +) + +func Test_resolve_get_attr_value(t *testing.T) { + + source := `--- +Resources: + ElasticacheSecurityGroup: + Type: 'AWS::EC2::SecurityGroup' + Properties: + GroupDescription: Elasticache Security Group + SecurityGroupIngress: + - IpProtocol: tcp + FromPort: 11211 + ToPort: 11211 + SourceSecurityGroupName: !Ref InstanceSecurityGroup + ElasticacheCluster: + Type: 'AWS::ElastiCache::CacheCluster' + Properties: + Engine: memcached + CacheNodeType: cache.t2.micro + NumCacheNodes: '1' + VpcSecurityGroupIds: + - !GetAtt + - ElasticacheSecurityGroup + - GroupId +` + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("ElasticacheCluster") + assert.NotNil(t, testRes) + + sgProp := testRes.GetProperty("VpcSecurityGroupIds") + require.True(t, sgProp.IsNotNil()) + require.True(t, sgProp.IsList()) + + for _, property := range sgProp.AsList() { + resolved, success := ResolveIntrinsicFunc(property) + require.True(t, success) + assert.True(t, resolved.IsNotNil()) + } + +} diff --git a/pkg/scanners/cloudformation/parser/fn_if.go b/pkg/scanners/cloudformation/parser/fn_if.go new file mode 100644 index 000000000000..d444952ff38a --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_if.go @@ -0,0 +1,40 @@ +package parser + +func ResolveIf(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := 
property.AsMap()["Fn::If"].AsList() + + if len(refValue) != 3 { + return abortIntrinsic(property, "Fn::If should have exactly 3 values, returning original Property") + } + + condition, _ := refValue[0].resolveValue() + trueState, _ := refValue[1].resolveValue() + falseState, _ := refValue[2].resolveValue() + + conditionMet := false + + con, _ := condition.resolveValue() + if con.IsBool() { + conditionMet = con.AsBool() + } else if property.ctx.Conditions != nil && + condition.IsString() { + + condition := property.ctx.Conditions[condition.AsString()] + if condition.isFunction() { + con, _ := condition.resolveValue() + if con.IsBool() { + conditionMet = con.AsBool() + } + } + } + + if conditionMet { + return trueState, true + } else { + return falseState, true + } +} diff --git a/pkg/scanners/cloudformation/parser/fn_if_test.go b/pkg/scanners/cloudformation/parser/fn_if_test.go new file mode 100644 index 000000000000..eba1e080ed20 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_if_test.go @@ -0,0 +1,56 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_if_value(t *testing.T) { + + property := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::If": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.Bool, + Value: true, + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "bar", + }, + }, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + + 
assert.Equal(t, "foo", resolvedProperty.String()) +} diff --git a/pkg/scanners/cloudformation/parser/fn_join.go b/pkg/scanners/cloudformation/parser/fn_join.go new file mode 100644 index 000000000000..961248a997f2 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_join.go @@ -0,0 +1,34 @@ +package parser + +import ( + "strings" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +) + +func ResolveJoin(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Join"].AsList() + + if len(refValue) != 2 { + return abortIntrinsic(property, "Fn::Join should have exactly 2 values, returning original Property") + } + + joiner := refValue[0].AsString() + items := refValue[1].AsList() + + var itemValues []string + for _, item := range items { + resolved, success := item.resolveValue() + if success { + itemValues = append(itemValues, resolved.AsString()) + } + } + + joined := strings.Join(itemValues, joiner) + + return property.deriveResolved(cftypes.String, joined), true +} diff --git a/pkg/scanners/cloudformation/parser/fn_join_test.go b/pkg/scanners/cloudformation/parser/fn_join_test.go new file mode 100644 index 000000000000..a44ab64039b0 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_join_test.go @@ -0,0 +1,152 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_join_value(t *testing.T) { + + property := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Join": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: 
PropertyInner{ + Type: cftypes.String, + Value: "::", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "s3", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "part1", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "part2", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + + assert.Equal(t, "s3::part1::part2", resolvedProperty.AsString()) +} + +func Test_resolve_join_value_with_reference(t *testing.T) { + + property := &Property{ + ctx: &FileContext{ + filepath: "", + Parameters: map[string]*Parameter{ + "Environment": { + inner: parameterInner{ + Type: "string", + Default: "staging", + }, + }, + }, + }, + name: "EnvironmentBucket", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Join": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "::", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "s3", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "part1", + }, + }, + { + ctx: &FileContext{ + filepath: "", + Parameters: map[string]*Parameter{ + "Environment": { + inner: parameterInner{ + Type: "string", + Default: "staging", + }, + }, + }, + }, + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Ref": { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "Environment", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + + assert.Equal(t, "s3::part1::staging", resolvedProperty.AsString()) +} diff --git 
a/pkg/scanners/cloudformation/parser/fn_length.go b/pkg/scanners/cloudformation/parser/fn_length.go new file mode 100644 index 000000000000..664bc933c158 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_length.go @@ -0,0 +1,24 @@ +package parser + +import "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + +func ResolveLength(property *Property) (*Property, bool) { + if !property.isFunction() { + return property, true + } + + val := property.AsMap()["Fn::Length"] + if val.IsList() { + return property.deriveResolved(cftypes.Int, val.Len()), true + } else if val.IsMap() { + resolved, _ := val.resolveValue() + + if resolved.IsList() { + return property.deriveResolved(cftypes.Int, resolved.Len()), true + } + return resolved, false + } + + return property, false + +} diff --git a/pkg/scanners/cloudformation/parser/fn_length_test.go b/pkg/scanners/cloudformation/parser/fn_length_test.go new file mode 100644 index 000000000000..af9d842dd339 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_length_test.go @@ -0,0 +1,99 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/stretchr/testify/require" +) + +func Test_ResolveLength_WhenPropIsArray(t *testing.T) { + prop := &Property{ + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Length": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.Int, + Value: 1, + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "IntParameter", + }, + }, + }, + }, + }, + }, + }, + } + resolved, ok := ResolveIntrinsicFunc(prop) + require.True(t, ok) + require.True(t, resolved.IsInt()) + require.Equal(t, 2, resolved.AsInt()) +} + +func Test_ResolveLength_WhenPropIsIntrinsicFunction(t *testing.T) { + fctx := &FileContext{ + Parameters: map[string]*Parameter{ + "SomeParameter": { + inner: parameterInner{ + 
Type: "string", + Default: "a|b|c|d", + }, + }, + }, + } + prop := &Property{ + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Length": { + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Split": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "|", + }, + }, + { + ctx: fctx, + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Ref": { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "SomeParameter", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + resolved, ok := ResolveIntrinsicFunc(prop) + require.True(t, ok) + require.True(t, resolved.IsInt()) + require.Equal(t, 4, resolved.AsInt()) +} diff --git a/pkg/scanners/cloudformation/parser/fn_not.go b/pkg/scanners/cloudformation/parser/fn_not.go new file mode 100644 index 000000000000..a61390d26cf3 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_not.go @@ -0,0 +1,23 @@ +package parser + +import "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + +func ResolveNot(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Not"].AsList() + + if len(refValue) != 1 { + return abortIntrinsic(property, "Fn::No should have at only 1 values, returning original Property") + } + + funcToInvert, _ := refValue[0].resolveValue() + + if funcToInvert.IsBool() { + return property.deriveResolved(cftypes.Bool, !funcToInvert.AsBool()), true + } + + return property, false +} diff --git a/pkg/scanners/cloudformation/parser/fn_not_test.go b/pkg/scanners/cloudformation/parser/fn_not_test.go new file mode 100644 index 000000000000..4e19b3ad069f --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_not_test.go @@ -0,0 +1,124 @@ +package parser + +import ( + "testing" + + 
"github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_not_value(t *testing.T) { + property1 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "bar", + }, + }, + }, + }, + }, + }, + }, + } + + notProperty := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Not": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + property1, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(notProperty) + require.True(t, success) + + assert.True(t, resolvedProperty.IsTrue()) +} + +func Test_resolve_not_value_when_true(t *testing.T) { + property1 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + }, + }, + }, + }, + }, + } + + notProperty := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Not": { + Inner: PropertyInner{ + Type: 
cftypes.List, + Value: []*Property{ + property1, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(notProperty) + require.True(t, success) + + assert.False(t, resolvedProperty.IsTrue()) +} diff --git a/pkg/scanners/cloudformation/parser/fn_or.go b/pkg/scanners/cloudformation/parser/fn_or.go new file mode 100644 index 000000000000..0da432b350bf --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_or.go @@ -0,0 +1,39 @@ +package parser + +import "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + +func ResolveOr(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Or"].AsList() + + if len(refValue) < 2 { + return abortIntrinsic(property, "Fn::Or should have at least 2 values, returning original Property") + } + + results := make([]bool, len(refValue)) + for i := 0; i < len(refValue); i++ { + + r := false + if refValue[i].IsBool() { + r = refValue[i].AsBool() + } + + results[i] = r + } + + atleastOne := atleastOne(results) + return property.deriveResolved(cftypes.Bool, atleastOne), true +} + +func atleastOne(a []bool) bool { + for _, b := range a { + if b { + return true + } + } + + return false +} diff --git a/pkg/scanners/cloudformation/parser/fn_or_test.go b/pkg/scanners/cloudformation/parser/fn_or_test.go new file mode 100644 index 000000000000..0c4f3dcd1954 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_or_test.go @@ -0,0 +1,184 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_or_value(t *testing.T) { + property1 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + 
Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "bar", + }, + }, + }, + }, + }, + }, + }, + } + + property2 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + }, + }, + }, + }, + }, + } + orProperty := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Or": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + property1, + property2, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(orProperty) + require.True(t, success) + + assert.True(t, resolvedProperty.IsTrue()) +} + +func Test_resolve_or_value_when_neither_true(t *testing.T) { + property1 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "bar", + }, + }, + }, + }, + }, + }, + }, + } + + property2 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ 
+ "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "bar", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + }, + }, + }, + }, + }, + } + orProperty := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Or": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + property1, + property2, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(orProperty) + require.True(t, success) + + assert.False(t, resolvedProperty.IsTrue()) +} diff --git a/pkg/scanners/cloudformation/parser/fn_ref.go b/pkg/scanners/cloudformation/parser/fn_ref.go new file mode 100644 index 000000000000..d2f2ed6eeca4 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_ref.go @@ -0,0 +1,54 @@ +package parser + +import ( + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +) + +func ResolveReference(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refProp := property.AsMap()["Ref"] + if refProp.IsNotString() { + return property, false + } + refValue := refProp.AsString() + + if pseudo, ok := pseudoParameters[refValue]; ok { + return property.deriveResolved(pseudo.t, pseudo.val), true + } + + if property.ctx == nil { + return property, false + } + + var param *Parameter + for k := range property.ctx.Parameters { + if k == refValue { + param = property.ctx.Parameters[k] + resolvedType := param.Type() + + switch param.Default().(type) { + case bool: + resolvedType = cftypes.Bool + case string: + resolvedType = cftypes.String + case int: + resolvedType = cftypes.Int + } + + resolved = property.deriveResolved(resolvedType, param.Default()) + return resolved, true + } + } + + for k := range 
property.ctx.Resources { + if k == refValue { + res := property.ctx.Resources[k] + resolved = property.deriveResolved(cftypes.String, res.ID()) + break + } + } + return resolved, true +} diff --git a/pkg/scanners/cloudformation/parser/fn_ref_test.go b/pkg/scanners/cloudformation/parser/fn_ref_test.go new file mode 100644 index 000000000000..98149e4792ec --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_ref_test.go @@ -0,0 +1,89 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_referenced_value(t *testing.T) { + + property := &Property{ + ctx: &FileContext{ + filepath: "", + Parameters: map[string]*Parameter{ + "BucketName": { + inner: parameterInner{ + Type: "string", + Default: "someBucketName", + }, + }, + }, + }, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Ref": { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "BucketName", + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + + assert.Equal(t, "someBucketName", resolvedProperty.AsString()) +} + +func Test_property_value_correct_when_not_reference(t *testing.T) { + + property := &Property{ + ctx: &FileContext{ + filepath: "", + }, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.String, + Value: "someBucketName", + }, + } + + // should fail when trying to resolve function that is not in fact a function + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.False(t, success) + + assert.Equal(t, "someBucketName", resolvedProperty.AsString()) +} + +func Test_resolve_ref_with_pseudo_value(t *testing.T) { + 
source := `--- +Resources: + TestInstance: + Type: AWS::EC2::Instance + Properties: + ImageId: "ami-79fd7eee" + KeyName: !Join [":", ["aws", !Ref AWS::Region, "key" ]] +` + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("TestInstance") + require.NotNil(t, testRes) + + keyNameProp := testRes.GetProperty("KeyName") + require.NotNil(t, keyNameProp) + + assert.Equal(t, "aws:eu-west-1:key", keyNameProp.AsString()) +} diff --git a/pkg/scanners/cloudformation/parser/fn_select.go b/pkg/scanners/cloudformation/parser/fn_select.go new file mode 100644 index 000000000000..3289004847c8 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_select.go @@ -0,0 +1,41 @@ +package parser + +import ( + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +) + +func ResolveSelect(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Select"].AsList() + + if len(refValue) != 2 { + return abortIntrinsic(property, "Fn::Select should have exactly 2 values, returning original Property") + } + + index := refValue[0] + list := refValue[1] + + if index.IsNotInt() { + if index.IsConvertableTo(cftypes.Int) { + // + index = index.ConvertTo(cftypes.Int) + } else { + return abortIntrinsic(property, "index on property [%s] should be an int, returning original Property", property.name) + } + } + + if list.IsNotList() { + return abortIntrinsic(property, "list on property [%s] should be a list, returning original Property", property.name) + } + + listItems := list.AsList() + + if len(listItems) <= index.AsInt() { + return nil, false + } + + return listItems[index.AsInt()], true +} diff --git a/pkg/scanners/cloudformation/parser/fn_select_test.go b/pkg/scanners/cloudformation/parser/fn_select_test.go new file mode 100644 index 000000000000..92b634457b2d --- /dev/null +++ 
b/pkg/scanners/cloudformation/parser/fn_select_test.go @@ -0,0 +1,77 @@ +package parser + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_select_value(t *testing.T) { + + source := `--- +Parameters: + EngineIndex: + Type: Integer + Default: 1 +Resources: + ElasticacheCluster: + Type: 'AWS::ElastiCache::CacheCluster' + Properties: + Engine: !Select [ !Ref EngineIndex, [memcached, redis ]] + CacheNodeType: cache.t2.micro + NumCacheNodes: '1' +` + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("ElasticacheCluster") + assert.NotNil(t, testRes) + + engineProp := testRes.GetProperty("Engine") + require.True(t, engineProp.IsNotNil()) + require.True(t, engineProp.IsString()) + + require.Equal(t, "redis", engineProp.AsString()) +} + +func Test_SelectPseudoListParam(t *testing.T) { + src := `--- +Resources: + myASGrpOne: + Type: AWS::AutoScaling::AutoScalingGroup + Version: "2009-05-15" + Properties: + AvailabilityZones: + - "us-east-1a" + LaunchConfigurationName: + Ref: MyLaunchConfiguration + MinSize: "0" + MaxSize: "0" + NotificationConfigurations: + - TopicARN: + Fn::Select: + - "1" + - Ref: AWS::NotificationARNs + NotificationTypes: + - autoscaling:EC2_INSTANCE_LAUNCH + - autoscaling:EC2_INSTANCE_LAUNCH_ERROR + +` + + ctx := createTestFileContext(t, src) + require.NotNil(t, ctx) + + resource := ctx.GetResourceByLogicalID("myASGrpOne") + require.NotNil(t, resource) + + notification := resource.GetProperty("NotificationConfigurations") + require.True(t, notification.IsNotNil()) + require.True(t, notification.IsList()) + first := notification.AsList()[0] + require.True(t, first.IsMap()) + topic, ok := first.AsMap()["TopicARN"] + require.True(t, ok) + require.Equal(t, "notification::arn::2", topic.AsString()) + +} diff --git a/pkg/scanners/cloudformation/parser/fn_split.go b/pkg/scanners/cloudformation/parser/fn_split.go 
new file mode 100644 index 000000000000..6facab992ea7 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_split.go @@ -0,0 +1,44 @@ +package parser + +import ( + "strings" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +) + +func ResolveSplit(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Split"].AsList() + + if len(refValue) != 2 { + return abortIntrinsic(property, "Fn::Split should have exactly 2 values, returning original Property") + } + + delimiterProp := refValue[0] + splitProp := refValue[1] + + if !splitProp.IsString() || !delimiterProp.IsString() { + abortIntrinsic(property, "Fn::Split requires two strings as input, returning original Property") + + } + + propertyList := createPropertyList(splitProp, delimiterProp, property) + + return property.deriveResolved(cftypes.List, propertyList), true +} + +func createPropertyList(splitProp *Property, delimiterProp *Property, parent *Property) []*Property { + + splitString := splitProp.AsString() + delimiter := delimiterProp.AsString() + + splits := strings.Split(splitString, delimiter) + var props []*Property + for _, split := range splits { + props = append(props, parent.deriveResolved(cftypes.String, split)) + } + return props +} diff --git a/pkg/scanners/cloudformation/parser/fn_split_test.go b/pkg/scanners/cloudformation/parser/fn_split_test.go new file mode 100644 index 000000000000..1c69cb5459e5 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_split_test.go @@ -0,0 +1,56 @@ +package parser + +import ( + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "testing" +) + +/* + Fn::Split: ["::", "s3::bucket::to::split"] + +*/ + +func Test_resolve_split_value(t *testing.T) { + + property 
:= &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Split": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "::", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "s3::bucket::to::split", + }, + }, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + assert.True(t, resolvedProperty.IsNotNil()) + assert.True(t, resolvedProperty.IsList()) + listContents := resolvedProperty.AsList() + assert.Len(t, listContents, 4) + +} diff --git a/pkg/scanners/cloudformation/parser/fn_sub.go b/pkg/scanners/cloudformation/parser/fn_sub.go new file mode 100644 index 000000000000..81e8401bcfa7 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_sub.go @@ -0,0 +1,71 @@ +package parser + +import ( + "fmt" + "strconv" + "strings" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +) + +func ResolveSub(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Sub"] + + if refValue.IsString() { + return resolveStringSub(refValue, property), true + } + + if refValue.IsList() { + return resolveMapSub(refValue, property) + } + + return property, false +} + +func resolveMapSub(refValue *Property, original *Property) (*Property, bool) { + refValues := refValue.AsList() + if len(refValues) != 2 { + return abortIntrinsic(original, "Fn::Sub with list expects 2 values, returning original property") + } + + workingString := refValues[0].AsString() + components := refValues[1].AsMap() + + for k, v := range components { + replacement := "[failed to resolve]" + switch v.Type() { + case cftypes.Map: + resolved, _ := ResolveIntrinsicFunc(v) + replacement = 
resolved.AsString() + case cftypes.String: + replacement = v.AsString() + case cftypes.Int: + replacement = strconv.Itoa(v.AsInt()) + case cftypes.Bool: + replacement = fmt.Sprintf("%v", v.AsBool()) + case cftypes.List: + var parts []string + for _, p := range v.AsList() { + parts = append(parts, p.String()) + } + replacement = fmt.Sprintf("[%s]", strings.Join(parts, ", ")) + } + workingString = strings.ReplaceAll(workingString, fmt.Sprintf("${%s}", k), replacement) + } + + return original.deriveResolved(cftypes.String, workingString), true +} + +func resolveStringSub(refValue *Property, original *Property) *Property { + workingString := refValue.AsString() + + for k, param := range pseudoParameters { + workingString = strings.ReplaceAll(workingString, fmt.Sprintf("${%s}", k), fmt.Sprintf("%v", param.getRawValue())) + } + + return original.deriveResolved(cftypes.String, workingString) +} diff --git a/pkg/scanners/cloudformation/parser/fn_sub_test.go b/pkg/scanners/cloudformation/parser/fn_sub_test.go new file mode 100644 index 000000000000..5ab98a59692b --- /dev/null +++ b/pkg/scanners/cloudformation/parser/fn_sub_test.go @@ -0,0 +1,103 @@ +package parser + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_sub_value(t *testing.T) { + source := `--- +Resources: + TestInstance: + Type: AWS::EC2::Instance + Properties: + ImageId: "ami-79fd7eee" + KeyName: "testkey" + UserData: + !Sub | + #!/bin/bash -xe + yum update -y aws-cfn-bootstrap + /opt/aws/bin/cfn-init -v --stack ${AWS::StackName} --resource LaunchConfig --configsets wordpress_install --region ${AWS::Region} + /opt/aws/bin/cfn-signal -e $? 
--stack ${AWS::StackName} --resource WebServerGroup --region ${AWS::Region} +` + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("TestInstance") + require.NotNil(t, testRes) + + userDataProp := testRes.GetProperty("UserData") + require.NotNil(t, userDataProp) + + assert.Equal(t, "#!/bin/bash -xe\nyum update -y aws-cfn-bootstrap\n/opt/aws/bin/cfn-init -v --stack cfsec-test-stack --resource LaunchConfig --configsets wordpress_install --region eu-west-1\n/opt/aws/bin/cfn-signal -e $? --stack cfsec-test-stack --resource WebServerGroup --region eu-west-1\n", userDataProp.AsString()) +} + +func Test_resolve_sub_value_with_base64(t *testing.T) { + + source := `--- +Resources: + TestInstance: + Type: AWS::EC2::Instance + Properties: + ImageId: "ami-79fd7eee" + KeyName: "testkey" + UserData: + Fn::Base64: + !Sub | + #!/bin/bash -xe + yum update -y aws-cfn-bootstrap + /opt/aws/bin/cfn-init -v --stack ${AWS::StackName} --resource LaunchConfig --configsets wordpress_install --region ${AWS::Region} + /opt/aws/bin/cfn-signal -e $? 
--stack ${AWS::StackName} --resource WebServerGroup --region ${AWS::Region}` + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("TestInstance") + require.NotNil(t, testRes) + + userDataProp := testRes.GetProperty("UserData") + require.NotNil(t, userDataProp) + + assert.Equal(t, "IyEvYmluL2Jhc2ggLXhlCnl1bSB1cGRhdGUgLXkgYXdzLWNmbi1ib290c3RyYXAKL29wdC9hd3MvYmluL2Nmbi1pbml0IC12IC0tc3RhY2sgY2ZzZWMtdGVzdC1zdGFjayAtLXJlc291cmNlIExhdW5jaENvbmZpZyAtLWNvbmZpZ3NldHMgd29yZHByZXNzX2luc3RhbGwgLS1yZWdpb24gZXUtd2VzdC0xCi9vcHQvYXdzL2Jpbi9jZm4tc2lnbmFsIC1lICQ/IC0tc3RhY2sgY2ZzZWMtdGVzdC1zdGFjayAtLXJlc291cmNlIFdlYlNlcnZlckdyb3VwIC0tcmVnaW9uIGV1LXdlc3QtMQ==", userDataProp.AsString()) +} + +func Test_resolve_sub_value_with_map(t *testing.T) { + + source := `--- +Parameters: + RootDomainName: + Type: String + Default: somedomain.com +Resources: + TestDistribution: + Type: AWS::CloudFront::Distribution + Properties: + DistributionConfig: + DefaultCacheBehavior: + TargetOriginId: target + ViewerProtocolPolicy: https-only + Enabled: true + Origins: + - DomainName: + !Sub + - www.${Domain} + - { Domain: !Ref RootDomainName } + Id: somedomain1 + + +` + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("TestDistribution") + require.NotNil(t, testRes) + + originsList := testRes.GetProperty("DistributionConfig.Origins") + + domainNameProp := originsList.AsList()[0].GetProperty("DomainName") + require.NotNil(t, domainNameProp) + + assert.Equal(t, "www.somedomain.com", domainNameProp.AsString()) + +} diff --git a/pkg/scanners/cloudformation/parser/intrinsics.go b/pkg/scanners/cloudformation/parser/intrinsics.go new file mode 100644 index 000000000000..d455fd3d5c6e --- /dev/null +++ b/pkg/scanners/cloudformation/parser/intrinsics.go @@ -0,0 +1,101 @@ +package parser + +import ( + "fmt" + "strings" + + "gopkg.in/yaml.v3" +) + +var intrinsicFuncs map[string]func(property *Property) 
(*Property, bool) + +func init() { + intrinsicFuncs = map[string]func(property *Property) (*Property, bool){ + "Ref": ResolveReference, + "Fn::Base64": ResolveBase64, + "Fn::Equals": ResolveEquals, + "Fn::Join": ResolveJoin, + "Fn::Split": ResolveSplit, + "Fn::Sub": ResolveSub, + "Fn::FindInMap": ResolveFindInMap, + "Fn::Select": ResolveSelect, + "Fn::GetAtt": ResolveGetAtt, + "Fn::GetAZs": GetAzs, + "Fn::Cidr": GetCidr, + "Fn::ImportValue": ImportPlaceholder, + "Fn::If": ResolveIf, + "Fn::And": ResolveAnd, + "Fn::Or": ResolveOr, + "Fn::Not": ResolveNot, + "Fn::Length": ResolveLength, + "Condition": ResolveCondition, + } +} + +func ImportPlaceholder(property *Property) (*Property, bool) { + property.unresolved = true + return property, false +} + +func PassthroughResolution(property *Property) (*Property, bool) { + return property, false +} + +func IsIntrinsicFunc(node *yaml.Node) bool { + if node == nil || node.Tag == "" { + return false + } + + nodeTag := strings.TrimPrefix(node.Tag, "!") + if nodeTag != "Ref" && nodeTag != "Condition" { + nodeTag = fmt.Sprintf("Fn::%s", nodeTag) + } + for tag := range intrinsicFuncs { + + if nodeTag == tag { + return true + } + } + return false +} + +func IsIntrinsic(key string) bool { + for tag := range intrinsicFuncs { + if tag == key { + return true + } + } + return false +} + +func ResolveIntrinsicFunc(property *Property) (*Property, bool) { + if property == nil { + return nil, false + } + if !property.IsMap() { + return property, false + } + + for funcName := range property.AsMap() { + if fn := intrinsicFuncs[funcName]; fn != nil { + // + return fn(property) + } + } + return property, false +} + +func getIntrinsicTag(tag string) string { + tag = strings.TrimPrefix(tag, "!") + switch tag { + case "Ref", "Contains": + return tag + default: + return fmt.Sprintf("Fn::%s", tag) + } +} + +func abortIntrinsic(property *Property, msg string, components ...string) (*Property, bool) { + // + return property, false +} diff --git 
a/pkg/scanners/cloudformation/parser/intrinsics_test.go b/pkg/scanners/cloudformation/parser/intrinsics_test.go new file mode 100644 index 000000000000..a69e04dd0fba --- /dev/null +++ b/pkg/scanners/cloudformation/parser/intrinsics_test.go @@ -0,0 +1,45 @@ +package parser + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "gopkg.in/yaml.v3" +) + +func Test_is_intrinsic_returns_expected(t *testing.T) { + + testCases := []struct { + nodeTag string + expectedResult bool + }{ + { + nodeTag: "!Ref", + expectedResult: true, + }, + { + nodeTag: "!Join", + expectedResult: true, + }, + { + nodeTag: "!Sub", + expectedResult: true, + }, + { + nodeTag: "!Equals", + expectedResult: true, + }, + { + nodeTag: "!Equal", + expectedResult: false, + }, + } + + for _, tt := range testCases { + n := &yaml.Node{ + Tag: tt.nodeTag, + } + assert.Equal(t, tt.expectedResult, IsIntrinsicFunc(n)) + } + +} diff --git a/pkg/scanners/cloudformation/parser/parameter.go b/pkg/scanners/cloudformation/parser/parameter.go new file mode 100644 index 000000000000..493dea756168 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/parameter.go @@ -0,0 +1,129 @@ +package parser + +import ( + "bytes" + "encoding/json" + "fmt" + "strconv" + "strings" + + "github.com/liamg/jfather" + "gopkg.in/yaml.v3" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +) + +type Parameter struct { + inner parameterInner +} + +type parameterInner struct { + Type string `yaml:"Type"` + Default interface{} `yaml:"Default"` +} + +func (p *Parameter) UnmarshalYAML(node *yaml.Node) error { + return node.Decode(&p.inner) +} + +func (p *Parameter) UnmarshalJSONWithMetadata(node jfather.Node) error { + return node.Decode(&p.inner) +} + +func (p *Parameter) Type() cftypes.CfType { + switch p.inner.Type { + case "Boolean": + return cftypes.Bool + case "String": + return cftypes.String + case "Integer": + return cftypes.Int + default: + return cftypes.String + } +} + +func 
(p *Parameter) Default() interface{} { + return p.inner.Default +} + +func (p *Parameter) UpdateDefault(inVal interface{}) { + passedVal := inVal.(string) + + switch p.inner.Type { + case "Boolean": + p.inner.Default, _ = strconv.ParseBool(passedVal) + case "String": + p.inner.Default = passedVal + case "Integer": + p.inner.Default, _ = strconv.Atoi(passedVal) + default: + p.inner.Default = passedVal + } +} + +type Parameters map[string]any + +func (p *Parameters) Merge(other Parameters) { + for k, v := range other { + (*p)[k] = v + } +} + +func (p *Parameters) UnmarshalJSON(data []byte) error { + (*p) = make(Parameters) + + if len(data) == 0 { + return nil + } + + switch { + case data[0] == '{' && data[len(data)-1] == '}': // object + // CodePipeline like format + var params struct { + Params map[string]any `json:"Parameters"` + } + + if err := json.Unmarshal(data, ¶ms); err != nil { + return err + } + + (*p) = params.Params + case data[0] == '[' && data[len(data)-1] == ']': // array + { + // Original format + var params []string + + if err := json.Unmarshal(data, ¶ms); err == nil { + for _, param := range params { + parts := strings.Split(param, "=") + if len(parts) != 2 { + return fmt.Errorf("invalid key-value parameter: %q", param) + } + (*p)[parts[0]] = parts[1] + } + return nil + } + + // CloudFormation like format + var cfparams []struct { + ParameterKey string `json:"ParameterKey"` + ParameterValue string `json:"ParameterValue"` + } + + d := json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&cfparams); err != nil { + return err + } + + for _, param := range cfparams { + (*p)[param.ParameterKey] = param.ParameterValue + } + } + default: + return fmt.Errorf("unsupported parameters format") + } + + return nil +} diff --git a/pkg/scanners/cloudformation/parser/parameters_test.go b/pkg/scanners/cloudformation/parser/parameters_test.go new file mode 100644 index 000000000000..703f07f5fe12 --- /dev/null +++ 
b/pkg/scanners/cloudformation/parser/parameters_test.go @@ -0,0 +1,89 @@ +package parser + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestParameters_UnmarshalJSON(t *testing.T) { + tests := []struct { + name string + source string + expected Parameters + wantErr bool + }{ + { + name: "original format", + source: `[ + "Key1=Value1", + "Key2=Value2" + ]`, + expected: map[string]any{ + "Key1": "Value1", + "Key2": "Value2", + }, + }, + { + name: "CloudFormation like format", + source: `[ + { + "ParameterKey": "Key1", + "ParameterValue": "Value1" + }, + { + "ParameterKey": "Key2", + "ParameterValue": "Value2" + } + ]`, + expected: map[string]any{ + "Key1": "Value1", + "Key2": "Value2", + }, + }, + { + name: "CloudFormation like format, with unknown fields", + source: `[ + { + "ParameterKey": "Key1", + "ParameterValue": "Value1" + }, + { + "ParameterKey": "Key2", + "ParameterValue": "Value2", + "UsePreviousValue": true + } + ]`, + wantErr: true, + }, + { + name: "CodePipeline like format", + source: `{ + "Parameters": { + "Key1": "Value1", + "Key2": "Value2" + } + }`, + expected: map[string]any{ + "Key1": "Value1", + "Key2": "Value2", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var params Parameters + + err := json.Unmarshal([]byte(tt.source), ¶ms) + if tt.wantErr { + require.Error(t, err) + return + } + require.NoError(t, err) + assert.Equal(t, tt.expected, params) + }) + } +} diff --git a/pkg/scanners/cloudformation/parser/parser.go b/pkg/scanners/cloudformation/parser/parser.go new file mode 100644 index 000000000000..21aa0004304b --- /dev/null +++ b/pkg/scanners/cloudformation/parser/parser.go @@ -0,0 +1,236 @@ +package parser + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "io/fs" + "path/filepath" + "strings" + + "github.com/aquasecurity/defsec/pkg/debug" + 
"github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/liamg/jfather" + "gopkg.in/yaml.v3" + + "github.com/aquasecurity/trivy/pkg/detection" +) + +var _ options.ConfigurableParser = (*Parser)(nil) + +type Parser struct { + debug debug.Logger + skipRequired bool + parameterFiles []string + parameters map[string]any + overridedParameters Parameters + configsFS fs.FS +} + +func WithParameters(params map[string]any) options.ParserOption { + return func(cp options.ConfigurableParser) { + if p, ok := cp.(*Parser); ok { + p.parameters = params + } + } +} + +func WithParameterFiles(files ...string) options.ParserOption { + return func(cp options.ConfigurableParser) { + if p, ok := cp.(*Parser); ok { + p.parameterFiles = files + } + } +} + +func WithConfigsFS(fsys fs.FS) options.ParserOption { + return func(cp options.ConfigurableParser) { + if p, ok := cp.(*Parser); ok { + p.configsFS = fsys + } + } +} + +func (p *Parser) SetDebugWriter(writer io.Writer) { + p.debug = debug.New(writer, "cloudformation", "parser") +} + +func (p *Parser) SetSkipRequiredCheck(b bool) { + p.skipRequired = b +} + +func New(options ...options.ParserOption) *Parser { + p := &Parser{} + for _, option := range options { + option(p) + } + return p +} + +func (p *Parser) ParseFS(ctx context.Context, fsys fs.FS, dir string) (FileContexts, error) { + var contexts FileContexts + if err := fs.WalkDir(fsys, filepath.ToSlash(dir), func(path string, entry fs.DirEntry, err error) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + if err != nil { + return err + } + if entry.IsDir() { + return nil + } + + if !p.Required(fsys, path) { + p.debug.Log("not a CloudFormation file, skipping %s", path) + return nil + } + + c, err := p.ParseFile(ctx, fsys, path) + if err != nil { + p.debug.Log("Error parsing file '%s': %s", path, err) + return nil + } + contexts = append(contexts, c) + return nil + }); err != nil { + return nil, err + } + return contexts, 
nil +} + +func (p *Parser) Required(fs fs.FS, path string) bool { + if p.skipRequired { + return true + } + + f, err := fs.Open(filepath.ToSlash(path)) + if err != nil { + return false + } + defer func() { _ = f.Close() }() + if data, err := io.ReadAll(f); err == nil { + return detection.IsType(path, bytes.NewReader(data), detection.FileTypeCloudFormation) + } + return false + +} + +func (p *Parser) ParseFile(ctx context.Context, fsys fs.FS, path string) (context *FileContext, err error) { + defer func() { + if e := recover(); e != nil { + err = fmt.Errorf("panic during parse: %s", e) + } + }() + + select { + case <-ctx.Done(): + return nil, ctx.Err() + default: + } + + if p.configsFS == nil { + p.configsFS = fsys + } + + if err := p.parseParams(); err != nil { + return nil, fmt.Errorf("failed to parse parameters file: %w", err) + } + + sourceFmt := YamlSourceFormat + if strings.HasSuffix(strings.ToLower(path), ".json") { + sourceFmt = JsonSourceFormat + } + + f, err := fsys.Open(filepath.ToSlash(path)) + if err != nil { + return nil, err + } + defer func() { _ = f.Close() }() + + content, err := io.ReadAll(f) + if err != nil { + return nil, err + } + + lines := strings.Split(string(content), "\n") + + context = &FileContext{ + filepath: path, + lines: lines, + SourceFormat: sourceFmt, + } + + if strings.HasSuffix(strings.ToLower(path), ".json") { + if err := jfather.Unmarshal(content, context); err != nil { + return nil, NewErrInvalidContent(path, err) + } + } else { + if err := yaml.Unmarshal(content, context); err != nil { + return nil, NewErrInvalidContent(path, err) + } + } + + context.OverrideParameters(p.overridedParameters) + + context.lines = lines + context.SourceFormat = sourceFmt + context.filepath = path + + p.debug.Log("Context loaded from source %s", path) + + // the context must be set to conditions before resources + for _, c := range context.Conditions { + c.setContext(context) + } + + for name, r := range context.Resources { + 
r.ConfigureResource(name, fsys, path, context) + } + + return context, nil +} + +func (p *Parser) parseParams() error { + if p.overridedParameters != nil { // parameters have already been parsed + return nil + } + + params := make(Parameters) + + var errs []error + + for _, path := range p.parameterFiles { + if parameters, err := p.parseParametersFile(path); err != nil { + errs = append(errs, err) + } else { + params.Merge(parameters) + } + } + + if len(errs) != 0 { + return errors.Join(errs...) + } + + params.Merge(p.parameters) + + p.overridedParameters = params + return nil +} + +func (p *Parser) parseParametersFile(path string) (Parameters, error) { + f, err := p.configsFS.Open(path) + if err != nil { + return nil, fmt.Errorf("parameters file %q open error: %w", path, err) + } + + var parameters Parameters + if err := json.NewDecoder(f).Decode(¶meters); err != nil { + return nil, err + } + return parameters, nil +} diff --git a/pkg/scanners/cloudformation/parser/parser_test.go b/pkg/scanners/cloudformation/parser/parser_test.go new file mode 100644 index 000000000000..8a5e3844ff01 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/parser_test.go @@ -0,0 +1,374 @@ +package parser + +import ( + "context" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +func parseFile(t *testing.T, source string, name string) (FileContexts, error) { + tmp, err := os.MkdirTemp(os.TempDir(), "defsec") + require.NoError(t, err) + defer func() { _ = os.RemoveAll(tmp) }() + require.NoError(t, os.WriteFile(filepath.Join(tmp, name), []byte(source), 0600)) + fs := os.DirFS(tmp) + return New().ParseFS(context.TODO(), fs, ".") +} + +func Test_parse_yaml(t *testing.T) { + + source := `--- +Parameters: + BucketName: + Type: String + Default: naughty + EncryptBucket: + Type: Boolean + Default: false +Resources: + S3Bucket: + 
Type: 'AWS::S3::Bucket' + Properties: + BucketName: naughty + BucketEncryption: + ServerSideEncryptionConfiguration: + - BucketKeyEnabled: + Ref: EncryptBucket` + + files, err := parseFile(t, source, "cf.yaml") + require.NoError(t, err) + assert.Len(t, files, 1) + file := files[0] + + assert.Len(t, file.Resources, 1) + assert.Len(t, file.Parameters, 2) + + bucket, ok := file.Resources["S3Bucket"] + require.True(t, ok, "S3Bucket resource should be available") + assert.Equal(t, "cf.yaml", bucket.Range().GetFilename()) + assert.Equal(t, 10, bucket.Range().GetStartLine()) + assert.Equal(t, 17, bucket.Range().GetEndLine()) +} + +func Test_parse_json(t *testing.T) { + source := `{ + "Parameters": { + "BucketName": { + "Type": "String", + "Default": "naughty" + }, + "BucketKeyEnabled": { + "Type": "Boolean", + "Default": false + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "properties": { + "BucketName": { + "Ref": "BucketName" + }, + "BucketEncryption": { + "ServerSideEncryptionConfiguration": [ + { + "BucketKeyEnabled": { + "Ref": "BucketKeyEnabled" + } + } + ] + } + } + } + } +} +` + + files, err := parseFile(t, source, "cf.json") + require.NoError(t, err) + assert.Len(t, files, 1) + file := files[0] + + assert.Len(t, file.Resources, 1) + assert.Len(t, file.Parameters, 2) +} + +func Test_parse_yaml_with_map_ref(t *testing.T) { + + source := `--- +Parameters: + BucketName: + Type: String + Default: referencedBucket + EncryptBucket: + Type: Boolean + Default: false +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: + Ref: BucketName + BucketEncryption: + ServerSideEncryptionConfiguration: + - BucketKeyEnabled: + Ref: EncryptBucket` + + files, err := parseFile(t, source, "cf.yaml") + require.NoError(t, err) + assert.Len(t, files, 1) + file := files[0] + + assert.Len(t, file.Resources, 1) + assert.Len(t, file.Parameters, 2) + + res := file.GetResourceByLogicalID("S3Bucket") + assert.NotNil(t, res) + + refProp := 
res.GetProperty("BucketName") + assert.False(t, refProp.IsNil()) + assert.Equal(t, "referencedBucket", refProp.AsString()) +} + +func Test_parse_yaml_with_intrinsic_functions(t *testing.T) { + + source := `--- +Parameters: + BucketName: + Type: String + Default: somebucket + EncryptBucket: + Type: Boolean + Default: false +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: !Ref BucketName + BucketEncryption: + ServerSideEncryptionConfiguration: + - BucketKeyEnabled: false +` + + files, err := parseFile(t, source, "cf.yaml") + require.NoError(t, err) + assert.Len(t, files, 1) + ctx := files[0] + + assert.Len(t, ctx.Resources, 1) + assert.Len(t, ctx.Parameters, 2) + + res := ctx.GetResourceByLogicalID("S3Bucket") + assert.NotNil(t, res) + + refProp := res.GetProperty("BucketName") + assert.False(t, refProp.IsNil()) + assert.Equal(t, "somebucket", refProp.AsString()) +} + +func createTestFileContext(t *testing.T, source string) *FileContext { + contexts, err := parseFile(t, source, "main.yaml") + require.NoError(t, err) + require.Len(t, contexts, 1) + return contexts[0] +} + +func Test_parse_yaml_use_condition_in_resource(t *testing.T) { + source := `--- +AWSTemplateFormatVersion: "2010-09-09" +Description: some description +Parameters: + ServiceName: + Type: String + Description: The service name + EnvName: + Type: String + Description: Optional environment name to prefix all resources with + Default: "" + +Conditions: + SuffixResources: !Not [!Equals [!Ref EnvName, ""]] + +Resources: + ErrorTimedOutMetricFilter: + Type: AWS::Logs::MetricFilter + Properties: + FilterPattern: '?ERROR ?error ?Error ?"timed out"' # If log contains one of these error words or timed out + LogGroupName: + !If [ + SuffixResources, + !Sub "/aws/lambda/${ServiceName}-${EnvName}", + !Sub "/aws/lambda/${ServiceName}", + ] + MetricTransformations: + - MetricName: !Sub "${ServiceName}-ErrorLogCount" + MetricNamespace: market-LogMetrics + MetricValue: 1 + DefaultValue: 0 
+` + + files, err := parseFile(t, source, "cf.yaml") + require.NoError(t, err) + assert.Len(t, files, 1) + ctx := files[0] + + assert.Len(t, ctx.Parameters, 2) + assert.Len(t, ctx.Conditions, 1) + assert.Len(t, ctx.Resources, 1) + + res := ctx.GetResourceByLogicalID("ErrorTimedOutMetricFilter") + assert.NotNil(t, res) + + refProp := res.GetProperty("LogGroupName") + assert.False(t, refProp.IsNil()) + assert.Equal(t, "/aws/lambda/${ServiceName}", refProp.AsString()) +} + +func TestParse_WithParameters(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "main.yaml": `AWSTemplateFormatVersion: 2010-09-09 +Parameters: + KmsMasterKeyId: + Type: String +Resources: + TestQueue: + Type: 'AWS::SQS::Queue' + Properties: + QueueName: test-queue + KmsMasterKeyId: !Ref KmsMasterKeyId + `, + }) + + params := map[string]any{ + "KmsMasterKeyId": "some_id", + } + p := New(WithParameters(params)) + + files, err := p.ParseFS(context.TODO(), fs, ".") + require.NoError(t, err) + require.Len(t, files, 1) + + file := files[0] + res := file.GetResourceByLogicalID("TestQueue") + assert.NotNil(t, res) + + kmsProp := res.GetProperty("KmsMasterKeyId") + assert.False(t, kmsProp.IsNil()) + assert.Equal(t, "some_id", kmsProp.AsString()) +} + +func TestParse_WithParameterFiles(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.yaml": `AWSTemplateFormatVersion: 2010-09-09 +Parameters: + KmsMasterKeyId: + Type: String +Resources: + TestQueue: + Type: 'AWS::SQS::Queue' + Properties: + QueueName: test-queue + KmsMasterKeyId: !Ref KmsMasterKeyId +`, + "params.json": `[ + { + "ParameterKey": "KmsMasterKeyId", + "ParameterValue": "some_id" + } +] + `, + }) + + p := New(WithParameterFiles("params.json")) + + files, err := p.ParseFS(context.TODO(), fs, ".") + require.NoError(t, err) + require.Len(t, files, 1) + + file := files[0] + res := file.GetResourceByLogicalID("TestQueue") + assert.NotNil(t, res) + + kmsProp := res.GetProperty("KmsMasterKeyId") + assert.False(t, 
kmsProp.IsNil()) + assert.Equal(t, "some_id", kmsProp.AsString()) +} + +func TestParse_WithConfigFS(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "queue.yaml": `AWSTemplateFormatVersion: 2010-09-09 +Parameters: + KmsMasterKeyId: + Type: String +Resources: + TestQueue: + Type: 'AWS::SQS::Queue' + Properties: + QueueName: testqueue + KmsMasterKeyId: !Ref KmsMasterKeyId +`, + "bucket.yaml": `AWSTemplateFormatVersion: '2010-09-09' +Description: Bucket +Parameters: + BucketName: + Type: String +Resources: + S3Bucket: + Type: AWS::S3::Bucket + Properties: + BucketName: !Ref BucketName +`, + }) + + configFS := testutil.CreateFS(t, map[string]string{ + "/workdir/parameters/queue.json": `[ + { + "ParameterKey": "KmsMasterKeyId", + "ParameterValue": "some_id" + } + ] + `, + "/workdir/parameters/s3.json": `[ + { + "ParameterKey": "BucketName", + "ParameterValue": "testbucket" + } + ]`, + }) + + p := New( + WithParameterFiles("/workdir/parameters/queue.json", "/workdir/parameters/s3.json"), + WithConfigsFS(configFS), + ) + + files, err := p.ParseFS(context.TODO(), fs, ".") + require.NoError(t, err) + require.Len(t, files, 2) + + for _, file := range files { + if strings.Contains(file.filepath, "queue") { + res := file.GetResourceByLogicalID("TestQueue") + assert.NotNil(t, res) + + kmsProp := res.GetProperty("KmsMasterKeyId") + assert.False(t, kmsProp.IsNil()) + assert.Equal(t, "some_id", kmsProp.AsString()) + } else if strings.Contains(file.filepath, "s3") { + res := file.GetResourceByLogicalID("S3Bucket") + assert.NotNil(t, res) + + bucketNameProp := res.GetProperty("BucketName") + assert.False(t, bucketNameProp.IsNil()) + assert.Equal(t, "testbucket", bucketNameProp.AsString()) + } + } + +} diff --git a/pkg/scanners/cloudformation/parser/property.go b/pkg/scanners/cloudformation/parser/property.go new file mode 100644 index 000000000000..33336f9a2cda --- /dev/null +++ b/pkg/scanners/cloudformation/parser/property.go @@ -0,0 +1,428 @@ +package parser + 
import (
	"encoding/json"
	"io/fs"
	"strconv"
	"strings"

	defsecTypes "github.com/aquasecurity/defsec/pkg/types"

	"github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes"

	"github.com/liamg/jfather"
	"gopkg.in/yaml.v3"
)

// EqualityOptions modifies how Property.EqualTo compares values.
type EqualityOptions = int

const (
	// IgnoreCase makes string comparisons case-insensitive.
	IgnoreCase EqualityOptions = iota
)

// Property is a single (possibly nested) value within a CloudFormation
// template, annotated with source-range information for reporting.
type Property struct {
	ctx         *FileContext
	name        string
	comment     string
	rng         defsecTypes.Range
	parentRange defsecTypes.Range
	Inner       PropertyInner
	logicalId   string
	// unresolved is set when an intrinsic function could not be resolved;
	// accessors then report "unresolvable" values.
	unresolved bool
}

// PropertyInner carries the decoded value together with its detected type.
type PropertyInner struct {
	Type  cftypes.CfType
	Value interface{} `json:"Value" yaml:"Value"`
}

// Comment returns the YAML line comment attached to this property, if any.
func (p *Property) Comment() string {
	return p.comment
}

// setName records the property's key name and propagates names to map
// children.
func (p *Property) setName(name string) {
	p.name = name
	if p.Type() == cftypes.Map {
		for n, subProp := range p.AsMap() {
			if subProp == nil {
				continue
			}
			subProp.setName(n)
		}
	}
}

// setContext attaches the owning FileContext to this property and,
// recursively, to all map and list children.
func (p *Property) setContext(ctx *FileContext) {
	p.ctx = ctx

	if p.IsMap() {
		for _, subProp := range p.AsMap() {
			if subProp == nil {
				continue
			}
			subProp.setContext(ctx)
		}
	}

	if p.IsList() {
		for _, subProp := range p.AsList() {
			subProp.setContext(ctx)
		}
	}
}

// setFileAndParentRange fills in the filename/filesystem on the range (which
// is unknown at unmarshal time) and records the parent resource's range,
// recursing into children.
func (p *Property) setFileAndParentRange(target fs.FS, filepath string, parentRange defsecTypes.Range) {
	p.rng = defsecTypes.NewRange(filepath, p.rng.GetStartLine(), p.rng.GetEndLine(), p.rng.GetSourcePrefix(), target)
	p.parentRange = parentRange

	switch p.Type() {
	case cftypes.Map:
		for _, subProp := range p.AsMap() {
			if subProp == nil {
				continue
			}
			subProp.setFileAndParentRange(target, filepath, parentRange)
		}
	case cftypes.List:
		for _, subProp := range p.AsList() {
			if subProp == nil {
				continue
			}
			subProp.setFileAndParentRange(target, filepath, parentRange)
		}
	}
}

// UnmarshalYAML decodes a property from a YAML node, capturing its line range
// and any trailing comment. The filename is filled in later by
// setFileAndParentRange. calculateEndLine is defined elsewhere in this
// package.
func (p *Property) UnmarshalYAML(node *yaml.Node) error {
	p.rng = defsecTypes.NewRange("", node.Line, calculateEndLine(node), "", nil)

	p.comment = node.LineComment
	return setPropertyValueFromYaml(node, &p.Inner)
}

// UnmarshalJSONWithMetadata decodes a property from a jfather JSON node,
// capturing its line range.
func (p *Property) UnmarshalJSONWithMetadata(node jfather.Node) error {
	p.rng = defsecTypes.NewRange("", node.Range().Start.Line, node.Range().End.Line, "", nil)
	return setPropertyValueFromJson(node, &p.Inner)
}

// Type returns the detected cftypes type of the raw (unresolved) value.
func (p *Property) Type() cftypes.CfType {
	return p.Inner.Type
}

// Range returns the source range of this property.
func (p *Property) Range() defsecTypes.Range {
	return p.rng
}

// Metadata builds defsec metadata for this property. If the property is an
// intrinsic function, the resolved value (when resolvable) is used for the
// reference.
func (p *Property) Metadata() defsecTypes.Metadata {
	base := p
	if p.isFunction() {
		if resolved, ok := p.resolveValue(); ok {
			base = resolved
		}
	}
	ref := NewCFReferenceWithValue(p.parentRange, *base, p.logicalId)
	return defsecTypes.NewMetadata(p.Range(), ref.String())
}

// MetadataWithValue builds metadata using an explicitly provided resolved
// value instead of resolving here.
func (p *Property) MetadataWithValue(resolvedValue *Property) defsecTypes.Metadata {
	ref := NewCFReferenceWithValue(p.parentRange, *resolvedValue, p.logicalId)
	return defsecTypes.NewMetadata(p.Range(), ref.String())
}

// isFunction reports whether this property is an intrinsic-function call,
// i.e. a map whose (single) key is an intrinsic name such as Fn::If or Ref.
// NOTE(review): for a multi-key map this inspects whichever key Go's map
// iteration yields first — intrinsic maps are expected to be single-key.
func (p *Property) isFunction() bool {
	if p == nil {
		return false
	}
	if p.Type() == cftypes.Map {
		for n := range p.AsMap() {
			return IsIntrinsic(n)
		}
	}
	return false
}

// RawValue returns the undecoded inner value.
func (p *Property) RawValue() interface{} {
	return p.Inner.Value
}

// AsRawStrings returns the raw source lines covering this property. If the
// recorded range exceeds the cached file lines, the whole file is returned.
func (p *Property) AsRawStrings() ([]string, error) {

	if len(p.ctx.lines) < p.rng.GetEndLine() {
		return p.ctx.lines, nil
	}
	return p.ctx.lines[p.rng.GetStartLine()-1 : p.rng.GetEndLine()], nil
}

// resolveValue resolves an intrinsic-function property to its concrete value.
// On failure the property is marked unresolved and returned as-is with
// ok=false.
func (p *Property) resolveValue() (*Property, bool) {
	if !p.isFunction() || p.IsUnresolved() {
		return p, true
	}

	resolved, ok := ResolveIntrinsicFunc(p)
	if ok {
		return resolved, true
	}

	p.unresolved = true
	return p, false
}

// GetStringProperty returns the string value at the dotted path, falling back
// to defaultValue (or "") when the value is missing or not a string.
func (p *Property) GetStringProperty(path string, defaultValue ...string) defsecTypes.StringValue {
	defVal := ""
	if len(defaultValue) > 0 {
		defVal = defaultValue[0]
	}

	if p.IsUnresolved() {
		return defsecTypes.StringUnresolvable(p.Metadata())
	}

	prop := p.GetProperty(path)
	if prop.IsNotString() {
		return p.StringDefault(defVal)
	}
	return prop.AsStringValue()
}

// StringDefault wraps defaultValue as a defsec string carrying this
// property's metadata.
func (p *Property) StringDefault(defaultValue string) defsecTypes.StringValue {
	return defsecTypes.StringDefault(defaultValue, p.Metadata())
}

// GetBoolProperty returns the bool value at the dotted path, inferring bools
// from string/int forms ("true", 1, ...) and falling back to defaultValue.
func (p *Property) GetBoolProperty(path string, defaultValue ...bool) defsecTypes.BoolValue {
	defVal := false
	if len(defaultValue) > 0 {
		defVal = defaultValue[0]
	}

	if p.IsUnresolved() {
		return defsecTypes.BoolUnresolvable(p.Metadata())
	}

	prop := p.GetProperty(path)

	if prop.isFunction() {
		prop, _ = prop.resolveValue()
	}

	if prop.IsNotBool() {
		return p.inferBool(prop, defVal)
	}
	return prop.AsBoolValue()
}

// GetIntProperty returns the int value at the dotted path, falling back to
// defaultValue (or 0) when missing or not an int.
func (p *Property) GetIntProperty(path string, defaultValue ...int) defsecTypes.IntValue {
	defVal := 0
	if len(defaultValue) > 0 {
		defVal = defaultValue[0]
	}

	if p.IsUnresolved() {
		return defsecTypes.IntUnresolvable(p.Metadata())
	}

	prop := p.GetProperty(path)

	if prop.IsNotInt() {
		return p.IntDefault(defVal)
	}
	return prop.AsIntValue()
}

// BoolDefault wraps defaultValue as a defsec bool with this property's
// metadata.
func (p *Property) BoolDefault(defaultValue bool) defsecTypes.BoolValue {
	return defsecTypes.BoolDefault(defaultValue, p.Metadata())
}

// IntDefault wraps defaultValue as a defsec int with this property's
// metadata.
func (p *Property) IntDefault(defaultValue int) defsecTypes.IntValue {
	return defsecTypes.IntDefault(defaultValue, p.Metadata())
}

// GetProperty walks a dotted path ("A.B.C") through nested maps, resolving
// intrinsic functions along the way. Returns nil when this property is not a
// map, and an empty Property when an intermediate lookup dead-ends.
func (p *Property) GetProperty(path string) *Property {

	pathParts := strings.Split(path, ".")

	first := pathParts[0]
	property := p

	if p.isFunction() {
		property, _ = p.resolveValue()
	}

	if property.IsNotMap() {
		return nil
	}

	for n, p := range property.AsMap() {
		if n == first {
			property = p
			break
		}
	}

	if len(pathParts) == 1 || property == nil {
		return property
	}

	if nestedProperty := property.GetProperty(strings.Join(pathParts[1:], ".")); nestedProperty != nil {
		if nestedProperty.isFunction() {
			resolved, _ := nestedProperty.resolveValue()
			return resolved
		} else {
			return nestedProperty
		}
	}

	return &Property{}
}

// deriveResolved builds a new Property with the given type/value but this
// property's source metadata — used when an intrinsic resolves to a value.
func (p *Property) deriveResolved(propType cftypes.CfType, propValue interface{}) *Property {
	return &Property{
		ctx:         p.ctx,
		name:        p.name,
		comment:     p.comment,
		rng:         p.rng,
		parentRange: p.parentRange,
		logicalId:   p.logicalId,
		Inner: PropertyInner{
			Type:  propType,
			Value: propValue,
		},
	}
}

// ParentRange returns the source range of the enclosing resource.
func (p *Property) ParentRange() defsecTypes.Range {
	return p.parentRange
}

// inferBool interprets common truthy/falsy string and int encodings
// ("true"/"yes"/"1", 0/1) as booleans, falling back to defaultValue.
func (p *Property) inferBool(prop *Property, defaultValue bool) defsecTypes.BoolValue {
	if prop.IsString() {
		if prop.EqualTo("true", IgnoreCase) {
			return defsecTypes.Bool(true, prop.Metadata())
		}
		if prop.EqualTo("yes", IgnoreCase) {
			return defsecTypes.Bool(true, prop.Metadata())
		}
		if prop.EqualTo("1", IgnoreCase) {
			return defsecTypes.Bool(true, prop.Metadata())
		}
		if prop.EqualTo("false", IgnoreCase) {
			return defsecTypes.Bool(false, prop.Metadata())
		}
		if prop.EqualTo("no", IgnoreCase) {
			return defsecTypes.Bool(false, prop.Metadata())
		}
		if prop.EqualTo("0", IgnoreCase) {
			return defsecTypes.Bool(false, prop.Metadata())
		}
	}

	if prop.IsInt() {
		if prop.EqualTo(0) {
			return defsecTypes.Bool(false, prop.Metadata())
		}
		if prop.EqualTo(1) {
			return defsecTypes.Bool(true, prop.Metadata())
		}
	}

	return p.BoolDefault(defaultValue)
}

// String renders string and int properties; other types render as "".
func (p *Property) String() string {
	r := ""
	switch p.Type() {
	case cftypes.String:
		r = p.AsString()
	case cftypes.Int:
		r = strconv.Itoa(p.AsInt())
	}
	return r
}

// SetLogicalResource records the owning resource's logical ID on this
// property and its children; recursion stops at intrinsic functions.
func (p *Property) SetLogicalResource(id string) {
	p.logicalId = id

	if p.isFunction() {
		return
	}

	if p.IsMap() {
		for _, subProp := range p.AsMap() {
			if subProp == nil {
				continue
			}
			subProp.SetLogicalResource(id)
		}
	}

	if p.IsList() {
		for _, subProp := range p.AsList() {
			subProp.SetLogicalResource(id)
		}
	}

}
[]byte(strings.Join(lines, " ")) + } + + if len(squashList) > 0 { + lines[0] = strings.Replace(lines[0], "-", " ", 1) + } + + lines = removeLeftMargin(lines) + + yamlContent := strings.Join(lines, "\n") + var body interface{} + if err := yaml.Unmarshal([]byte(yamlContent), &body); err != nil { + return nil + } + jsonBody := convert(body) + policyJson, err := json.Marshal(jsonBody) + if err != nil { + return nil + } + return policyJson +} + +func (p *Property) GetJsonBytesAsString(squashList ...bool) string { + return string(p.GetJsonBytes(squashList...)) +} + +func removeLeftMargin(lines []string) []string { + if len(lines) == 0 { + return lines + } + prefixSpace := len(lines[0]) - len(strings.TrimLeft(lines[0], " ")) + + for i, line := range lines { + if len(line) >= prefixSpace { + lines[i] = line[prefixSpace:] + } + } + return lines +} + +func convert(input interface{}) interface{} { + switch x := input.(type) { + case map[interface{}]interface{}: + outpMap := map[string]interface{}{} + for k, v := range x { + outpMap[k.(string)] = convert(v) + } + return outpMap + case []interface{}: + for i, v := range x { + x[i] = convert(v) + } + } + return input +} diff --git a/pkg/scanners/cloudformation/parser/property_conversion.go b/pkg/scanners/cloudformation/parser/property_conversion.go new file mode 100644 index 000000000000..45ff7f3dc927 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/property_conversion.go @@ -0,0 +1,129 @@ +package parser + +import ( + "fmt" + "os" + "strconv" + "strings" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +) + +func (p *Property) IsConvertableTo(conversionType cftypes.CfType) bool { + switch conversionType { + case cftypes.Int: + return p.isConvertableToInt() + case cftypes.Bool: + return p.isConvertableToBool() + case cftypes.String: + return p.isConvertableToString() + } + return false +} + +func (p *Property) isConvertableToString() bool { + switch p.Type() { + case cftypes.Map: + return 
false + case cftypes.List: + for _, p := range p.AsList() { + if !p.IsString() { + return false + } + } + } + return true +} + +func (p *Property) isConvertableToBool() bool { + switch p.Type() { + case cftypes.String: + return p.EqualTo("true", IgnoreCase) || p.EqualTo("false", IgnoreCase) || + p.EqualTo("1", IgnoreCase) || p.EqualTo("0", IgnoreCase) + + case cftypes.Int: + return p.EqualTo(1) || p.EqualTo(0) + } + return false +} + +func (p *Property) isConvertableToInt() bool { + switch p.Type() { + case cftypes.String: + if _, err := strconv.Atoi(p.AsString()); err == nil { + return true + } + case cftypes.Bool: + return true + } + return false +} + +func (p *Property) ConvertTo(conversionType cftypes.CfType) *Property { + + if !p.IsConvertableTo(conversionType) { + _, _ = fmt.Fprintf(os.Stderr, "property of type %s cannot be converted to %s\n", p.Type(), conversionType) + return p + } + switch conversionType { + case cftypes.Int: + return p.convertToInt() + case cftypes.Bool: + return p.convertToBool() + case cftypes.String: + return p.convertToString() + } + return p +} + +func (p *Property) convertToString() *Property { + switch p.Type() { + case cftypes.Int: + return p.deriveResolved(cftypes.String, strconv.Itoa(p.AsInt())) + case cftypes.Bool: + return p.deriveResolved(cftypes.String, fmt.Sprintf("%v", p.AsBool())) + case cftypes.List: + var parts []string + for _, property := range p.AsList() { + parts = append(parts, property.AsString()) + } + return p.deriveResolved(cftypes.String, fmt.Sprintf("[%s]", strings.Join(parts, ", "))) + } + return p +} + +func (p *Property) convertToBool() *Property { + switch p.Type() { + case cftypes.String: + if p.EqualTo("true", IgnoreCase) || p.EqualTo("1") { + return p.deriveResolved(cftypes.Bool, true) + } + if p.EqualTo("false", IgnoreCase) || p.EqualTo("0") { + return p.deriveResolved(cftypes.Bool, false) + } + case cftypes.Int: + if p.EqualTo(1) { + return p.deriveResolved(cftypes.Bool, true) + } + if p.EqualTo(0) { 
+ return p.deriveResolved(cftypes.Bool, false) + } + } + return p +} + +func (p *Property) convertToInt() *Property { + // + switch p.Type() { + case cftypes.String: + if val, err := strconv.Atoi(p.AsString()); err == nil { + return p.deriveResolved(cftypes.Int, val) + } + case cftypes.Bool: + if p.IsTrue() { + return p.deriveResolved(cftypes.Int, 1) + } + return p.deriveResolved(cftypes.Int, 0) + } + return p +} diff --git a/pkg/scanners/cloudformation/parser/property_helpers.go b/pkg/scanners/cloudformation/parser/property_helpers.go new file mode 100644 index 000000000000..6883930ea4a6 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/property_helpers.go @@ -0,0 +1,267 @@ +package parser + +import ( + "strconv" + "strings" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +) + +func (p *Property) IsNil() bool { + return p == nil || p.Inner.Value == nil +} + +func (p *Property) IsNotNil() bool { + return !p.IsUnresolved() && !p.IsNil() +} + +func (p *Property) Is(t cftypes.CfType) bool { + if p.IsNil() || p.IsUnresolved() { + return false + } + if p.isFunction() { + if prop, success := p.resolveValue(); success && prop != p { + return prop.Is(t) + } + } + return p.Inner.Type == t +} + +func (p *Property) IsString() bool { + return p.Is(cftypes.String) +} + +func (p *Property) IsNotString() bool { + return !p.IsUnresolved() && !p.IsString() +} + +func (p *Property) IsInt() bool { + return p.Is(cftypes.Int) +} + +func (p *Property) IsNotInt() bool { + return !p.IsUnresolved() && !p.IsInt() +} + +func (p *Property) IsMap() bool { + if p.IsNil() || p.IsUnresolved() { + return false + } + return p.Inner.Type == cftypes.Map +} + +func (p *Property) IsNotMap() bool { + return !p.IsUnresolved() && !p.IsMap() +} + +func (p *Property) IsList() bool { + return p.Is(cftypes.List) +} + +func (p *Property) IsNotList() bool { + return !p.IsUnresolved() && !p.IsList() +} + +func 
(p *Property) IsBool() bool { + return p.Is(cftypes.Bool) +} + +func (p *Property) IsUnresolved() bool { + return p != nil && p.unresolved +} + +func (p *Property) IsNotBool() bool { + return !p.IsUnresolved() && !p.IsBool() +} + +func (p *Property) AsString() string { + if p.isFunction() { + if prop, success := p.resolveValue(); success && prop != p { + return prop.AsString() + } + return "" + } + if p.IsNil() { + return "" + } + if !p.IsString() { + return "" + } + + return p.Inner.Value.(string) +} + +func (p *Property) AsStringValue() defsecTypes.StringValue { + if p.unresolved { + return defsecTypes.StringUnresolvable(p.Metadata()) + } + return defsecTypes.StringExplicit(p.AsString(), p.Metadata()) +} + +func (p *Property) AsInt() int { + if p.isFunction() { + if prop, success := p.resolveValue(); success && prop != p { + return prop.AsInt() + } + return 0 + } + if p.IsNotInt() { + if p.isConvertableToInt() { + return p.convertToInt().AsInt() + } + return 0 + } + + return p.Inner.Value.(int) +} + +func (p *Property) AsIntValue() defsecTypes.IntValue { + if p.unresolved { + return defsecTypes.IntUnresolvable(p.Metadata()) + } + return defsecTypes.IntExplicit(p.AsInt(), p.Metadata()) +} + +func (p *Property) AsBool() bool { + if p.isFunction() { + if prop, success := p.resolveValue(); success && prop != p { + return prop.AsBool() + } + return false + } + if !p.IsBool() { + return false + } + return p.Inner.Value.(bool) +} + +func (p *Property) AsBoolValue() defsecTypes.BoolValue { + if p.unresolved { + return defsecTypes.BoolUnresolvable(p.Metadata()) + } + return defsecTypes.Bool(p.AsBool(), p.Metadata()) +} + +func (p *Property) AsMap() map[string]*Property { + val, ok := p.Inner.Value.(map[string]*Property) + if !ok { + return nil + } + return val +} + +func (p *Property) AsList() []*Property { + if p.isFunction() { + if prop, success := p.resolveValue(); success && prop != p { + return prop.AsList() + } + return []*Property{} + } + + if list, ok := 
p.Inner.Value.([]*Property); ok { + return list + } + return nil +} + +func (p *Property) Len() int { + return len(p.AsList()) +} + +func (p *Property) EqualTo(checkValue interface{}, equalityOptions ...EqualityOptions) bool { + var ignoreCase bool + for _, option := range equalityOptions { + if option == IgnoreCase { + ignoreCase = true + } + } + + switch checkerVal := checkValue.(type) { + case string: + if p.IsNil() { + return false + } + + if p.Inner.Type == cftypes.String || p.IsString() { + if ignoreCase { + return strings.EqualFold(p.AsString(), checkerVal) + } + return p.AsString() == checkerVal + } else if p.Inner.Type == cftypes.Int || p.IsInt() { + if val, err := strconv.Atoi(checkerVal); err == nil { + return p.AsInt() == val + } + } + return false + case bool: + if p.Inner.Type == cftypes.Bool || p.IsBool() { + return p.AsBool() == checkerVal + } + case int: + if p.Inner.Type == cftypes.Int || p.IsInt() { + return p.AsInt() == checkerVal + } + } + + return false + +} + +func (p *Property) IsTrue() bool { + if p.IsNil() || !p.IsBool() { + return false + } + + return p.AsBool() +} + +func (p *Property) IsEmpty() bool { + + if p.IsNil() { + return true + } + if p.IsUnresolved() { + return false + } + + switch p.Inner.Type { + case cftypes.String: + return p.AsString() == "" + case cftypes.List, cftypes.Map: + return len(p.AsList()) == 0 + default: + return false + } +} + +func (p *Property) Contains(checkVal interface{}) bool { + if p == nil || p.IsNil() { + return false + } + + switch p.Type() { + case cftypes.List: + for _, p := range p.AsList() { + if p.EqualTo(checkVal) { + return true + } + } + case cftypes.Map: + if _, ok := checkVal.(string); !ok { + return false + } + for key := range p.AsMap() { + if key == checkVal.(string) { + return true + } + } + case cftypes.String: + if _, ok := checkVal.(string); !ok { + return false + } + return strings.Contains(p.AsString(), checkVal.(string)) + } + return false +} diff --git 
a/pkg/scanners/cloudformation/parser/property_helpers_test.go b/pkg/scanners/cloudformation/parser/property_helpers_test.go new file mode 100644 index 000000000000..c421cf3b9357 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/property_helpers_test.go @@ -0,0 +1,195 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/stretchr/testify/assert" +) + +func newProp(inner PropertyInner) *Property { + return &Property{ + name: "test_prop", + ctx: &FileContext{}, + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: inner, + } +} + +func Test_EqualTo(t *testing.T) { + tests := []struct { + name string + property *Property + checkValue interface{} + opts []EqualityOptions + isEqual bool + }{ + { + name: "prop is nil", + property: nil, + checkValue: "some value", + isEqual: false, + }, + { + name: "compare strings", + property: newProp(PropertyInner{ + Type: cftypes.String, + Value: "is str", + }), + checkValue: "is str", + isEqual: true, + }, + { + name: "compare strings ignoring case", + property: newProp(PropertyInner{ + Type: cftypes.String, + Value: "is str", + }), + opts: []EqualityOptions{IgnoreCase}, + checkValue: "Is StR", + isEqual: true, + }, + { + name: "strings ate not equal", + property: newProp(PropertyInner{ + Type: cftypes.String, + Value: "some value", + }), + checkValue: "some other value", + isEqual: false, + }, + { + name: "compare prop with a int represented by a string", + property: newProp(PropertyInner{ + Type: cftypes.Int, + Value: 147, + }), + checkValue: "147", + isEqual: true, + }, + { + name: "compare ints", + property: newProp(PropertyInner{ + Type: cftypes.Int, + Value: 701, + }), + checkValue: 701, + isEqual: true, + }, + { + name: "compare bools", + property: newProp(PropertyInner{ + Type: cftypes.Bool, + Value: true, + }), + checkValue: true, + isEqual: true, + }, + { + name: "prop is 
string fn", + property: newProp(PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::If": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.Bool, + Value: false, + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "bad", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "some value", + }, + }, + }, + }, + }, + }, + }), + checkValue: "some value", + isEqual: true, + }, + { + name: "prop is int fn", + property: newProp(PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::If": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.Bool, + Value: true, + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.Int, + Value: 121, + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.Int, + Value: -1, + }, + }, + }, + }, + }, + }, + }), + checkValue: 121, + isEqual: true, + }, + { + name: "prop is bool fn", + property: newProp(PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + }, + }, + }, + }, + }), + checkValue: true, + isEqual: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.isEqual, tt.property.EqualTo(tt.checkValue, tt.opts...)) + }) + } +} diff --git a/pkg/scanners/cloudformation/parser/pseudo_parameters.go b/pkg/scanners/cloudformation/parser/pseudo_parameters.go new file mode 100644 index 000000000000..3027095c13b7 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/pseudo_parameters.go @@ -0,0 +1,46 @@ +package parser + +import "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + +type pseudoParameter struct { + t 
cftypes.CfType + val interface{} + raw interface{} +} + +var pseudoParameters = map[string]pseudoParameter{ + "AWS::AccountId": {t: cftypes.String, val: "123456789012"}, + "AWS::NotificationARNs": { + t: cftypes.List, + val: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "notification::arn::1", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "notification::arn::2", + }, + }, + }, + raw: []string{"notification::arn::1", "notification::arn::2"}, + }, + "AWS::NoValue": {t: cftypes.String, val: ""}, + "AWS::Partition": {t: cftypes.String, val: "aws"}, + "AWS::Region": {t: cftypes.String, val: "eu-west-1"}, + "AWS::StackId": {t: cftypes.String, val: "arn:aws:cloudformation:eu-west-1:stack/ID"}, + "AWS::StackName": {t: cftypes.String, val: "cfsec-test-stack"}, + "AWS::URLSuffix": {t: cftypes.String, val: "amazonaws.com"}, +} + +func (p pseudoParameter) getRawValue() interface{} { + switch p.t { + case cftypes.List: + return p.raw + default: + return p.val + } +} diff --git a/pkg/scanners/cloudformation/parser/pseudo_parameters_test.go b/pkg/scanners/cloudformation/parser/pseudo_parameters_test.go new file mode 100644 index 000000000000..281bf9083a14 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/pseudo_parameters_test.go @@ -0,0 +1,36 @@ +package parser + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Raw(t *testing.T) { + tests := []struct { + name string + key string + expected interface{} + }{ + { + name: "parameter with a string type value", + key: "AWS::AccountId", + expected: "123456789012", + }, + { + name: "a parameter with a list type value", + key: "AWS::NotificationARNs", + expected: []string{"notification::arn::1", "notification::arn::2"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if val, ok := pseudoParameters[tt.key]; ok { + assert.Equal(t, tt.expected, val.getRawValue()) + } else { + t.Fatal("unexpected parameter key") + } 
+ }) + } +} diff --git a/pkg/scanners/cloudformation/parser/reference.go b/pkg/scanners/cloudformation/parser/reference.go new file mode 100644 index 000000000000..2ff10058d868 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/reference.go @@ -0,0 +1,58 @@ +package parser + +import ( + "fmt" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +type CFReference struct { + logicalId string + resourceRange defsecTypes.Range + resolvedValue Property +} + +func NewCFReference(id string, resourceRange defsecTypes.Range) CFReference { + return CFReference{ + logicalId: id, + resourceRange: resourceRange, + } +} + +func NewCFReferenceWithValue(resourceRange defsecTypes.Range, resolvedValue Property, logicalId string) CFReference { + return CFReference{ + resourceRange: resourceRange, + resolvedValue: resolvedValue, + logicalId: logicalId, + } +} + +func (cf CFReference) String() string { + return cf.resourceRange.String() +} + +func (cf CFReference) LogicalID() string { + return cf.logicalId +} + +func (cf CFReference) ResourceRange() defsecTypes.Range { + return cf.resourceRange +} + +func (cf CFReference) PropertyRange() defsecTypes.Range { + if cf.resolvedValue.IsNotNil() { + return cf.resolvedValue.Range() + } + return defsecTypes.Range{} +} + +func (cf CFReference) DisplayValue() string { + if cf.resolvedValue.IsNotNil() { + return fmt.Sprintf("%v", cf.resolvedValue.RawValue()) + } + return "" +} + +func (cf *CFReference) Comment() string { + return cf.resolvedValue.Comment() +} diff --git a/pkg/scanners/cloudformation/parser/resource.go b/pkg/scanners/cloudformation/parser/resource.go new file mode 100644 index 000000000000..1258ac3fd5c0 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/resource.go @@ -0,0 +1,211 @@ +package parser + +import ( + "io/fs" + "strings" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/liamg/jfather" + "gopkg.in/yaml.v3" +) + +type Resource struct { + ctx *FileContext 
+ rng defsecTypes.Range + id string + comment string + Inner ResourceInner +} + +type ResourceInner struct { + Type string `json:"Type" yaml:"Type"` + Properties map[string]*Property `json:"Properties" yaml:"Properties"` +} + +func (r *Resource) ConfigureResource(id string, target fs.FS, filepath string, ctx *FileContext) { + r.setId(id) + r.setFile(target, filepath) + r.setContext(ctx) +} + +func (r *Resource) setId(id string) { + r.id = id + + for n, p := range r.properties() { + p.setName(n) + } +} + +func (r *Resource) setFile(target fs.FS, filepath string) { + r.rng = defsecTypes.NewRange(filepath, r.rng.GetStartLine(), r.rng.GetEndLine(), r.rng.GetSourcePrefix(), target) + + for _, p := range r.Inner.Properties { + p.setFileAndParentRange(target, filepath, r.rng) + } +} + +func (r *Resource) setContext(ctx *FileContext) { + r.ctx = ctx + + for _, p := range r.Inner.Properties { + p.SetLogicalResource(r.id) + p.setContext(ctx) + } +} + +func (r *Resource) UnmarshalYAML(value *yaml.Node) error { + r.rng = defsecTypes.NewRange("", value.Line-1, calculateEndLine(value), "", nil) + r.comment = value.LineComment + return value.Decode(&r.Inner) +} + +func (r *Resource) UnmarshalJSONWithMetadata(node jfather.Node) error { + r.rng = defsecTypes.NewRange("", node.Range().Start.Line, node.Range().End.Line, "", nil) + return node.Decode(&r.Inner) +} + +func (r *Resource) ID() string { + return r.id +} + +func (r *Resource) Type() string { + return r.Inner.Type +} + +func (r *Resource) Range() defsecTypes.Range { + return r.rng +} + +func (r *Resource) SourceFormat() SourceFormat { + return r.ctx.SourceFormat +} + +func (r *Resource) Metadata() defsecTypes.Metadata { + return defsecTypes.NewMetadata(r.Range(), NewCFReference(r.id, r.rng).String()) +} + +func (r *Resource) properties() map[string]*Property { + return r.Inner.Properties +} + +func (r *Resource) IsNil() bool { + return r.id == "" +} + +func (r *Resource) GetProperty(path string) *Property { + + pathParts := 
strings.Split(path, ".") + + first := pathParts[0] + property := &Property{} + + for n, p := range r.properties() { + if n == first { + property = p + break + } + } + + if len(pathParts) == 1 || property.IsNil() { + if property.isFunction() { + resolved, _ := property.resolveValue() + return resolved + } + return property + } + + if nestedProperty := property.GetProperty(strings.Join(pathParts[1:], ".")); nestedProperty != nil { + return nestedProperty + } + + return &Property{} +} + +func (r *Resource) GetStringProperty(path string, defaultValue ...string) defsecTypes.StringValue { + defVal := "" + if len(defaultValue) > 0 { + defVal = defaultValue[0] + } + + prop := r.GetProperty(path) + + if prop.IsNotString() { + return r.StringDefault(defVal) + } + return prop.AsStringValue() +} + +func (r *Resource) GetBoolProperty(path string, defaultValue ...bool) defsecTypes.BoolValue { + defVal := false + if len(defaultValue) > 0 { + defVal = defaultValue[0] + } + + prop := r.GetProperty(path) + + if prop.IsNotBool() { + return r.inferBool(prop, defVal) + } + return prop.AsBoolValue() +} + +func (r *Resource) GetIntProperty(path string, defaultValue ...int) defsecTypes.IntValue { + defVal := 0 + if len(defaultValue) > 0 { + defVal = defaultValue[0] + } + + prop := r.GetProperty(path) + + if prop.IsNotInt() { + return r.IntDefault(defVal) + } + return prop.AsIntValue() +} + +func (r *Resource) StringDefault(defaultValue string) defsecTypes.StringValue { + return defsecTypes.StringDefault(defaultValue, r.Metadata()) +} + +func (r *Resource) BoolDefault(defaultValue bool) defsecTypes.BoolValue { + return defsecTypes.BoolDefault(defaultValue, r.Metadata()) +} + +func (r *Resource) IntDefault(defaultValue int) defsecTypes.IntValue { + return defsecTypes.IntDefault(defaultValue, r.Metadata()) +} + +func (r *Resource) inferBool(prop *Property, defaultValue bool) defsecTypes.BoolValue { + if prop.IsString() { + if prop.EqualTo("true", IgnoreCase) { + return defsecTypes.Bool(true, 
prop.Metadata()) + } + if prop.EqualTo("yes", IgnoreCase) { + return defsecTypes.Bool(true, prop.Metadata()) + } + if prop.EqualTo("1", IgnoreCase) { + return defsecTypes.Bool(true, prop.Metadata()) + } + if prop.EqualTo("false", IgnoreCase) { + return defsecTypes.Bool(false, prop.Metadata()) + } + if prop.EqualTo("no", IgnoreCase) { + return defsecTypes.Bool(false, prop.Metadata()) + } + if prop.EqualTo("0", IgnoreCase) { + return defsecTypes.Bool(false, prop.Metadata()) + } + } + + if prop.IsInt() { + if prop.EqualTo(0) { + return defsecTypes.Bool(false, prop.Metadata()) + } + if prop.EqualTo(1) { + return defsecTypes.Bool(true, prop.Metadata()) + } + } + + return r.BoolDefault(defaultValue) +} diff --git a/pkg/scanners/cloudformation/parser/resource_test.go b/pkg/scanners/cloudformation/parser/resource_test.go new file mode 100644 index 000000000000..eff28ae63931 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/resource_test.go @@ -0,0 +1,75 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/stretchr/testify/require" +) + +func Test_GetProperty_PropIsFunction(t *testing.T) { + resource := Resource{ + Inner: ResourceInner{ + Type: "AWS::S3::Bucket", + Properties: map[string]*Property{ + "BucketName": { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "mybucket", + }, + }, + "VersioningConfiguration": { + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::If": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.Bool, + Value: false, + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Status": { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "Enabled", + }, + }, + }, + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Status": { + Inner: PropertyInner{ + Type: cftypes.String, + 
Value: "Suspended", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + prop := resource.GetProperty("VersioningConfiguration.Status") + require.NotNil(t, prop) + require.True(t, prop.IsString()) + require.Equal(t, "Suspended", prop.AsString()) +} diff --git a/pkg/scanners/cloudformation/parser/util.go b/pkg/scanners/cloudformation/parser/util.go new file mode 100644 index 000000000000..a00a8ec8dd78 --- /dev/null +++ b/pkg/scanners/cloudformation/parser/util.go @@ -0,0 +1,139 @@ +package parser + +import ( + "strconv" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + + "github.com/liamg/jfather" + "gopkg.in/yaml.v3" +) + +func setPropertyValueFromJson(node jfather.Node, propertyData *PropertyInner) error { + + switch node.Kind() { + + case jfather.KindNumber: + propertyData.Type = cftypes.Float64 + return node.Decode(&propertyData.Value) + case jfather.KindBoolean: + propertyData.Type = cftypes.Bool + return node.Decode(&propertyData.Value) + case jfather.KindString: + propertyData.Type = cftypes.String + return node.Decode(&propertyData.Value) + case jfather.KindObject: + var childData map[string]*Property + if err := node.Decode(&childData); err != nil { + return err + } + propertyData.Type = cftypes.Map + propertyData.Value = childData + return nil + case jfather.KindArray: + var childData []*Property + if err := node.Decode(&childData); err != nil { + return err + } + propertyData.Type = cftypes.List + propertyData.Value = childData + return nil + default: + propertyData.Type = cftypes.String + return node.Decode(&propertyData.Value) + } + +} + +func setPropertyValueFromYaml(node *yaml.Node, propertyData *PropertyInner) error { + if IsIntrinsicFunc(node) { + var newContent []*yaml.Node + + newContent = append(newContent, &yaml.Node{ + Tag: "!!str", + Value: getIntrinsicTag(node.Tag), + Kind: yaml.ScalarNode, + }) + + newContent = createNode(node, newContent) + + node.Tag = "!!map" + node.Kind = 
yaml.MappingNode + node.Content = newContent + } + + if node.Content == nil { + + switch node.Tag { + + case "!!int": + propertyData.Type = cftypes.Int + propertyData.Value, _ = strconv.Atoi(node.Value) + case "!!bool": + propertyData.Type = cftypes.Bool + propertyData.Value, _ = strconv.ParseBool(node.Value) + case "!!str", "!!string": + propertyData.Type = cftypes.String + propertyData.Value = node.Value + } + return nil + } + + switch node.Tag { + case "!!map": + var childData map[string]*Property + if err := node.Decode(&childData); err != nil { + return err + } + propertyData.Type = cftypes.Map + propertyData.Value = childData + return nil + case "!!seq": + var childData []*Property + if err := node.Decode(&childData); err != nil { + return err + } + propertyData.Type = cftypes.List + propertyData.Value = childData + return nil + } + + return nil +} + +func createNode(node *yaml.Node, newContent []*yaml.Node) []*yaml.Node { + if node.Content == nil { + newContent = append(newContent, &yaml.Node{ + Tag: "!!str", + Value: node.Value, + Kind: yaml.ScalarNode, + }) + } else { + + newNode := &yaml.Node{ + Content: node.Content, + Kind: node.Kind, + } + + switch node.Kind { + case yaml.SequenceNode: + newNode.Tag = "!!seq" + case yaml.MappingNode: + newNode.Tag = "!!map" + case yaml.ScalarNode: + default: + newNode.Tag = node.Tag + } + newContent = append(newContent, newNode) + } + return newContent +} + +func calculateEndLine(node *yaml.Node) int { + if node.Content == nil { + return node.Line + } + + return calculateEndLine(node.Content[len(node.Content)-1]) + +} diff --git a/pkg/scanners/cloudformation/scanner.go b/pkg/scanners/cloudformation/scanner.go new file mode 100644 index 000000000000..f68f89cf4285 --- /dev/null +++ b/pkg/scanners/cloudformation/scanner.go @@ -0,0 +1,263 @@ +package cloudformation + +import ( + "context" + "fmt" + "io" + "io/fs" + "sort" + "sync" + + "github.com/aquasecurity/defsec/pkg/debug" + 
"github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/rego" + "github.com/aquasecurity/defsec/pkg/rules" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/types" + + adapter "github.com/aquasecurity/trivy/internal/adapters/cloudformation" + "github.com/aquasecurity/trivy/pkg/scanners" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" +) + +func WithParameters(params map[string]any) options.ScannerOption { + return func(cs options.ConfigurableScanner) { + if s, ok := cs.(*Scanner); ok { + s.addParserOptions(parser.WithParameters(params)) + } + } +} + +func WithParameterFiles(files ...string) options.ScannerOption { + return func(cs options.ConfigurableScanner) { + if s, ok := cs.(*Scanner); ok { + s.addParserOptions(parser.WithParameterFiles(files...)) + } + } +} + +func WithConfigsFS(fsys fs.FS) options.ScannerOption { + return func(cs options.ConfigurableScanner) { + if s, ok := cs.(*Scanner); ok { + s.addParserOptions(parser.WithConfigsFS(fsys)) + } + } +} + +var _ scanners.FSScanner = (*Scanner)(nil) +var _ options.ConfigurableScanner = (*Scanner)(nil) + +type Scanner struct { + debug debug.Logger + policyDirs []string + policyReaders []io.Reader + parser *parser.Parser + regoScanner *rego.Scanner + skipRequired bool + regoOnly bool + loadEmbeddedPolicies bool + loadEmbeddedLibraries bool + options []options.ScannerOption + parserOptions []options.ParserOption + frameworks []framework.Framework + spec string + sync.Mutex +} + +func (s *Scanner) addParserOptions(opt options.ParserOption) { + s.parserOptions = append(s.parserOptions, opt) +} + +func (s *Scanner) SetFrameworks(frameworks []framework.Framework) { + s.frameworks = frameworks +} + +func (s *Scanner) SetSpec(spec string) { + s.spec = spec +} + +func (s *Scanner) SetUseEmbeddedPolicies(b bool) { + 
s.loadEmbeddedPolicies = b +} + +func (s *Scanner) SetUseEmbeddedLibraries(b bool) { + s.loadEmbeddedLibraries = b +} + +func (s *Scanner) SetRegoOnly(regoOnly bool) { + s.regoOnly = regoOnly +} + +func (s *Scanner) Name() string { + return "CloudFormation" +} + +func (s *Scanner) SetPolicyReaders(readers []io.Reader) { + s.policyReaders = readers +} + +func (s *Scanner) SetSkipRequiredCheck(skip bool) { + s.skipRequired = skip +} + +func (s *Scanner) SetDebugWriter(writer io.Writer) { + s.debug = debug.New(writer, "cloudformation", "scanner") +} + +func (s *Scanner) SetPolicyDirs(dirs ...string) { + s.policyDirs = dirs +} + +func (s *Scanner) SetPolicyFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} + +func (s *Scanner) SetDataFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} +func (s *Scanner) SetRegoErrorLimit(_ int) {} + +func (s *Scanner) SetTraceWriter(_ io.Writer) {} +func (s *Scanner) SetPerResultTracingEnabled(_ bool) {} +func (s *Scanner) SetDataDirs(_ ...string) {} +func (s *Scanner) SetPolicyNamespaces(_ ...string) {} + +// New creates a new Scanner +func New(opts ...options.ScannerOption) *Scanner { + s := &Scanner{ + options: opts, + } + for _, opt := range opts { + opt(s) + } + s.addParserOptions(options.ParserWithSkipRequiredCheck(s.skipRequired)) + s.parser = parser.New(s.parserOptions...) + return s +} + +func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { + s.Lock() + defer s.Unlock() + if s.regoScanner != nil { + return s.regoScanner, nil + } + regoScanner := rego.NewScanner(types.SourceCloud, s.options...) 
+ regoScanner.SetParentDebugLogger(s.debug) + if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, srcFS, s.policyDirs, s.policyReaders); err != nil { + return nil, err + } + s.regoScanner = regoScanner + return regoScanner, nil +} + +func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, dir string) (results scan.Results, err error) { + + contexts, err := s.parser.ParseFS(ctx, fs, dir) + if err != nil { + return nil, err + } + + if len(contexts) == 0 { + return nil, nil + } + + regoScanner, err := s.initRegoScanner(fs) + if err != nil { + return nil, err + } + + for _, cfCtx := range contexts { + if cfCtx == nil { + continue + } + fileResults, err := s.scanFileContext(ctx, regoScanner, cfCtx, fs) + if err != nil { + return nil, err + } + results = append(results, fileResults...) + } + sort.Slice(results, func(i, j int) bool { + return results[i].Rule().AVDID < results[j].Rule().AVDID + }) + return results, nil +} + +func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { + + cfCtx, err := s.parser.ParseFile(ctx, fs, path) + if err != nil { + return nil, err + } + + regoScanner, err := s.initRegoScanner(fs) + if err != nil { + return nil, err + } + + results, err := s.scanFileContext(ctx, regoScanner, cfCtx, fs) + if err != nil { + return nil, err + } + results.SetSourceAndFilesystem("", fs, false) + + sort.Slice(results, func(i, j int) bool { + return results[i].Rule().AVDID < results[j].Rule().AVDID + }) + return results, nil +} + +func (s *Scanner) scanFileContext(ctx context.Context, regoScanner *rego.Scanner, cfCtx *parser.FileContext, fs fs.FS) (results scan.Results, err error) { + state := adapter.Adapt(*cfCtx) + if state == nil { + return nil, nil + } + if !s.regoOnly { + for _, rule := range rules.GetRegistered(s.frameworks...) 
{ + select { + case <-ctx.Done(): + return nil, ctx.Err() + default: + } + if rule.GetRule().RegoPackage != "" { + continue + } + evalResult := rule.Evaluate(state) + if len(evalResult) > 0 { + s.debug.Log("Found %d results for %s", len(evalResult), rule.GetRule().AVDID) + for _, scanResult := range evalResult { + + ref := scanResult.Metadata().Reference() + + if ref == "" && scanResult.Metadata().Parent() != nil { + ref = scanResult.Metadata().Parent().Reference() + } + + description := getDescription(scanResult, ref) + scanResult.OverrideDescription(description) + results = append(results, scanResult) + } + } + } + } + regoResults, err := regoScanner.ScanInput(ctx, rego.Input{ + Path: cfCtx.Metadata().Range().GetFilename(), + FS: fs, + Contents: state.ToRego(), + }) + if err != nil { + return nil, fmt.Errorf("rego scan error: %w", err) + } + return append(results, regoResults...), nil +} + +func getDescription(scanResult scan.Result, ref string) string { + switch scanResult.Status() { + case scan.StatusPassed: + return fmt.Sprintf("Resource '%s' passed check: %s", ref, scanResult.Rule().Summary) + case scan.StatusIgnored: + return fmt.Sprintf("Resource '%s' had check ignored: %s", ref, scanResult.Rule().Summary) + default: + return scanResult.Description() + } +} diff --git a/pkg/scanners/cloudformation/scanner_test.go b/pkg/scanners/cloudformation/scanner_test.go new file mode 100644 index 000000000000..6f6792195b32 --- /dev/null +++ b/pkg/scanners/cloudformation/scanner_test.go @@ -0,0 +1,103 @@ +package cloudformation + +import ( + "context" + "testing" + + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +func Test_BasicScan(t *testing.T) { + + fs := testutil.CreateFS(t, 
map[string]string{ + "/code/main.yaml": `--- +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: public-bucket + +`, + "/rules/rule.rego": `package builtin.dockerfile.DS006 + +__rego_metadata__ := { + "id": "DS006", + "avd_id": "AVD-DS-0006", + "title": "COPY '--from' referring to the current image", + "short_code": "no-self-referencing-copy-from", + "version": "v1.0.0", + "severity": "CRITICAL", + "type": "Dockerfile Security Check", + "description": "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself.", + "recommended_actions": "Change the '--from' so that it will not refer to itself", + "url": "https://docs.docker.com/develop/develop-images/multistage-build/", +} + +__rego_input__ := { + "combine": false, + "selector": [{"type": "defsec", "subtypes": [{"service": "s3", "provider": "aws"}]}], +} + +deny[res] { + res := { + "msg": "oh no", + "filepath": "code/main.yaml", + "startline": 6, + "endline": 6, + } +} + +`, + }) + + scanner := New(options.ScannerWithPolicyDirs("rules"), options.ScannerWithRegoOnly(true)) + + results, err := scanner.ScanFS(context.TODO(), fs, "code") + require.NoError(t, err) + + require.Len(t, results.GetFailed(), 1) + + assert.Equal(t, scan.Rule{ + AVDID: "AVD-DS-0006", + Aliases: []string{"DS006"}, + ShortCode: "no-self-referencing-copy-from", + Summary: "COPY '--from' referring to the current image", + Explanation: "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself.", + Impact: "", + Resolution: "Change the '--from' so that it will not refer to itself", + Provider: "cloud", + Service: "general", + Links: []string{"https://docs.docker.com/develop/develop-images/multistage-build/"}, + Severity: "CRITICAL", + Terraform: &scan.EngineMetadata{}, + CloudFormation: &scan.EngineMetadata{}, + CustomChecks: scan.CustomChecks{ + Terraform: (*scan.TerraformCustomCheck)(nil), + }, + RegoPackage: 
"data.builtin.dockerfile.DS006", + Frameworks: map[framework.Framework][]string{}, + }, results.GetFailed()[0].Rule()) + + failure := results.GetFailed()[0] + actualCode, err := failure.GetCode() + require.NoError(t, err) + for i := range actualCode.Lines { + actualCode.Lines[i].Highlighted = "" + } + assert.Equal(t, []scan.Line{ + { + Number: 6, + Content: " BucketName: public-bucket", + IsCause: true, + FirstCause: true, + LastCause: true, + Annotation: "", + }, + }, actualCode.Lines) +} diff --git a/pkg/scanners/cloudformation/test/cf_scanning_test.go b/pkg/scanners/cloudformation/test/cf_scanning_test.go new file mode 100644 index 000000000000..04a8d5f1c4ec --- /dev/null +++ b/pkg/scanners/cloudformation/test/cf_scanning_test.go @@ -0,0 +1,48 @@ +package test + +import ( + "bytes" + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation" +) + +func Test_basic_cloudformation_scanning(t *testing.T) { + cfScanner := cloudformation.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + + results, err := cfScanner.ScanFS(context.TODO(), os.DirFS("./examples/bucket"), ".") + require.NoError(t, err) + + assert.Greater(t, len(results.GetFailed()), 0) +} + +func Test_cloudformation_scanning_has_expected_errors(t *testing.T) { + cfScanner := cloudformation.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + + results, err := cfScanner.ScanFS(context.TODO(), os.DirFS("./examples/bucket"), ".") + require.NoError(t, err) + + assert.Greater(t, len(results.GetFailed()), 0) +} + +func Test_cloudformation_scanning_with_debug(t *testing.T) { + + debugWriter := bytes.NewBufferString("") + + scannerOptions := []options.ScannerOption{ + options.ScannerWithDebug(debugWriter), + } + cfScanner := 
cloudformation.New(scannerOptions...) + + _, err := cfScanner.ScanFS(context.TODO(), os.DirFS("./examples/bucket"), ".") + require.NoError(t, err) + + // check debug is as expected + assert.Greater(t, len(debugWriter.String()), 0) +} diff --git a/pkg/scanners/cloudformation/test/examples/bucket/bucket.yaml b/pkg/scanners/cloudformation/test/examples/bucket/bucket.yaml new file mode 100644 index 000000000000..21f1c25042b0 --- /dev/null +++ b/pkg/scanners/cloudformation/test/examples/bucket/bucket.yaml @@ -0,0 +1,24 @@ +--- +AWSTemplateFormatVersion: "2010-09-09" +Description: An example Stack for a bucket +Parameters: + BucketName: + Type: String + Default: naughty-bucket + EncryptBucket: + Type: Boolean + Default: false +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: + Ref: BucketName + PublicAccessBlockConfiguration: + BlockPublicAcls: false + BlockPublicPolicy: false + IgnorePublicAcls: true + RestrictPublicBuckets: false + BucketEncryption: + ServerSideEncryptionConfiguration: + - BucketKeyEnabled: !Ref EncryptBucket diff --git a/pkg/scanners/cloudformation/test/examples/ignores/bucket_with_ignores.yaml b/pkg/scanners/cloudformation/test/examples/ignores/bucket_with_ignores.yaml new file mode 100644 index 000000000000..ec5e8a8d7661 --- /dev/null +++ b/pkg/scanners/cloudformation/test/examples/ignores/bucket_with_ignores.yaml @@ -0,0 +1,24 @@ +--- +AWSTemplateFormatVersion: "2010-09-09" +Description: An example Stack for a bucket +Parameters: + BucketName: + Type: String + Default: naughty-bucket + EncryptBucket: + Type: Boolean + Default: false +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: + Ref: BucketName + PublicAccessBlockConfiguration: + BlockPublicAcls: false + BlockPublicPolicy: false # cfsec:ignore:AVD-AWS-0087 + IgnorePublicAcls: true + RestrictPublicBuckets: false + BucketEncryption: + ServerSideEncryptionConfiguration: + - BucketKeyEnabled: !Ref EncryptBucket diff --git 
a/pkg/scanners/cloudformation/test/examples/roles/roles.yml b/pkg/scanners/cloudformation/test/examples/roles/roles.yml new file mode 100644 index 000000000000..5b927457762b --- /dev/null +++ b/pkg/scanners/cloudformation/test/examples/roles/roles.yml @@ -0,0 +1,51 @@ +Resources: + LambdaAPIRole: + Type: "AWS::IAM::Role" + Properties: + RoleName: "${self:service}-${self:provider.stage}-LambdaAPI" + Policies: + - PolicyName: "${self:service}-${self:provider.stage}-lambda" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "logs:CreateLogStream" + - "logs:CreateLogGroup" + - "logs:PutLogEvents" + Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:/aws/lambda/${self:service}-${self:provider.stage}*:*" + - !If + - EnableCrossAccountSnsPublish + - PolicyName: "${self:service}-${self:provider.stage}-asngen-sns-publish" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "SNS:Publish" + Resource: + - !Sub "arn:aws:sns:${self:provider.region}:${self:provider.itopia_account_id}:${self:provider.stage}-*-PurchaseOrder.fifo" + - !Sub "arn:aws:sns:${self:provider.region}:${self:provider.itopia_account_id}:${self:provider.stage}-*-Vendor.fifo" + - !Sub "arn:aws:sns:${self:provider.region}:${self:provider.itopia_account_id}:${self:provider.stage}-*-Customer.fifo" + - !Sub "arn:aws:sns:${self:provider.region}:${self:provider.itopia_account_id}:${self:provider.stage}-*-Manufacturer.fifo" + - !Sub "arn:aws:sns:${self:provider.region}:${self:provider.itopia_account_id}:${self:provider.stage}-*-ManufacturerItem.fifo" + - !Sub "arn:aws:sns:${self:provider.region}:${self:provider.itopia_account_id}:${self:provider.stage}-*-Item.fifo" + - !Sub "arn:aws:sns:${self:provider.region}:${self:provider.itopia_account_id}:${self:provider.stage}-*-VendorItem.fifo" + - !Ref "AWS::NoValue" + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Principal: + Service: + - 
"lambda.amazonaws.com" + Action: + - "sts:AssumeRole" + + + + +Conditions: + EnableCrossAccountSnsPublish: !Equals + - ${env:ALLOW_SNS_PUBLISH, true} + - true diff --git a/pkg/scanners/dockerfile/parser/parser.go b/pkg/scanners/dockerfile/parser/parser.go new file mode 100644 index 000000000000..b45c97adc80f --- /dev/null +++ b/pkg/scanners/dockerfile/parser/parser.go @@ -0,0 +1,151 @@ +package parser + +import ( + "context" + "fmt" + "io" + "io/fs" + "path/filepath" + "strings" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/providers/dockerfile" + "github.com/moby/buildkit/frontend/dockerfile/instructions" + "github.com/moby/buildkit/frontend/dockerfile/parser" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/detection" +) + +var _ options.ConfigurableParser = (*Parser)(nil) + +type Parser struct { + debug debug.Logger + skipRequired bool +} + +func (p *Parser) SetDebugWriter(writer io.Writer) { + p.debug = debug.New(writer, "dockerfile", "parser") +} + +func (p *Parser) SetSkipRequiredCheck(b bool) { + p.skipRequired = b +} + +// New creates a new Dockerfile parser +func New(options ...options.ParserOption) *Parser { + p := &Parser{} + for _, option := range options { + option(p) + } + return p +} + +func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[string]*dockerfile.Dockerfile, error) { + + files := make(map[string]*dockerfile.Dockerfile) + if err := fs.WalkDir(target, filepath.ToSlash(path), func(path string, entry fs.DirEntry, err error) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + if err != nil { + return err + } + if entry.IsDir() { + return nil + } + if !p.Required(path) { + return nil + } + df, err := p.ParseFile(ctx, target, path) + if err != nil { + // TODO add debug for parse errors + return nil + } + files[path] = df + return nil + }); err != nil { + return nil, 
err + } + return files, nil +} + +// ParseFile parses Dockerfile content from the provided filesystem path. +func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) (*dockerfile.Dockerfile, error) { + f, err := fs.Open(filepath.ToSlash(path)) + if err != nil { + return nil, err + } + defer func() { _ = f.Close() }() + return p.parse(path, f) +} + +func (p *Parser) Required(path string) bool { + if p.skipRequired { + return true + } + return detection.IsType(path, nil, detection.FileTypeDockerfile) +} + +func (p *Parser) parse(path string, r io.Reader) (*dockerfile.Dockerfile, error) { + parsed, err := parser.Parse(r) + if err != nil { + return nil, fmt.Errorf("dockerfile parse error: %w", err) + } + + var parsedFile dockerfile.Dockerfile + var stage dockerfile.Stage + var stageIndex int + fromValue := "args" + for _, child := range parsed.AST.Children { + child.Value = strings.ToLower(child.Value) + + instr, err := instructions.ParseInstruction(child) + if err != nil { + return nil, fmt.Errorf("process dockerfile instructions: %w", err) + } + + if _, ok := instr.(*instructions.Stage); ok { + if len(stage.Commands) > 0 { + parsedFile.Stages = append(parsedFile.Stages, stage) + } + if fromValue != "args" { + stageIndex++ + } + fromValue = strings.TrimSpace(strings.TrimPrefix(child.Original, "FROM ")) + stage = dockerfile.Stage{ + Name: fromValue, + } + } + + cmd := dockerfile.Command{ + Cmd: child.Value, + Original: child.Original, + Flags: child.Flags, + Stage: stageIndex, + Path: path, + StartLine: child.StartLine, + EndLine: child.EndLine, + } + + if child.Next != nil && len(child.Next.Children) > 0 { + cmd.SubCmd = child.Next.Children[0].Value + child = child.Next.Children[0] + } + + cmd.JSON = child.Attributes["json"] + for n := child.Next; n != nil; n = n.Next { + cmd.Value = append(cmd.Value, n.Value) + } + + stage.Commands = append(stage.Commands, cmd) + + } + if len(stage.Commands) > 0 { + parsedFile.Stages = append(parsedFile.Stages, stage) + } 
+ + return &parsedFile, nil +} diff --git a/pkg/scanners/dockerfile/parser/parser_test.go b/pkg/scanners/dockerfile/parser/parser_test.go new file mode 100644 index 000000000000..04a45ea4695d --- /dev/null +++ b/pkg/scanners/dockerfile/parser/parser_test.go @@ -0,0 +1,56 @@ +package parser + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Parser(t *testing.T) { + input := `FROM ubuntu:18.04 +COPY . /app +RUN make /app +CMD python /app/app.py +` + + df, err := New().parse("Dockerfile", strings.NewReader(input)) + require.NoError(t, err) + + assert.Equal(t, 1, len(df.Stages)) + + require.Len(t, df.Stages, 1) + + assert.Equal(t, "ubuntu:18.04", df.Stages[0].Name) + commands := df.Stages[0].Commands + assert.Equal(t, 4, len(commands)) + + // FROM ubuntu:18.04 + assert.Equal(t, "from", commands[0].Cmd) + assert.Equal(t, "ubuntu:18.04", commands[0].Value[0]) + assert.Equal(t, "Dockerfile", commands[0].Path) + assert.Equal(t, 1, commands[0].StartLine) + assert.Equal(t, 1, commands[0].EndLine) + + // COPY . /app + assert.Equal(t, "copy", commands[1].Cmd) + assert.Equal(t, ". 
/app", strings.Join(commands[1].Value, " ")) + assert.Equal(t, "Dockerfile", commands[1].Path) + assert.Equal(t, 2, commands[1].StartLine) + assert.Equal(t, 2, commands[1].EndLine) + + // RUN make /app + assert.Equal(t, "run", commands[2].Cmd) + assert.Equal(t, "make /app", commands[2].Value[0]) + assert.Equal(t, "Dockerfile", commands[2].Path) + assert.Equal(t, 3, commands[2].StartLine) + assert.Equal(t, 3, commands[2].EndLine) + + // CMD python /app/app.py + assert.Equal(t, "cmd", commands[3].Cmd) + assert.Equal(t, "python /app/app.py", commands[3].Value[0]) + assert.Equal(t, "Dockerfile", commands[3].Path) + assert.Equal(t, 4, commands[3].StartLine) + assert.Equal(t, 4, commands[3].EndLine) +} diff --git a/pkg/scanners/dockerfile/scanner.go b/pkg/scanners/dockerfile/scanner.go new file mode 100644 index 000000000000..a0d3e60f47a4 --- /dev/null +++ b/pkg/scanners/dockerfile/scanner.go @@ -0,0 +1,182 @@ +package dockerfile + +import ( + "context" + "io" + "io/fs" + "sync" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/rego" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/scanners" + "github.com/aquasecurity/trivy/pkg/scanners/dockerfile/parser" +) + +var _ scanners.FSScanner = (*Scanner)(nil) +var _ options.ConfigurableScanner = (*Scanner)(nil) + +type Scanner struct { + debug debug.Logger + policyDirs []string + policyReaders []io.Reader + parser *parser.Parser + regoScanner *rego.Scanner + skipRequired bool + options []options.ScannerOption + frameworks []framework.Framework + spec string + sync.Mutex + loadEmbeddedLibraries bool + loadEmbeddedPolicies bool +} + +func (s *Scanner) SetSpec(spec string) { + s.spec = spec +} + +func (s *Scanner) SetRegoOnly(bool) { +} + +func (s 
*Scanner) SetFrameworks(frameworks []framework.Framework) { + s.frameworks = frameworks +} + +func (s *Scanner) SetUseEmbeddedPolicies(b bool) { + s.loadEmbeddedPolicies = b +} + +func (s *Scanner) SetUseEmbeddedLibraries(b bool) { + s.loadEmbeddedLibraries = b +} + +func (s *Scanner) Name() string { + return "Dockerfile" +} + +func (s *Scanner) SetPolicyReaders(readers []io.Reader) { + s.policyReaders = readers +} + +func (s *Scanner) SetSkipRequiredCheck(skip bool) { + s.skipRequired = skip +} + +func (s *Scanner) SetDebugWriter(writer io.Writer) { + s.debug = debug.New(writer, "dockerfile", "scanner") +} + +func (s *Scanner) SetTraceWriter(_ io.Writer) { + // handled by rego later - nothing to do for now... +} + +func (s *Scanner) SetPerResultTracingEnabled(_ bool) { + // handled by rego later - nothing to do for now... +} + +func (s *Scanner) SetPolicyDirs(dirs ...string) { + s.policyDirs = dirs +} + +func (s *Scanner) SetDataDirs(_ ...string) { + // handled by rego later - nothing to do for now... +} + +func (s *Scanner) SetPolicyNamespaces(_ ...string) { + // handled by rego later - nothing to do for now... 
+} + +func (s *Scanner) SetPolicyFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} + +func (s *Scanner) SetDataFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} + +func (s *Scanner) SetRegoErrorLimit(_ int) { + // handled by rego when option is passed on +} + +func NewScanner(opts ...options.ScannerOption) *Scanner { + s := &Scanner{ + options: opts, + } + for _, opt := range opts { + opt(s) + } + s.parser = parser.New(options.ParserWithSkipRequiredCheck(s.skipRequired)) + return s +} + +func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { + + files, err := s.parser.ParseFS(ctx, fs, path) + if err != nil { + return nil, err + } + + if len(files) == 0 { + return nil, nil + } + + var inputs []rego.Input + for path, dfile := range files { + inputs = append(inputs, rego.Input{ + Path: path, + FS: fs, + Contents: dfile.ToRego(), + }) + } + + results, err := s.scanRego(ctx, fs, inputs...) + if err != nil { + return nil, err + } + return results, nil +} + +func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { + dockerfile, err := s.parser.ParseFile(ctx, fs, path) + if err != nil { + return nil, err + } + s.debug.Log("Scanning %s...", path) + return s.scanRego(ctx, fs, rego.Input{ + Path: path, + Contents: dockerfile.ToRego(), + }) +} + +func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { + s.Lock() + defer s.Unlock() + if s.regoScanner != nil { + return s.regoScanner, nil + } + + regoScanner := rego.NewScanner(types.SourceDockerfile, s.options...) 
+ regoScanner.SetParentDebugLogger(s.debug) + if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, srcFS, s.policyDirs, s.policyReaders); err != nil { + return nil, err + } + s.regoScanner = regoScanner + return regoScanner, nil +} + +func (s *Scanner) scanRego(ctx context.Context, srcFS fs.FS, inputs ...rego.Input) (scan.Results, error) { + regoScanner, err := s.initRegoScanner(srcFS) + if err != nil { + return nil, err + } + results, err := regoScanner.ScanInput(ctx, inputs...) + if err != nil { + return nil, err + } + results.SetSourceAndFilesystem("", srcFS, false) + return results, nil +} diff --git a/pkg/scanners/dockerfile/scanner_test.go b/pkg/scanners/dockerfile/scanner_test.go new file mode 100644 index 000000000000..a4cf64b377a9 --- /dev/null +++ b/pkg/scanners/dockerfile/scanner_test.go @@ -0,0 +1,638 @@ +package dockerfile + +import ( + "bytes" + "context" + "testing" + + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/rego" + "github.com/aquasecurity/defsec/pkg/rego/schemas" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +const DS006PolicyWithDockerfileSchema = `# METADATA +# title: "COPY '--from' referring to the current image" +# description: "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself." 
+# scope: package +# schemas: +# - input: schema["dockerfile"] +# related_resources: +# - https://docs.docker.com/develop/develop-images/multistage-build/ +# custom: +# id: DS006 +# avd_id: AVD-DS-0006 +# severity: CRITICAL +# short_code: no-self-referencing-copy-from +# recommended_action: "Change the '--from' so that it will not refer to itself" +# input: +# selector: +# - type: dockerfile +package builtin.dockerfile.DS006 + +import data.lib.docker + +get_alias_from_copy[output] { + copies := docker.stage_copies[stage] + + copy := copies[_] + flag := copy.Flags[_] + contains(flag, "--from=") + parts := split(flag, "=") + + is_alias_current_from_alias(stage.Name, parts[1]) + args := parts[1] + output := { + "args": args, + "cmd": copy, + } +} + +is_alias_current_from_alias(current_name, current_alias) = allow { + current_name_lower := lower(current_name) + current_alias_lower := lower(current_alias) + + #expecting stage name as "myimage:tag as dep" + [_, alias] := regex.split(` + "`\\s+as\\s+`" + `, current_name_lower) + + alias == current_alias + + allow = true +} + +deny[res] { + output := get_alias_from_copy[_] + msg := sprintf("'COPY --from' should not mention current alias '%s' since it is impossible to copy from itself", [output.args]) + res := result.new(msg, output.cmd) +} +` + +const DS006PolicyWithMyFancyDockerfileSchema = `# METADATA +# title: "COPY '--from' referring to the current image" +# description: "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself." 
+# scope: package +# schemas: +# - input: schema["myfancydockerfile"] +# related_resources: +# - https://docs.docker.com/develop/develop-images/multistage-build/ +# custom: +# id: DS006 +# avd_id: AVD-DS-0006 +# severity: CRITICAL +# short_code: no-self-referencing-copy-from +# recommended_action: "Change the '--from' so that it will not refer to itself" +# input: +# selector: +# - type: dockerfile +package builtin.dockerfile.DS006 + +import data.lib.docker + +get_alias_from_copy[output] { +copies := docker.stage_copies[stage] + +copy := copies[_] +flag := copy.Flags[_] +contains(flag, "--from=") +parts := split(flag, "=") + +is_alias_current_from_alias(stage.Name, parts[1]) +args := parts[1] +output := { +"args": args, +"cmd": copy, +} +} + +is_alias_current_from_alias(current_name, current_alias) = allow { +current_name_lower := lower(current_name) +current_alias_lower := lower(current_alias) + +#expecting stage name as "myimage:tag as dep" +[_, alias] := regex.split(` + "`\\s+as\\s+`" + `, current_name_lower) + +alias == current_alias + +allow = true +} + +deny[res] { +output := get_alias_from_copy[_] +msg := sprintf("'COPY --from' should not mention current alias '%s' since it is impossible to copy from itself", [output.args]) +res := result.new(msg, output.cmd) +} +` + +const DS006PolicyWithOldSchemaSelector = `# METADATA +# title: "COPY '--from' referring to the current image" +# description: "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself." 
+# scope: package +# schemas: +# - input: schema["input"] +# related_resources: +# - https://docs.docker.com/develop/develop-images/multistage-build/ +# custom: +# id: DS006 +# avd_id: AVD-DS-0006 +# severity: CRITICAL +# short_code: no-self-referencing-copy-from +# recommended_action: "Change the '--from' so that it will not refer to itself" +# input: +# selector: +# - type: dockerfile +package builtin.dockerfile.DS006 + +import data.lib.docker + +get_alias_from_copy[output] { + copies := docker.stage_copies[stage] + + copy := copies[_] + flag := copy.Flags[_] + contains(flag, "--from=") + parts := split(flag, "=") + + is_alias_current_from_alias(stage.Name, parts[1]) + args := parts[1] + output := { + "args": args, + "cmd": copy, + } +} + +is_alias_current_from_alias(current_name, current_alias) = allow { + current_name_lower := lower(current_name) + current_alias_lower := lower(current_alias) + + #expecting stage name as "myimage:tag as dep" + [_, alias] := regex.split(` + "`\\s+as\\s+`" + `, current_name_lower) + + alias == current_alias + + allow = true +} + +deny[res] { + output := get_alias_from_copy[_] + msg := sprintf("'COPY --from' should not mention current alias '%s' since it is impossible to copy from itself", [output.args]) + res := result.new(msg, output.cmd) +} +` +const DS006LegacyWithOldStyleMetadata = `package builtin.dockerfile.DS006 + +__rego_metadata__ := { + "id": "DS006", + "avd_id": "AVD-DS-0006", + "title": "COPY '--from' referring to the current image", + "short_code": "no-self-referencing-copy-from", + "version": "v1.0.0", + "severity": "CRITICAL", + "type": "Dockerfile Security Check", + "description": "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself.", + "recommended_actions": "Change the '--from' so that it will not refer to itself", + "url": "https://docs.docker.com/develop/develop-images/multistage-build/", +} + +__rego_input__ := { + "combine": false, + "selector": [{"type": 
"dockerfile"}], +} + +deny[res] { + res := { + "msg": "oh no", + "filepath": "code/Dockerfile", + "startline": 1, + "endline": 1, + } +}` + +func Test_BasicScanLegacyRegoMetadata(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "/code/Dockerfile": `FROM ubuntu +USER root +`, + "/rules/rule.rego": DS006LegacyWithOldStyleMetadata, + }) + + scanner := NewScanner(options.ScannerWithPolicyDirs("rules")) + + results, err := scanner.ScanFS(context.TODO(), fs, "code") + require.NoError(t, err) + + require.Len(t, results.GetFailed(), 1) + + failure := results.GetFailed()[0] + metadata := failure.Metadata() + assert.Equal(t, 1, metadata.Range().GetStartLine()) + assert.Equal(t, 1, metadata.Range().GetEndLine()) + assert.Equal(t, "code/Dockerfile", metadata.Range().GetFilename()) + + assert.Equal( + t, + scan.Rule{ + AVDID: "AVD-DS-0006", + Aliases: []string{"DS006"}, + ShortCode: "no-self-referencing-copy-from", + Summary: "COPY '--from' referring to the current image", + Explanation: "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself.", + Impact: "", + Resolution: "Change the '--from' so that it will not refer to itself", + Provider: "dockerfile", + Service: "general", + Links: []string{"https://docs.docker.com/develop/develop-images/multistage-build/"}, + Severity: "CRITICAL", + Terraform: &scan.EngineMetadata{}, + CloudFormation: &scan.EngineMetadata{}, + CustomChecks: scan.CustomChecks{ + Terraform: (*scan.TerraformCustomCheck)(nil)}, + RegoPackage: "data.builtin.dockerfile.DS006", + Frameworks: map[framework.Framework][]string{}, + }, + results.GetFailed()[0].Rule(), + ) + + actualCode, err := results.GetFailed()[0].GetCode() + require.NoError(t, err) + for i := range actualCode.Lines { + actualCode.Lines[i].Highlighted = "" + } + assert.Equal(t, []scan.Line{ + { + Number: 1, + Content: "FROM ubuntu", + IsCause: true, + FirstCause: true, + LastCause: true, + Annotation: "", + }, + }, actualCode.Lines) +} 
+ +func Test_BasicScanNewRegoMetadata(t *testing.T) { + var testCases = []struct { + name string + inputRegoPolicy string + expectedError string + expectedInputTraceLogs string + expectedOutputTraceLogs string + }{ + { + name: "old schema selector schema.input", + inputRegoPolicy: DS006PolicyWithOldSchemaSelector, + expectedInputTraceLogs: `REGO INPUT: +{ + "path": "code/Dockerfile", + "contents": { + "Stages": [ + { + "Commands": [ + { + "Cmd": "from", + "EndLine": 1, + "Flags": [], + "JSON": false, + "Original": "FROM golang:1.7.3 as dep", + "Path": "code/Dockerfile", + "Stage": 0, + "StartLine": 1, + "SubCmd": "", + "Value": [ + "golang:1.7.3", + "as", + "dep" + ] + }, + { + "Cmd": "copy", + "EndLine": 2, + "Flags": [ + "--from=dep" + ], + "JSON": false, + "Original": "COPY --from=dep /binary /", + "Path": "code/Dockerfile", + "Stage": 0, + "StartLine": 2, + "SubCmd": "", + "Value": [ + "/binary", + "/" + ] + } + ], + "Name": "golang:1.7.3 as dep" + } + ] + } +} +END REGO INPUT +`, + expectedOutputTraceLogs: `REGO RESULTSET: +[ + { + "expressions": [ + { + "value": [ + { + "endline": 2, + "explicit": false, + "filepath": "code/Dockerfile", + "fskey": "", + "managed": true, + "msg": "'COPY --from' should not mention current alias 'dep' since it is impossible to copy from itself", + "parent": null, + "resource": "", + "sourceprefix": "", + "startline": 2 + } + ], + "text": "data.builtin.dockerfile.DS006.deny", + "location": { + "row": 1, + "col": 1 + } + } + ] + } +] +END REGO RESULTSET + +`, + }, + { + name: "new schema selector schema.dockerfile", + inputRegoPolicy: DS006PolicyWithDockerfileSchema, + expectedInputTraceLogs: `REGO INPUT: +{ + "path": "code/Dockerfile", + "contents": { + "Stages": [ + { + "Commands": [ + { + "Cmd": "from", + "EndLine": 1, + "Flags": [], + "JSON": false, + "Original": "FROM golang:1.7.3 as dep", + "Path": "code/Dockerfile", + "Stage": 0, + "StartLine": 1, + "SubCmd": "", + "Value": [ + "golang:1.7.3", + "as", + "dep" + ] + }, + { + 
"Cmd": "copy", + "EndLine": 2, + "Flags": [ + "--from=dep" + ], + "JSON": false, + "Original": "COPY --from=dep /binary /", + "Path": "code/Dockerfile", + "Stage": 0, + "StartLine": 2, + "SubCmd": "", + "Value": [ + "/binary", + "/" + ] + } + ], + "Name": "golang:1.7.3 as dep" + } + ] + } +} +END REGO INPUT +`, + expectedOutputTraceLogs: `REGO RESULTSET: +[ + { + "expressions": [ + { + "value": [ + { + "endline": 2, + "explicit": false, + "filepath": "code/Dockerfile", + "fskey": "", + "managed": true, + "msg": "'COPY --from' should not mention current alias 'dep' since it is impossible to copy from itself", + "parent": null, + "resource": "", + "sourceprefix": "", + "startline": 2 + } + ], + "text": "data.builtin.dockerfile.DS006.deny", + "location": { + "row": 1, + "col": 1 + } + } + ] + } +] +END REGO RESULTSET + +`, + }, + { + name: "new schema selector with custom schema.myfancydockerfile", + inputRegoPolicy: DS006PolicyWithMyFancyDockerfileSchema, + expectedInputTraceLogs: `REGO INPUT: +{ + "path": "code/Dockerfile", + "contents": { + "Stages": [ + { + "Commands": [ + { + "Cmd": "from", + "EndLine": 1, + "Flags": [], + "JSON": false, + "Original": "FROM golang:1.7.3 as dep", + "Path": "code/Dockerfile", + "Stage": 0, + "StartLine": 1, + "SubCmd": "", + "Value": [ + "golang:1.7.3", + "as", + "dep" + ] + }, + { + "Cmd": "copy", + "EndLine": 2, + "Flags": [ + "--from=dep" + ], + "JSON": false, + "Original": "COPY --from=dep /binary /", + "Path": "code/Dockerfile", + "Stage": 0, + "StartLine": 2, + "SubCmd": "", + "Value": [ + "/binary", + "/" + ] + } + ], + "Name": "golang:1.7.3 as dep" + } + ] + } +} +END REGO INPUT +`, + expectedOutputTraceLogs: `REGO RESULTSET: +[ + { + "expressions": [ + { + "value": [ + { + "endline": 2, + "explicit": false, + "filepath": "code/Dockerfile", + "fskey": "", + "managed": true, + "msg": "'COPY --from' should not mention current alias 'dep' since it is impossible to copy from itself", + "parent": null, + "resource": "", + 
"sourceprefix": "", + "startline": 2 + } + ], + "text": "data.builtin.dockerfile.DS006.deny", + "location": { + "row": 1, + "col": 1 + } + } + ] + } +] +END REGO RESULTSET + +`, + }, + { + name: "new schema selector but invalid", + inputRegoPolicy: `# METADATA +# title: "COPY '--from' referring to the current image" +# description: "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself." +# scope: package +# schemas: +# - input: schema["spooky-schema"] +# custom: +# input: +# selector: +# - type: dockerfile +package builtin.dockerfile.DS006 +deny[res]{ +res := true +}`, + expectedError: `1 error occurred: rules/rule.rego:12: rego_type_error: undefined schema: schema["spooky-schema"]`, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + regoMap := make(map[string]string) + libs, err := rego.LoadEmbeddedLibraries() + require.NoError(t, err) + for name, library := range libs { + regoMap["/rules/"+name] = library.String() + } + regoMap["/code/Dockerfile"] = `FROM golang:1.7.3 as dep +COPY --from=dep /binary /` + regoMap["/rules/rule.rego"] = tc.inputRegoPolicy + regoMap["/rules/schemas/myfancydockerfile.json"] = string(schemas.Dockerfile) // just use the same for testing + fs := testutil.CreateFS(t, regoMap) + + var traceBuf bytes.Buffer + var debugBuf bytes.Buffer + + scanner := NewScanner( + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithTrace(&traceBuf), + options.ScannerWithDebug(&debugBuf), + options.ScannerWithRegoErrorLimits(0), + ) + + results, err := scanner.ScanFS(context.TODO(), fs, "code") + if tc.expectedError != "" && err != nil { + require.Equal(t, tc.expectedError, err.Error(), tc.name) + } else { + require.NoError(t, err) + require.Len(t, results.GetFailed(), 1) + + failure := results.GetFailed()[0] + metadata := failure.Metadata() + assert.Equal(t, 2, metadata.Range().GetStartLine()) + assert.Equal(t, 2, metadata.Range().GetEndLine()) + assert.Equal(t, 
"code/Dockerfile", metadata.Range().GetFilename()) + + assert.Equal( + t, + scan.Rule{ + AVDID: "AVD-DS-0006", + Aliases: []string{"DS006"}, + ShortCode: "no-self-referencing-copy-from", + Summary: "COPY '--from' referring to the current image", + Explanation: "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself.", + Impact: "", + Resolution: "Change the '--from' so that it will not refer to itself", + Provider: "dockerfile", + Service: "general", + Links: []string{"https://docs.docker.com/develop/develop-images/multistage-build/"}, + Severity: "CRITICAL", + Terraform: &scan.EngineMetadata{}, + CloudFormation: &scan.EngineMetadata{}, + CustomChecks: scan.CustomChecks{ + Terraform: (*scan.TerraformCustomCheck)(nil)}, + RegoPackage: "data.builtin.dockerfile.DS006", + Frameworks: map[framework.Framework][]string{}, + }, + results.GetFailed()[0].Rule(), + ) + + actualCode, err := results.GetFailed()[0].GetCode() + require.NoError(t, err) + for i := range actualCode.Lines { + actualCode.Lines[i].Highlighted = "" + } + assert.Equal(t, []scan.Line{ + { + Number: 2, + Content: "COPY --from=dep /binary /", + IsCause: true, + FirstCause: true, + LastCause: true, + Annotation: "", + }, + }, actualCode.Lines) + + // assert logs + assert.Contains(t, traceBuf.String(), tc.expectedInputTraceLogs, traceBuf.String()) + assert.Contains(t, traceBuf.String(), tc.expectedOutputTraceLogs, traceBuf.String()) + } + }) + } + +} diff --git a/pkg/scanners/helm/options.go b/pkg/scanners/helm/options.go new file mode 100644 index 000000000000..7754f2978df8 --- /dev/null +++ b/pkg/scanners/helm/options.go @@ -0,0 +1,51 @@ +package helm + +import ( + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/scanners/helm/parser" +) + +type ConfigurableHelmScanner interface { + options.ConfigurableScanner + AddParserOptions(options ...options.ParserOption) +} + +func ScannerWithValuesFile(paths 
...string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if helmScanner, ok := s.(ConfigurableHelmScanner); ok { + helmScanner.AddParserOptions(parser.OptionWithValuesFile(paths...)) + } + } +} + +func ScannerWithValues(values ...string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if helmScanner, ok := s.(ConfigurableHelmScanner); ok { + helmScanner.AddParserOptions(parser.OptionWithValues(values...)) + } + } +} + +func ScannerWithFileValues(values ...string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if helmScanner, ok := s.(ConfigurableHelmScanner); ok { + helmScanner.AddParserOptions(parser.OptionWithFileValues(values...)) + } + } +} + +func ScannerWithStringValues(values ...string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if helmScanner, ok := s.(ConfigurableHelmScanner); ok { + helmScanner.AddParserOptions(parser.OptionWithStringValues(values...)) + } + } +} + +func ScannerWithAPIVersions(values ...string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if helmScanner, ok := s.(ConfigurableHelmScanner); ok { + helmScanner.AddParserOptions(parser.OptionWithAPIVersions(values...)) + } + } +} diff --git a/pkg/scanners/helm/parser/option.go b/pkg/scanners/helm/parser/option.go new file mode 100644 index 000000000000..6cf79009615d --- /dev/null +++ b/pkg/scanners/helm/parser/option.go @@ -0,0 +1,52 @@ +package parser + +import "github.com/aquasecurity/defsec/pkg/scanners/options" + +type ConfigurableHelmParser interface { + options.ConfigurableParser + SetValuesFile(...string) + SetValues(...string) + SetFileValues(...string) + SetStringValues(...string) + SetAPIVersions(...string) +} + +func OptionWithValuesFile(paths ...string) options.ParserOption { + return func(p options.ConfigurableParser) { + if helmParser, ok := p.(ConfigurableHelmParser); ok { + helmParser.SetValuesFile(paths...) 
+ } + } +} + +func OptionWithValues(values ...string) options.ParserOption { + return func(p options.ConfigurableParser) { + if helmParser, ok := p.(ConfigurableHelmParser); ok { + helmParser.SetValues(values...) + } + } +} + +func OptionWithFileValues(values ...string) options.ParserOption { + return func(p options.ConfigurableParser) { + if helmParser, ok := p.(ConfigurableHelmParser); ok { + helmParser.SetFileValues(values...) + } + } +} + +func OptionWithStringValues(values ...string) options.ParserOption { + return func(p options.ConfigurableParser) { + if helmParser, ok := p.(ConfigurableHelmParser); ok { + helmParser.SetStringValues(values...) + } + } +} + +func OptionWithAPIVersions(values ...string) options.ParserOption { + return func(p options.ConfigurableParser) { + if helmParser, ok := p.(ConfigurableHelmParser); ok { + helmParser.SetAPIVersions(values...) + } + } +} diff --git a/pkg/scanners/helm/parser/parser.go b/pkg/scanners/helm/parser/parser.go new file mode 100644 index 000000000000..408f43069aff --- /dev/null +++ b/pkg/scanners/helm/parser/parser.go @@ -0,0 +1,322 @@ +package parser + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "io/fs" + "os" + "path/filepath" + "regexp" + "sort" + "strings" + + "gopkg.in/yaml.v3" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/google/uuid" + "helm.sh/helm/v3/pkg/action" + "helm.sh/helm/v3/pkg/chart" + "helm.sh/helm/v3/pkg/chart/loader" + "helm.sh/helm/v3/pkg/release" + "helm.sh/helm/v3/pkg/releaseutil" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/detection" +) + +var manifestNameRegex = regexp.MustCompile("# Source: [^/]+/(.+)") + +type Parser struct { + helmClient *action.Install + rootPath string + ChartSource string + filepaths []string + debug debug.Logger + skipRequired bool + workingFS fs.FS + valuesFiles []string + values []string + fileValues []string + stringValues []string + apiVersions []string +} + 
+type ChartFile struct { + TemplateFilePath string + ManifestContent string +} + +func (p *Parser) SetDebugWriter(writer io.Writer) { + p.debug = debug.New(writer, "helm", "parser") +} + +func (p *Parser) SetSkipRequiredCheck(b bool) { + p.skipRequired = b +} + +func (p *Parser) SetValuesFile(s ...string) { + p.valuesFiles = s +} + +func (p *Parser) SetValues(values ...string) { + p.values = values +} + +func (p *Parser) SetFileValues(values ...string) { + p.fileValues = values +} + +func (p *Parser) SetStringValues(values ...string) { + p.stringValues = values +} + +func (p *Parser) SetAPIVersions(values ...string) { + p.apiVersions = values +} + +func New(path string, options ...options.ParserOption) *Parser { + + client := action.NewInstall(&action.Configuration{}) + client.DryRun = true // don't do anything + client.Replace = true // skip name check + client.ClientOnly = true // don't try to talk to a cluster + + p := &Parser{ + helmClient: client, + ChartSource: path, + } + + for _, option := range options { + option(p) + } + + if p.apiVersions != nil { + p.helmClient.APIVersions = p.apiVersions + } + + return p +} + +func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) error { + p.workingFS = target + + if err := fs.WalkDir(p.workingFS, filepath.ToSlash(path), func(path string, entry fs.DirEntry, err error) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + if err != nil { + return err + } + if entry.IsDir() { + return nil + } + + if !p.required(path, p.workingFS) { + return nil + } + + if detection.IsArchive(path) { + tarFS, err := p.addTarToFS(path) + if errors.Is(err, errSkipFS) { + // an unpacked Chart already exists + return nil + } else if err != nil { + return fmt.Errorf("failed to add tar %q to FS: %w", path, err) + } + + targetPath := filepath.Dir(path) + if targetPath == "" { + targetPath = "." 
+ } + + if err := p.ParseFS(ctx, tarFS, targetPath); err != nil { + return fmt.Errorf("parse tar FS error: %w", err) + } + return nil + } else { + return p.addPaths(path) + } + }); err != nil { + return fmt.Errorf("walk dir error: %w", err) + } + + return nil +} + +func (p *Parser) addPaths(paths ...string) error { + for _, path := range paths { + if _, err := fs.Stat(p.workingFS, path); err != nil { + return err + } + + if strings.HasSuffix(path, "Chart.yaml") && p.rootPath == "" { + if err := p.extractChartName(path); err != nil { + return err + } + p.rootPath = filepath.Dir(path) + } + p.filepaths = append(p.filepaths, path) + } + return nil +} + +func (p *Parser) extractChartName(chartPath string) error { + + chart, err := p.workingFS.Open(chartPath) + if err != nil { + return err + } + defer func() { _ = chart.Close() }() + + var chartContent map[string]interface{} + if err := yaml.NewDecoder(chart).Decode(&chartContent); err != nil { + // the chart likely has the name templated and so cannot be parsed as yaml - use a temporary name + if dir := filepath.Dir(chartPath); dir != "" && dir != "." 
{ + p.helmClient.ReleaseName = dir + } else { + p.helmClient.ReleaseName = uuid.NewString() + } + return nil + } + + if name, ok := chartContent["name"]; !ok { + return fmt.Errorf("could not extract the chart name from %s", chartPath) + } else { + p.helmClient.ReleaseName = fmt.Sprintf("%v", name) + } + return nil +} + +func (p *Parser) RenderedChartFiles() ([]ChartFile, error) { + + tempDir, err := os.MkdirTemp(os.TempDir(), "defsec") + if err != nil { + return nil, err + } + + if err := p.writeBuildFiles(tempDir); err != nil { + return nil, err + } + + workingChart, err := loadChart(tempDir) + if err != nil { + return nil, err + } + + workingRelease, err := p.getRelease(workingChart) + if err != nil { + return nil, err + } + + var manifests bytes.Buffer + _, _ = fmt.Fprintln(&manifests, strings.TrimSpace(workingRelease.Manifest)) + + splitManifests := releaseutil.SplitManifests(manifests.String()) + manifestsKeys := make([]string, 0, len(splitManifests)) + for k := range splitManifests { + manifestsKeys = append(manifestsKeys, k) + } + return p.getRenderedManifests(manifestsKeys, splitManifests), nil +} + +func (p *Parser) getRelease(chart *chart.Chart) (*release.Release, error) { + opts := &ValueOptions{ + ValueFiles: p.valuesFiles, + Values: p.values, + FileValues: p.fileValues, + StringValues: p.stringValues, + } + + vals, err := opts.MergeValues() + if err != nil { + return nil, err + } + r, err := p.helmClient.RunWithContext(context.Background(), chart, vals) + if err != nil { + return nil, err + } + + if r == nil { + return nil, fmt.Errorf("there is nothing in the release") + } + return r, nil +} + +func loadChart(tempFs string) (*chart.Chart, error) { + loadedChart, err := loader.Load(tempFs) + if err != nil { + return nil, err + } + + if req := loadedChart.Metadata.Dependencies; req != nil { + if err := action.CheckDependencies(loadedChart, req); err != nil { + return nil, err + } + } + + return loadedChart, nil +} + +func (*Parser) 
getRenderedManifests(manifestsKeys []string, splitManifests map[string]string) []ChartFile { + sort.Sort(releaseutil.BySplitManifestsOrder(manifestsKeys)) + var manifestsToRender []ChartFile + for _, manifestKey := range manifestsKeys { + manifest := splitManifests[manifestKey] + submatch := manifestNameRegex.FindStringSubmatch(manifest) + if len(submatch) == 0 { + continue + } + manifestsToRender = append(manifestsToRender, ChartFile{ + TemplateFilePath: getManifestPath(manifest), + ManifestContent: manifest, + }) + } + return manifestsToRender +} + +func getManifestPath(manifest string) string { + lines := strings.Split(manifest, "\n") + if len(lines) == 0 { + return "unknown.yaml" + } + manifestFilePathParts := strings.SplitN(strings.TrimPrefix(lines[0], "# Source: "), "/", 2) + if len(manifestFilePathParts) > 1 { + return manifestFilePathParts[1] + } + return manifestFilePathParts[0] +} + +func (p *Parser) writeBuildFiles(tempFs string) error { + for _, path := range p.filepaths { + content, err := fs.ReadFile(p.workingFS, path) + if err != nil { + return err + } + workingPath := strings.TrimPrefix(path, p.rootPath) + workingPath = filepath.Join(tempFs, workingPath) + if err := os.MkdirAll(filepath.Dir(workingPath), os.ModePerm); err != nil { + return err + } + if err := os.WriteFile(workingPath, content, os.ModePerm); err != nil { + return err + } + } + return nil +} + +func (p *Parser) required(path string, workingFS fs.FS) bool { + if p.skipRequired { + return true + } + content, err := fs.ReadFile(workingFS, path) + if err != nil { + return false + } + + return detection.IsType(path, bytes.NewReader(content), detection.FileTypeHelm) +} diff --git a/pkg/scanners/helm/parser/parser_tar.go b/pkg/scanners/helm/parser/parser_tar.go new file mode 100644 index 000000000000..0c77408ea7d6 --- /dev/null +++ b/pkg/scanners/helm/parser/parser_tar.go @@ -0,0 +1,110 @@ +package parser + +import ( + "archive/tar" + "bytes" + "compress/gzip" + "errors" + "fmt" + "io" + 
"io/fs" + "os" + "path/filepath" + + "github.com/aquasecurity/trivy/pkg/detection" + "github.com/liamg/memoryfs" +) + +var errSkipFS = errors.New("skip parse FS") + +func (p *Parser) addTarToFS(path string) (fs.FS, error) { + tarFS := memoryfs.CloneFS(p.workingFS) + + file, err := tarFS.Open(path) + if err != nil { + return nil, fmt.Errorf("failed to open tar: %w", err) + } + defer file.Close() + + var tr *tar.Reader + + if detection.IsZip(path) { + zipped, err := gzip.NewReader(file) + if err != nil { + return nil, fmt.Errorf("failed to create gzip reader: %w", err) + } + defer zipped.Close() + tr = tar.NewReader(zipped) + } else { + tr = tar.NewReader(file) + } + + checkExistedChart := true + + for { + header, err := tr.Next() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return nil, fmt.Errorf("failed to get next entry: %w", err) + } + + if checkExistedChart { + // Do not add archive files to FS if the chart already exists + // This can happen when the source chart is located next to an archived chart (with the `helm package` command) + // The first level folder in the archive is equal to the Chart name + if _, err := tarFS.Stat(filepath.Dir(path) + "/" + filepath.Dir(header.Name)); err == nil { + return nil, errSkipFS + } + checkExistedChart = false + } + + // get the individual path and extract to the current directory + entryPath := header.Name + + switch header.Typeflag { + case tar.TypeDir: + if err := tarFS.MkdirAll(entryPath, os.FileMode(header.Mode)); err != nil && !errors.Is(err, fs.ErrExist) { + return nil, err + } + case tar.TypeReg: + writePath := filepath.Dir(path) + "/" + entryPath + p.debug.Log("Unpacking tar entry %s", writePath) + + _ = tarFS.MkdirAll(filepath.Dir(writePath), fs.ModePerm) + + buf, err := copyChunked(tr, 1024) + if err != nil { + return nil, err + } + + p.debug.Log("writing file contents to %s", writePath) + if err := tarFS.WriteFile(writePath, buf.Bytes(), fs.ModePerm); err != nil { + return nil, 
fmt.Errorf("write file error: %w", err) + } + default: + return nil, fmt.Errorf("header type %q is not supported", header.Typeflag) + } + } + + if err := tarFS.Remove(path); err != nil { + return nil, fmt.Errorf("failed to remove tar from FS: %w", err) + } + + return tarFS, nil +} + +func copyChunked(src io.Reader, chunkSize int64) (*bytes.Buffer, error) { + buf := new(bytes.Buffer) + for { + if _, err := io.CopyN(buf, src, chunkSize); err != nil { + if errors.Is(err, io.EOF) { + break + } + return nil, fmt.Errorf("failed to copy: %w", err) + } + } + + return buf, nil +} diff --git a/pkg/scanners/helm/parser/parser_test.go b/pkg/scanners/helm/parser/parser_test.go new file mode 100644 index 000000000000..c146b8f9e18f --- /dev/null +++ b/pkg/scanners/helm/parser/parser_test.go @@ -0,0 +1,24 @@ +package parser + +import ( + "context" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestParseFS(t *testing.T) { + t.Run("source chart is located next to an same archived chart", func(t *testing.T) { + p := New(".") + require.NoError(t, p.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", "chart-and-archived-chart")), ".")) + + expectedFiles := []string{ + "my-chart/Chart.yaml", + "my-chart/templates/pod.yaml", + } + assert.Equal(t, expectedFiles, p.filepaths) + }) +} diff --git a/pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart-0.1.0.tgz b/pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart-0.1.0.tgz new file mode 100644 index 0000000000000000000000000000000000000000..e36b2b474f3e54d8048b61ca6f9bc5a47afda833 GIT binary patch literal 419 zcmV;U0bKqciwG0|00000|0w_~VMtOiV@ORlOnEsqVl!4SWK%V1T2nbTPgYhoO;>Dc zVQyr3R8em|NM&qo0PK{_Zq+alg}wGu%nKyfPTUrGhhTvO3s&r>$p9`&G!Bo=J7p^-@)s zkljt!GY}WAbVMk$-%HJm$+F|Mz&mDjD{Bjq8PSmwR=@QJL0 zd1T;wNbvdLmXfDJnR|4kVHqf1rbrqvMr#JF(fqt*7OiXr|KF`2FveE@7jn^Jp?r#A 
zyzD*jC;wk{YUh8YRCoCQU$FH*#+XlM$$uC>YWPUehRKmH+)=?13u+GV=yHE&{Lf~5 zmH=41C7Y`RYNjU*&yj|@nP|QUvWA`mWU@4rkRIg@%W(f?6aOLuz%rXp>P9Surk1T#ZMS}lA(hQ(&2!SPAr%epUd_KhI5>FQ{0aa7 N|NoRUvhx58002m6!sGw| literal 0 HcmV?d00001 diff --git a/pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/Chart.yaml b/pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/Chart.yaml new file mode 100644 index 000000000000..767f748a8d59 --- /dev/null +++ b/pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/Chart.yaml @@ -0,0 +1,6 @@ +apiVersion: v2 +name: my-chart +description: A Helm chart for Kubernetes +type: application +version: 0.1.0 +appVersion: "1.16.0" diff --git a/pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/templates/pod.yaml b/pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/templates/pod.yaml new file mode 100644 index 000000000000..3649247c1bb1 --- /dev/null +++ b/pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/templates/pod.yaml @@ -0,0 +1,21 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: nginx-deployment + labels: + app: nginx +spec: + replicas: 3 + selector: + matchLabels: + app: nginx + template: + metadata: + labels: + app: nginx + spec: + containers: + - name: nginx + image: nginx:1.14.2 + ports: + - containerPort: 80 \ No newline at end of file diff --git a/pkg/scanners/helm/parser/vals.go b/pkg/scanners/helm/parser/vals.go new file mode 100644 index 000000000000..300dad819730 --- /dev/null +++ b/pkg/scanners/helm/parser/vals.go @@ -0,0 +1,114 @@ +package parser + +import ( + "fmt" + "io" + "net/url" + "os" + "strings" + + "gopkg.in/yaml.v3" + "helm.sh/helm/v3/pkg/getter" + "helm.sh/helm/v3/pkg/strvals" +) + +type ValueOptions struct { + ValueFiles []string + StringValues []string + Values []string + FileValues []string +} + +// MergeValues merges values from files specified via -f/--values and directly +// via --set, --set-string, or --set-file, 
marshaling them to YAML +func (opts *ValueOptions) MergeValues() (map[string]interface{}, error) { + base := map[string]interface{}{} + + // User specified a values files via -f/--values + for _, filePath := range opts.ValueFiles { + currentMap := map[string]interface{}{} + + bytes, err := readFile(filePath) + if err != nil { + return nil, err + } + + if err := yaml.Unmarshal(bytes, ¤tMap); err != nil { + return nil, fmt.Errorf("failed to parse %s: %w", filePath, err) + } + // Merge with the previous map + base = mergeMaps(base, currentMap) + } + + // User specified a value via --set + for _, value := range opts.Values { + if err := strvals.ParseInto(value, base); err != nil { + return nil, fmt.Errorf("failed parsing --set data, %w", err) + } + } + + // User specified a value via --set-string + for _, value := range opts.StringValues { + if err := strvals.ParseIntoString(value, base); err != nil { + return nil, fmt.Errorf("failed parsing --set-string data %w", err) + } + } + + // User specified a value via --set-file + for _, value := range opts.FileValues { + reader := func(rs []rune) (interface{}, error) { + bytes, err := readFile(string(rs)) + if err != nil { + return nil, err + } + return string(bytes), err + } + if err := strvals.ParseIntoFile(value, base, reader); err != nil { + return nil, fmt.Errorf("failed parsing --set-file data: %w", err) + } + } + + return base, nil +} + +func mergeMaps(a, b map[string]interface{}) map[string]interface{} { + out := make(map[string]interface{}, len(a)) + for k, v := range a { + out[k] = v + } + for k, v := range b { + if v, ok := v.(map[string]interface{}); ok { + if bv, ok := out[k]; ok { + if bv, ok := bv.(map[string]interface{}); ok { + out[k] = mergeMaps(bv, v) + continue + } + } + } + out[k] = v + } + return out +} + +// readFile load a file from stdin, the local directory, or a remote file with a url. 
+func readFile(filePath string) ([]byte, error) { + if strings.TrimSpace(filePath) == "-" { + return io.ReadAll(os.Stdin) + } + u, _ := url.Parse(filePath) + + // FIXME: maybe someone handle other protocols like ftp. + if u.Scheme == "http" || u.Scheme == "https" { + g, err := getter.NewHTTPGetter() + if err != nil { + return nil, err + } + data, err := g.Get(filePath, getter.WithURL(filePath)) + if err != nil { + return nil, err + } + return data.Bytes(), err + } else { + return os.ReadFile(filePath) + } +} diff --git a/pkg/scanners/helm/scanner.go b/pkg/scanners/helm/scanner.go new file mode 100644 index 000000000000..81c386c43213 --- /dev/null +++ b/pkg/scanners/helm/scanner.go @@ -0,0 +1,221 @@ +package helm + +import ( + "context" + "fmt" + "io" + "io/fs" + "path/filepath" + "strings" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/liamg/memoryfs" + + "github.com/aquasecurity/defsec/pkg/rego" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/detection" + "github.com/aquasecurity/trivy/pkg/scanners" + "github.com/aquasecurity/trivy/pkg/scanners/helm/parser" + kparser "github.com/aquasecurity/trivy/pkg/scanners/kubernetes/parser" +) + +var _ scanners.FSScanner = (*Scanner)(nil) +var _ options.ConfigurableScanner = (*Scanner)(nil) + +type Scanner struct { + policyDirs []string + dataDirs []string + debug debug.Logger + options []options.ScannerOption + parserOptions []options.ParserOption + policyReaders []io.Reader + loadEmbeddedLibraries bool + loadEmbeddedPolicies bool + policyFS fs.FS + skipRequired bool + frameworks []framework.Framework + spec string +} + +func (s *Scanner) SetSpec(spec string) { + s.spec = spec +} + +func (s *Scanner) SetRegoOnly(bool) { +} + +func (s *Scanner) 
SetFrameworks(frameworks []framework.Framework) { + s.frameworks = frameworks +} + +// New creates a new Scanner +func New(options ...options.ScannerOption) *Scanner { + s := &Scanner{ + options: options, + } + + for _, option := range options { + option(s) + } + return s +} + +func (s *Scanner) AddParserOptions(options ...options.ParserOption) { + s.parserOptions = append(s.parserOptions, options...) +} + +func (s *Scanner) SetUseEmbeddedPolicies(b bool) { + s.loadEmbeddedPolicies = b +} + +func (s *Scanner) SetUseEmbeddedLibraries(b bool) { + s.loadEmbeddedLibraries = b +} + +func (s *Scanner) Name() string { + return "Helm" +} + +func (s *Scanner) SetPolicyReaders(readers []io.Reader) { + s.policyReaders = readers +} + +func (s *Scanner) SetSkipRequiredCheck(skip bool) { + s.skipRequired = skip +} + +func (s *Scanner) SetDebugWriter(writer io.Writer) { + s.debug = debug.New(writer, "helm", "scanner") +} + +func (s *Scanner) SetTraceWriter(_ io.Writer) { + // handled by rego later - nothing to do for now... +} + +func (s *Scanner) SetPerResultTracingEnabled(_ bool) { + // handled by rego later - nothing to do for now... +} + +func (s *Scanner) SetPolicyDirs(dirs ...string) { + s.policyDirs = dirs +} + +func (s *Scanner) SetDataDirs(dirs ...string) { + s.dataDirs = dirs +} + +func (s *Scanner) SetPolicyNamespaces(namespaces ...string) { + // handled by rego later - nothing to do for now... 
+} + +func (s *Scanner) SetPolicyFilesystem(policyFS fs.FS) { + s.policyFS = policyFS +} + +func (s *Scanner) SetDataFilesystem(_ fs.FS) {} +func (s *Scanner) SetRegoErrorLimit(_ int) {} + +func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, path string) (scan.Results, error) { + + var results []scan.Result + if err := fs.WalkDir(target, path, func(path string, d fs.DirEntry, err error) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + + if err != nil { + return err + } + + if d.IsDir() { + return nil + } + + if detection.IsArchive(path) { + if scanResults, err := s.getScanResults(path, ctx, target); err != nil { + return err + } else { + results = append(results, scanResults...) + } + } + + if strings.HasSuffix(path, "Chart.yaml") { + if scanResults, err := s.getScanResults(filepath.Dir(path), ctx, target); err != nil { + return err + } else { + results = append(results, scanResults...) + } + } + + return nil + }); err != nil { + return nil, err + } + + return results, nil + +} + +func (s *Scanner) getScanResults(path string, ctx context.Context, target fs.FS) (results []scan.Result, err error) { + helmParser := parser.New(path, s.parserOptions...) + + if err := helmParser.ParseFS(ctx, target, path); err != nil { + return nil, err + } + + chartFiles, err := helmParser.RenderedChartFiles() + if err != nil { // not valid helm, maybe some other yaml etc., abort + s.debug.Log("Failed to render Chart files: %s", err) + return nil, nil + } + + regoScanner := rego.NewScanner(types.SourceKubernetes, s.options...) 
+ policyFS := target + if s.policyFS != nil { + policyFS = s.policyFS + } + if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, policyFS, s.policyDirs, s.policyReaders); err != nil { + return nil, fmt.Errorf("policies load: %w", err) + } + for _, file := range chartFiles { + file := file + s.debug.Log("Processing rendered chart file: %s", file.TemplateFilePath) + + manifests, err := kparser.New().Parse(strings.NewReader(file.ManifestContent), file.TemplateFilePath) + if err != nil { + return nil, fmt.Errorf("unmarshal yaml: %w", err) + } + for _, manifest := range manifests { + fileResults, err := regoScanner.ScanInput(ctx, rego.Input{ + Path: file.TemplateFilePath, + Contents: manifest, + FS: target, + }) + if err != nil { + return nil, fmt.Errorf("scanning error: %w", err) + } + + if len(fileResults) > 0 { + renderedFS := memoryfs.New() + if err := renderedFS.MkdirAll(filepath.Dir(file.TemplateFilePath), fs.ModePerm); err != nil { + return nil, err + } + if err := renderedFS.WriteLazyFile(file.TemplateFilePath, func() (io.Reader, error) { + return strings.NewReader(file.ManifestContent), nil + }, fs.ModePerm); err != nil { + return nil, err + } + fileResults.SetSourceAndFilesystem(helmParser.ChartSource, renderedFS, detection.IsArchive(helmParser.ChartSource)) + } + + results = append(results, fileResults...) + } + + } + return results, nil +} diff --git a/pkg/scanners/helm/test/mysql/.helmignore b/pkg/scanners/helm/test/mysql/.helmignore new file mode 100644 index 000000000000..f0c131944441 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/.helmignore @@ -0,0 +1,21 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. 
+.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*~ +# Various IDEs +.project +.idea/ +*.tmproj diff --git a/pkg/scanners/helm/test/mysql/Chart.lock b/pkg/scanners/helm/test/mysql/Chart.lock new file mode 100644 index 000000000000..2a6356005c25 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/Chart.lock @@ -0,0 +1,6 @@ +dependencies: +- name: common + repository: https://charts.bitnami.com/bitnami + version: 1.11.1 +digest: sha256:a000bcd4d4cdd813c67d633b5523b4a4cd478fb95f1cae665d9b0ba5c45b40e2 +generated: "2022-02-16T22:19:57.971058445Z" diff --git a/pkg/scanners/helm/test/mysql/Chart.yaml b/pkg/scanners/helm/test/mysql/Chart.yaml new file mode 100644 index 000000000000..7d5f5c6ce834 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/Chart.yaml @@ -0,0 +1,28 @@ +annotations: + category: Database +apiVersion: v2 +appVersion: 8.0.28 +dependencies: +- name: common + repository: https://charts.bitnami.com/bitnami + tags: + - bitnami-common + version: 1.x.x +description: MySQL is a fast, reliable, scalable, and easy to use open source relational + database system. Designed to handle mission-critical, heavy-load production applications. +home: https://github.com/bitnami/charts/tree/master/bitnami/mysql +icon: https://bitnami.com/assets/stacks/mysql/img/mysql-stack-220x234.png +keywords: +- mysql +- database +- sql +- cluster +- high availability +maintainers: +- email: containers@bitnami.com + name: Bitnami +name: mysql +sources: +- https://github.com/bitnami/bitnami-docker-mysql +- https://mysql.com +version: 8.8.26 diff --git a/pkg/scanners/helm/test/mysql/README.md b/pkg/scanners/helm/test/mysql/README.md new file mode 100644 index 000000000000..b03fa495893f --- /dev/null +++ b/pkg/scanners/helm/test/mysql/README.md @@ -0,0 +1,491 @@ + + +# MySQL packaged by Bitnami + +MySQL is a fast, reliable, scalable, and easy to use open source relational database system. 
Designed to handle mission-critical, heavy-load production applications. + +[Overview of MySQL](http://www.mysql.com) + +Trademarks: This software listing is packaged by Bitnami. The respective trademarks mentioned in the offering are owned by the respective companies, and use of them does not imply any affiliation or endorsement. + +## TL;DR + +```bash +$ helm repo add bitnami https://charts.bitnami.com/bitnami +$ helm install my-release bitnami/mysql +``` + +## Introduction + +This chart bootstraps a [MySQL](https://github.com/bitnami/bitnami-docker-mysql) replication cluster deployment on a [Kubernetes](https://kubernetes.io) cluster using the [Helm](https://helm.sh) package manager. + +Bitnami charts can be used with [Kubeapps](https://kubeapps.com/) for deployment and management of Helm Charts in clusters. This Helm chart has been tested on top of [Bitnami Kubernetes Production Runtime](https://kubeprod.io/) (BKPR). Deploy BKPR to get automated TLS certificates, logging and monitoring for your applications. + +## Prerequisites + +- Kubernetes 1.19+ +- Helm 3.2.0+ +- PV provisioner support in the underlying infrastructure + +## Installing the Chart + +To install the chart with the release name `my-release`: + +```bash +$ helm repo add bitnami https://charts.bitnami.com/bitnami +$ helm install my-release bitnami/mysql +``` + +These commands deploy MySQL on the Kubernetes cluster in the default configuration. The [Parameters](#parameters) section lists the parameters that can be configured during installation. + +> **Tip**: List all releases using `helm list` + +## Uninstalling the Chart + +To uninstall/delete the `my-release` deployment: + +```bash +$ helm delete my-release +``` + +The command removes all the Kubernetes components associated with the chart and deletes the release. 
+ +## Parameters + +### Global parameters + +| Name | Description | Value | +| ------------------------- | ----------------------------------------------- | ----- | +| `global.imageRegistry` | Global Docker image registry | `""` | +| `global.imagePullSecrets` | Global Docker registry secret names as an array | `[]` | +| `global.storageClass` | Global StorageClass for Persistent Volume(s) | `""` | + + +### Common parameters + +| Name | Description | Value | +| ------------------------ | --------------------------------------------------------------------------------------------------------- | --------------- | +| `nameOverride` | String to partially override common.names.fullname template (will maintain the release name) | `""` | +| `fullnameOverride` | String to fully override common.names.fullname template | `""` | +| `clusterDomain` | Cluster domain | `cluster.local` | +| `commonAnnotations` | Common annotations to add to all MySQL resources (sub-charts are not considered). Evaluated as a template | `{}` | +| `commonLabels` | Common labels to add to all MySQL resources (sub-charts are not considered). Evaluated as a template | `{}` | +| `extraDeploy` | Array with extra yaml to deploy with the chart. Evaluated as a template | `[]` | +| `schedulerName` | Use an alternate scheduler, e.g. "stork". 
| `""` | +| `diagnosticMode.enabled` | Enable diagnostic mode (all probes will be disabled and the command will be overridden) | `false` | +| `diagnosticMode.command` | Command to override all containers in the deployment | `["sleep"]` | +| `diagnosticMode.args` | Args to override all containers in the deployment | `["infinity"]` | + + +### MySQL common parameters + +| Name | Description | Value | +| -------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------- | +| `image.registry` | MySQL image registry | `docker.io` | +| `image.repository` | MySQL image repository | `bitnami/mysql` | +| `image.tag` | MySQL image tag (immutable tags are recommended) | `8.0.28-debian-10-r0` | +| `image.pullPolicy` | MySQL image pull policy | `IfNotPresent` | +| `image.pullSecrets` | Specify docker-registry secret names as an array | `[]` | +| `image.debug` | Specify if debug logs should be enabled | `false` | +| `architecture` | MySQL architecture (`standalone` or `replication`) | `standalone` | +| `auth.rootPassword` | Password for the `root` user. Ignored if existing secret is provided | `""` | +| `auth.database` | Name for a custom database to create | `my_database` | +| `auth.username` | Name for a custom user to create | `""` | +| `auth.password` | Password for the new user. Ignored if existing secret is provided | `""` | +| `auth.replicationUser` | MySQL replication user | `replicator` | +| `auth.replicationPassword` | MySQL replication user password. Ignored if existing secret is provided | `""` | +| `auth.existingSecret` | Use existing secret for password details. 
The secret has to contain the keys `mysql-root-password`, `mysql-replication-password` and `mysql-password` | `""` | +| `auth.forcePassword` | Force users to specify required passwords | `false` | +| `auth.usePasswordFiles` | Mount credentials as files instead of using an environment variable | `false` | +| `auth.customPasswordFiles` | Use custom password files when `auth.usePasswordFiles` is set to `true`. Define path for keys `root` and `user`, also define `replicator` if `architecture` is set to `replication` | `{}` | +| `initdbScripts` | Dictionary of initdb scripts | `{}` | +| `initdbScriptsConfigMap` | ConfigMap with the initdb scripts (Note: Overrides `initdbScripts`) | `""` | + + +### MySQL Primary parameters + +| Name | Description | Value | +| -------------------------------------------- | --------------------------------------------------------------------------------------------------------------- | ------------------- | +| `primary.command` | Override default container command on MySQL Primary container(s) (useful when using custom images) | `[]` | +| `primary.args` | Override default container args on MySQL Primary container(s) (useful when using custom images) | `[]` | +| `primary.hostAliases` | Deployment pod host aliases | `[]` | +| `primary.configuration` | Configure MySQL Primary with a custom my.cnf file | `""` | +| `primary.existingConfigmap` | Name of existing ConfigMap with MySQL Primary configuration. | `""` | +| `primary.updateStrategy` | Update strategy type for the MySQL primary statefulset | `RollingUpdate` | +| `primary.rollingUpdatePartition` | Partition update strategy for MySQL Primary statefulset | `""` | +| `primary.podAnnotations` | Additional pod annotations for MySQL primary pods | `{}` | +| `primary.podAffinityPreset` | MySQL primary pod affinity preset. Ignored if `primary.affinity` is set. Allowed values: `soft` or `hard` | `""` | +| `primary.podAntiAffinityPreset` | MySQL primary pod anti-affinity preset. 
Ignored if `primary.affinity` is set. Allowed values: `soft` or `hard` | `soft` | +| `primary.nodeAffinityPreset.type` | MySQL primary node affinity preset type. Ignored if `primary.affinity` is set. Allowed values: `soft` or `hard` | `""` | +| `primary.nodeAffinityPreset.key` | MySQL primary node label key to match Ignored if `primary.affinity` is set. | `""` | +| `primary.nodeAffinityPreset.values` | MySQL primary node label values to match. Ignored if `primary.affinity` is set. | `[]` | +| `primary.affinity` | Affinity for MySQL primary pods assignment | `{}` | +| `primary.nodeSelector` | Node labels for MySQL primary pods assignment | `{}` | +| `primary.tolerations` | Tolerations for MySQL primary pods assignment | `[]` | +| `primary.podSecurityContext.enabled` | Enable security context for MySQL primary pods | `true` | +| `primary.podSecurityContext.fsGroup` | Group ID for the mounted volumes' filesystem | `1001` | +| `primary.containerSecurityContext.enabled` | MySQL primary container securityContext | `true` | +| `primary.containerSecurityContext.runAsUser` | User ID for the MySQL primary container | `1001` | +| `primary.resources.limits` | The resources limits for MySQL primary containers | `{}` | +| `primary.resources.requests` | The requested resources for MySQL primary containers | `{}` | +| `primary.livenessProbe.enabled` | Enable livenessProbe | `true` | +| `primary.livenessProbe.initialDelaySeconds` | Initial delay seconds for livenessProbe | `5` | +| `primary.livenessProbe.periodSeconds` | Period seconds for livenessProbe | `10` | +| `primary.livenessProbe.timeoutSeconds` | Timeout seconds for livenessProbe | `1` | +| `primary.livenessProbe.failureThreshold` | Failure threshold for livenessProbe | `3` | +| `primary.livenessProbe.successThreshold` | Success threshold for livenessProbe | `1` | +| `primary.readinessProbe.enabled` | Enable readinessProbe | `true` | +| `primary.readinessProbe.initialDelaySeconds` | Initial delay seconds for readinessProbe 
| `5` | +| `primary.readinessProbe.periodSeconds` | Period seconds for readinessProbe | `10` | +| `primary.readinessProbe.timeoutSeconds` | Timeout seconds for readinessProbe | `1` | +| `primary.readinessProbe.failureThreshold` | Failure threshold for readinessProbe | `3` | +| `primary.readinessProbe.successThreshold` | Success threshold for readinessProbe | `1` | +| `primary.startupProbe.enabled` | Enable startupProbe | `true` | +| `primary.startupProbe.initialDelaySeconds` | Initial delay seconds for startupProbe | `15` | +| `primary.startupProbe.periodSeconds` | Period seconds for startupProbe | `10` | +| `primary.startupProbe.timeoutSeconds` | Timeout seconds for startupProbe | `1` | +| `primary.startupProbe.failureThreshold` | Failure threshold for startupProbe | `10` | +| `primary.startupProbe.successThreshold` | Success threshold for startupProbe | `1` | +| `primary.customLivenessProbe` | Override default liveness probe for MySQL primary containers | `{}` | +| `primary.customReadinessProbe` | Override default readiness probe for MySQL primary containers | `{}` | +| `primary.customStartupProbe` | Override default startup probe for MySQL primary containers | `{}` | +| `primary.extraFlags` | MySQL primary additional command line flags | `""` | +| `primary.extraEnvVars` | Extra environment variables to be set on MySQL primary containers | `[]` | +| `primary.extraEnvVarsCM` | Name of existing ConfigMap containing extra env vars for MySQL primary containers | `""` | +| `primary.extraEnvVarsSecret` | Name of existing Secret containing extra env vars for MySQL primary containers | `""` | +| `primary.persistence.enabled` | Enable persistence on MySQL primary replicas using a `PersistentVolumeClaim`. 
If false, use emptyDir | `true` | +| `primary.persistence.existingClaim` | Name of an existing `PersistentVolumeClaim` for MySQL primary replicas | `""` | +| `primary.persistence.storageClass` | MySQL primary persistent volume storage Class | `""` | +| `primary.persistence.annotations` | MySQL primary persistent volume claim annotations | `{}` | +| `primary.persistence.accessModes` | MySQL primary persistent volume access Modes | `["ReadWriteOnce"]` | +| `primary.persistence.size` | MySQL primary persistent volume size | `8Gi` | +| `primary.persistence.selector` | Selector to match an existing Persistent Volume | `{}` | +| `primary.extraVolumes` | Optionally specify extra list of additional volumes to the MySQL Primary pod(s) | `[]` | +| `primary.extraVolumeMounts` | Optionally specify extra list of additional volumeMounts for the MySQL Primary container(s) | `[]` | +| `primary.initContainers` | Add additional init containers for the MySQL Primary pod(s) | `[]` | +| `primary.sidecars` | Add additional sidecar containers for the MySQL Primary pod(s) | `[]` | +| `primary.service.type` | MySQL Primary K8s service type | `ClusterIP` | +| `primary.service.port` | MySQL Primary K8s service port | `3306` | +| `primary.service.nodePort` | MySQL Primary K8s service node port | `""` | +| `primary.service.clusterIP` | MySQL Primary K8s service clusterIP IP | `""` | +| `primary.service.loadBalancerIP` | MySQL Primary loadBalancerIP if service type is `LoadBalancer` | `""` | +| `primary.service.externalTrafficPolicy` | Enable client source IP preservation | `Cluster` | +| `primary.service.loadBalancerSourceRanges` | Addresses that are allowed when MySQL Primary service is LoadBalancer | `[]` | +| `primary.service.annotations` | Provide any additional annotations which may be required | `{}` | +| `primary.pdb.enabled` | Enable/disable a Pod Disruption Budget creation for MySQL primary pods | `false` | +| `primary.pdb.minAvailable` | Minimum number/percentage of MySQL primary 
pods that should remain scheduled | `1` | +| `primary.pdb.maxUnavailable` | Maximum number/percentage of MySQL primary pods that may be made unavailable | `""` | +| `primary.podLabels` | MySQL Primary pod label. If labels are same as commonLabels , this will take precedence | `{}` | + + +### MySQL Secondary parameters + +| Name | Description | Value | +| ---------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------- | +| `secondary.replicaCount` | Number of MySQL secondary replicas | `1` | +| `secondary.hostAliases` | Deployment pod host aliases | `[]` | +| `secondary.command` | Override default container command on MySQL Secondary container(s) (useful when using custom images) | `[]` | +| `secondary.args` | Override default container args on MySQL Secondary container(s) (useful when using custom images) | `[]` | +| `secondary.configuration` | Configure MySQL Secondary with a custom my.cnf file | `""` | +| `secondary.existingConfigmap` | Name of existing ConfigMap with MySQL Secondary configuration. | `""` | +| `secondary.updateStrategy` | Update strategy type for the MySQL secondary statefulset | `RollingUpdate` | +| `secondary.rollingUpdatePartition` | Partition update strategy for MySQL Secondary statefulset | `""` | +| `secondary.podAnnotations` | Additional pod annotations for MySQL secondary pods | `{}` | +| `secondary.podAffinityPreset` | MySQL secondary pod affinity preset. Ignored if `secondary.affinity` is set. Allowed values: `soft` or `hard` | `""` | +| `secondary.podAntiAffinityPreset` | MySQL secondary pod anti-affinity preset. Ignored if `secondary.affinity` is set. Allowed values: `soft` or `hard` | `soft` | +| `secondary.nodeAffinityPreset.type` | MySQL secondary node affinity preset type. Ignored if `secondary.affinity` is set. 
Allowed values: `soft` or `hard` | `""` | +| `secondary.nodeAffinityPreset.key` | MySQL secondary node label key to match Ignored if `secondary.affinity` is set. | `""` | +| `secondary.nodeAffinityPreset.values` | MySQL secondary node label values to match. Ignored if `secondary.affinity` is set. | `[]` | +| `secondary.affinity` | Affinity for MySQL secondary pods assignment | `{}` | +| `secondary.nodeSelector` | Node labels for MySQL secondary pods assignment | `{}` | +| `secondary.tolerations` | Tolerations for MySQL secondary pods assignment | `[]` | +| `secondary.podSecurityContext.enabled` | Enable security context for MySQL secondary pods | `true` | +| `secondary.podSecurityContext.fsGroup` | Group ID for the mounted volumes' filesystem | `1001` | +| `secondary.containerSecurityContext.enabled` | MySQL secondary container securityContext | `true` | +| `secondary.containerSecurityContext.runAsUser` | User ID for the MySQL secondary container | `1001` | +| `secondary.resources.limits` | The resources limits for MySQL secondary containers | `{}` | +| `secondary.resources.requests` | The requested resources for MySQL secondary containers | `{}` | +| `secondary.livenessProbe.enabled` | Enable livenessProbe | `true` | +| `secondary.livenessProbe.initialDelaySeconds` | Initial delay seconds for livenessProbe | `5` | +| `secondary.livenessProbe.periodSeconds` | Period seconds for livenessProbe | `10` | +| `secondary.livenessProbe.timeoutSeconds` | Timeout seconds for livenessProbe | `1` | +| `secondary.livenessProbe.failureThreshold` | Failure threshold for livenessProbe | `3` | +| `secondary.livenessProbe.successThreshold` | Success threshold for livenessProbe | `1` | +| `secondary.readinessProbe.enabled` | Enable readinessProbe | `true` | +| `secondary.readinessProbe.initialDelaySeconds` | Initial delay seconds for readinessProbe | `5` | +| `secondary.readinessProbe.periodSeconds` | Period seconds for readinessProbe | `10` | +| 
`secondary.readinessProbe.timeoutSeconds` | Timeout seconds for readinessProbe | `1` | +| `secondary.readinessProbe.failureThreshold` | Failure threshold for readinessProbe | `3` | +| `secondary.readinessProbe.successThreshold` | Success threshold for readinessProbe | `1` | +| `secondary.startupProbe.enabled` | Enable startupProbe | `true` | +| `secondary.startupProbe.initialDelaySeconds` | Initial delay seconds for startupProbe | `15` | +| `secondary.startupProbe.periodSeconds` | Period seconds for startupProbe | `10` | +| `secondary.startupProbe.timeoutSeconds` | Timeout seconds for startupProbe | `1` | +| `secondary.startupProbe.failureThreshold` | Failure threshold for startupProbe | `15` | +| `secondary.startupProbe.successThreshold` | Success threshold for startupProbe | `1` | +| `secondary.customLivenessProbe` | Override default liveness probe for MySQL secondary containers | `{}` | +| `secondary.customReadinessProbe` | Override default readiness probe for MySQL secondary containers | `{}` | +| `secondary.customStartupProbe` | Override default startup probe for MySQL secondary containers | `{}` | +| `secondary.extraFlags` | MySQL secondary additional command line flags | `""` | +| `secondary.extraEnvVars` | An array to add extra environment variables on MySQL secondary containers | `[]` | +| `secondary.extraEnvVarsCM` | Name of existing ConfigMap containing extra env vars for MySQL secondary containers | `""` | +| `secondary.extraEnvVarsSecret` | Name of existing Secret containing extra env vars for MySQL secondary containers | `""` | +| `secondary.persistence.enabled` | Enable persistence on MySQL secondary replicas using a `PersistentVolumeClaim` | `true` | +| `secondary.persistence.storageClass` | MySQL secondary persistent volume storage Class | `""` | +| `secondary.persistence.annotations` | MySQL secondary persistent volume claim annotations | `{}` | +| `secondary.persistence.accessModes` | MySQL secondary persistent volume access Modes | 
`["ReadWriteOnce"]` | +| `secondary.persistence.size` | MySQL secondary persistent volume size | `8Gi` | +| `secondary.persistence.selector` | Selector to match an existing Persistent Volume | `{}` | +| `secondary.extraVolumes` | Optionally specify extra list of additional volumes to the MySQL secondary pod(s) | `[]` | +| `secondary.extraVolumeMounts` | Optionally specify extra list of additional volumeMounts for the MySQL secondary container(s) | `[]` | +| `secondary.initContainers` | Add additional init containers for the MySQL secondary pod(s) | `[]` | +| `secondary.sidecars` | Add additional sidecar containers for the MySQL secondary pod(s) | `[]` | +| `secondary.service.type` | MySQL secondary Kubernetes service type | `ClusterIP` | +| `secondary.service.port` | MySQL secondary Kubernetes service port | `3306` | +| `secondary.service.nodePort` | MySQL secondary Kubernetes service node port | `""` | +| `secondary.service.clusterIP` | MySQL secondary Kubernetes service clusterIP IP | `""` | +| `secondary.service.loadBalancerIP` | MySQL secondary loadBalancerIP if service type is `LoadBalancer` | `""` | +| `secondary.service.externalTrafficPolicy` | Enable client source IP preservation | `Cluster` | +| `secondary.service.loadBalancerSourceRanges` | Addresses that are allowed when MySQL secondary service is LoadBalancer | `[]` | +| `secondary.service.annotations` | Provide any additional annotations which may be required | `{}` | +| `secondary.pdb.enabled` | Enable/disable a Pod Disruption Budget creation for MySQL secondary pods | `false` | +| `secondary.pdb.minAvailable` | Minimum number/percentage of MySQL secondary pods that should remain scheduled | `1` | +| `secondary.pdb.maxUnavailable` | Maximum number/percentage of MySQL secondary pods that may be made unavailable | `""` | +| `secondary.podLabels` | Additional pod labels for MySQL secondary pods | `{}` | + + +### RBAC parameters + +| Name | Description | Value | +| ---------------------------- | 
------------------------------------------------------ | ------- | +| `serviceAccount.create` | Enable the creation of a ServiceAccount for MySQL pods | `true` | +| `serviceAccount.name` | Name of the created ServiceAccount | `""` | +| `serviceAccount.annotations` | Annotations for MySQL Service Account | `{}` | +| `rbac.create` | Whether to create & use RBAC resources or not | `false` | + + +### Network Policy + +| Name | Description | Value | +| ------------------------------------------ | --------------------------------------------------------------------------------------------------------------- | ------- | +| `networkPolicy.enabled` | Enable creation of NetworkPolicy resources | `false` | +| `networkPolicy.allowExternal` | The Policy model to apply. | `true` | +| `networkPolicy.explicitNamespacesSelector` | A Kubernetes LabelSelector to explicitly select namespaces from which ingress traffic could be allowed to MySQL | `{}` | + + +### Volume Permissions parameters + +| Name | Description | Value | +| ------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | ----------------------- | +| `volumePermissions.enabled` | Enable init container that changes the owner and group of the persistent volume(s) mountpoint to `runAsUser:fsGroup` | `false` | +| `volumePermissions.image.registry` | Init container volume-permissions image registry | `docker.io` | +| `volumePermissions.image.repository` | Init container volume-permissions image repository | `bitnami/bitnami-shell` | +| `volumePermissions.image.tag` | Init container volume-permissions image tag (immutable tags are recommended) | `10-debian-10-r312` | +| `volumePermissions.image.pullPolicy` | Init container volume-permissions image pull policy | `IfNotPresent` | +| `volumePermissions.image.pullSecrets` | Specify docker-registry secret names as an array | `[]` | +| `volumePermissions.resources` | Init container 
volume-permissions resources | `{}` | + + +### Metrics parameters + +| Name | Description | Value | +| -------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | ------------------------- | +| `metrics.enabled` | Start a side-car prometheus exporter | `false` | +| `metrics.image.registry` | Exporter image registry | `docker.io` | +| `metrics.image.repository` | Exporter image repository | `bitnami/mysqld-exporter` | +| `metrics.image.tag` | Exporter image tag (immutable tags are recommended) | `0.13.0-debian-10-r216` | +| `metrics.image.pullPolicy` | Exporter image pull policy | `IfNotPresent` | +| `metrics.image.pullSecrets` | Specify docker-registry secret names as an array | `[]` | +| `metrics.service.type` | Kubernetes service type for MySQL Prometheus Exporter | `ClusterIP` | +| `metrics.service.port` | MySQL Prometheus Exporter service port | `9104` | +| `metrics.service.annotations` | Prometheus exporter service annotations | `{}` | +| `metrics.extraArgs.primary` | Extra args to be passed to mysqld_exporter on Primary pods | `[]` | +| `metrics.extraArgs.secondary` | Extra args to be passed to mysqld_exporter on Secondary pods | `[]` | +| `metrics.resources.limits` | The resources limits for MySQL prometheus exporter containers | `{}` | +| `metrics.resources.requests` | The requested resources for MySQL prometheus exporter containers | `{}` | +| `metrics.livenessProbe.enabled` | Enable livenessProbe | `true` | +| `metrics.livenessProbe.initialDelaySeconds` | Initial delay seconds for livenessProbe | `120` | +| `metrics.livenessProbe.periodSeconds` | Period seconds for livenessProbe | `10` | +| `metrics.livenessProbe.timeoutSeconds` | Timeout seconds for livenessProbe | `1` | +| `metrics.livenessProbe.failureThreshold` | Failure threshold for livenessProbe | `3` | +| `metrics.livenessProbe.successThreshold` | Success threshold for livenessProbe | `1` | 
+| `metrics.readinessProbe.enabled` | Enable readinessProbe | `true` | +| `metrics.readinessProbe.initialDelaySeconds` | Initial delay seconds for readinessProbe | `30` | +| `metrics.readinessProbe.periodSeconds` | Period seconds for readinessProbe | `10` | +| `metrics.readinessProbe.timeoutSeconds` | Timeout seconds for readinessProbe | `1` | +| `metrics.readinessProbe.failureThreshold` | Failure threshold for readinessProbe | `3` | +| `metrics.readinessProbe.successThreshold` | Success threshold for readinessProbe | `1` | +| `metrics.serviceMonitor.enabled` | Create ServiceMonitor Resource for scraping metrics using PrometheusOperator | `false` | +| `metrics.serviceMonitor.namespace` | Specify the namespace in which the serviceMonitor resource will be created | `""` | +| `metrics.serviceMonitor.interval` | Specify the interval at which metrics should be scraped | `30s` | +| `metrics.serviceMonitor.scrapeTimeout` | Specify the timeout after which the scrape is ended | `""` | +| `metrics.serviceMonitor.relabellings` | Specify Metric Relabellings to add to the scrape endpoint | `[]` | +| `metrics.serviceMonitor.honorLabels` | Specify honorLabels parameter to add the scrape endpoint | `false` | +| `metrics.serviceMonitor.additionalLabels` | Used to pass Labels that are used by the Prometheus installed in your cluster to select Service Monitors to work with | `{}` | + + +The above parameters map to the env variables defined in [bitnami/mysql](https://github.com/bitnami/bitnami-docker-mysql). For more information please refer to the [bitnami/mysql](https://github.com/bitnami/bitnami-docker-mysql) image documentation. + +Specify each parameter using the `--set key=value[,key=value]` argument to `helm install`. For example, + +```bash +$ helm install my-release \ + --set auth.rootPassword=secretpassword,auth.database=app_database \ + bitnami/mysql +``` + +The above command sets the MySQL `root` account password to `secretpassword`. 
Additionally it creates a database named `app_database`. + +> NOTE: Once this chart is deployed, it is not possible to change the application's access credentials, such as usernames or passwords, using Helm. To change these application credentials after deployment, delete any persistent volumes (PVs) used by the chart and re-deploy it, or use the application's built-in administrative tools if available. + +Alternatively, a YAML file that specifies the values for the parameters can be provided while installing the chart. For example, + +```bash +$ helm install my-release -f values.yaml bitnami/mysql +``` + +> **Tip**: You can use the default [values.yaml](values.yaml) + +## Configuration and installation details + +### [Rolling VS Immutable tags](https://docs.bitnami.com/containers/how-to/understand-rolling-tags-containers/) + +It is strongly recommended to use immutable tags in a production environment. This ensures your deployment does not change automatically if the same tag is updated with a different image. + +Bitnami will release a new chart updating its containers if a new version of the main container, significant changes, or critical vulnerabilities exist. + +### Use a different MySQL version + +To modify the application version used in this chart, specify a different version of the image using the `image.tag` parameter and/or a different repository using the `image.repository` parameter. Refer to the [chart documentation for more information on these parameters and how to use them with images from a private registry](https://docs.bitnami.com/kubernetes/infrastructure/mysql/configuration/change-image-version/). + +### Customize a new MySQL instance + +The [Bitnami MySQL](https://github.com/bitnami/bitnami-docker-mysql) image allows you to use your custom scripts to initialize a fresh instance. Custom scripts may be specified using the `initdbScripts` parameter. 
Alternatively, an external ConfigMap may be created with all the initialization scripts and the ConfigMap passed to the chart via the `initdbScriptsConfigMap` parameter. Note that this will override the `initdbScripts` parameter. + +The allowed extensions are `.sh`, `.sql` and `.sql.gz`. + +These scripts are treated differently depending on their extension. While `.sh` scripts are executed on all the nodes, `.sql` and `.sql.gz` scripts are only executed on the primary nodes. This is because `.sh` scripts support conditional tests to identify the type of node they are running on, while such tests are not supported in `.sql` or `.sql.gz` files. + +Refer to the [chart documentation for more information and a usage example](http://docs.bitnami.com/kubernetes/infrastructure/mysql/configuration/customize-new-instance/). + +### Sidecars and Init Containers + +If you have a need for additional containers to run within the same pod as MySQL, you can do so via the `sidecars` config parameter. Simply define your container according to the Kubernetes container spec. + +```yaml +sidecars: + - name: your-image-name + image: your-image + imagePullPolicy: Always + ports: + - name: portname + containerPort: 1234 +``` + +Similarly, you can add extra init containers using the `initContainers` parameter. + +```yaml +initContainers: + - name: your-image-name + image: your-image + imagePullPolicy: Always + ports: + - name: portname + containerPort: 1234 +``` + +## Persistence + +The [Bitnami MySQL](https://github.com/bitnami/bitnami-docker-mysql) image stores the MySQL data and configurations at the `/bitnami/mysql` path of the container. + +The chart mounts a [Persistent Volume](https://kubernetes.io/docs/concepts/storage/persistent-volumes/) volume at this location. The volume is created using dynamic volume provisioning by default. An existing PersistentVolumeClaim can also be defined for this purpose. 
+ +If you encounter errors when working with persistent volumes, refer to our [troubleshooting guide for persistent volumes](https://docs.bitnami.com/kubernetes/faq/troubleshooting/troubleshooting-persistence-volumes/). + +## Network Policy + +To enable network policy for MySQL, install [a networking plugin that implements the Kubernetes NetworkPolicy spec](https://kubernetes.io/docs/tasks/administer-cluster/declare-network-policy#before-you-begin), and set `networkPolicy.enabled` to `true`. + +For Kubernetes v1.5 & v1.6, you must also turn on NetworkPolicy by setting the DefaultDeny namespace annotation. Note: this will enforce policy for _all_ pods in the namespace: + +```console +$ kubectl annotate namespace default "net.beta.kubernetes.io/network-policy={\"ingress\":{\"isolation\":\"DefaultDeny\"}}" +``` + +With NetworkPolicy enabled, traffic will be limited to just port 3306. + +For more precise policy, set `networkPolicy.allowExternal=false`. This will only allow pods with the generated client label to connect to MySQL. +This label will be displayed in the output of a successful install. + +## Pod affinity + +This chart allows you to set your custom affinity using the `XXX.affinity` parameter(s). Find more information about Pod affinity in the [Kubernetes documentation](https://kubernetes.io/docs/concepts/configuration/assign-pod-node/#affinity-and-anti-affinity). + +As an alternative, you can use the preset configurations for pod affinity, pod anti-affinity, and node affinity available at the [bitnami/common](https://github.com/bitnami/charts/tree/master/bitnami/common#affinities) chart. To do so, set the `XXX.podAffinityPreset`, `XXX.podAntiAffinityPreset`, or `XXX.nodeAffinityPreset` parameters. + +## Troubleshooting + +Find more information about how to deal with common errors related to Bitnami's Helm charts in [this troubleshooting guide](https://docs.bitnami.com/general/how-to/troubleshoot-helm-chart-issues). 
+ +## Upgrading + +It's necessary to set the `auth.rootPassword` parameter when upgrading for readiness/liveness probes to work properly. When you install this chart for the first time, some notes will be displayed providing the credentials you must use under the 'Administrator credentials' section. Please note down the password and run the command below to upgrade your chart: + +```bash +$ helm upgrade my-release bitnami/mysql --set auth.rootPassword=[ROOT_PASSWORD] +``` + +| Note: you need to substitute the placeholder _[ROOT_PASSWORD]_ with the value obtained in the installation notes. + +### To 8.0.0 + +- Several parameters were renamed or disappeared in favor of new ones on this major version: + - The terms *master* and *slave* have been replaced by the terms *primary* and *secondary*. Therefore, parameters prefixed with `master` or `slave` are now prefixed with `primary` or `secondary`, respectively. + - Credentials parameters are reorganized under the `auth` parameter. + - `replication.enabled` parameter is deprecated in favor of `architecture` parameter that accepts two values: `standalone` and `replication`. +- Chart labels were adapted to follow the [Helm charts standard labels](https://helm.sh/docs/chart_best_practices/labels/#standard-labels). +- This version also introduces `bitnami/common`, a [library chart](https://helm.sh/docs/topics/library_charts/#helm) as a dependency. More documentation about this new utility could be found [here](https://github.com/bitnami/charts/tree/master/bitnami/common#bitnami-common-library-chart). Please, make sure that you have updated the chart dependencies before executing any upgrade. + +Consequences: + +- Backwards compatibility is not guaranteed. To upgrade to `8.0.0`, install a new release of the MySQL chart, and migrate the data from your previous release. 
You have 2 alternatives to do so: + - Create a backup of the database, and restore it on the new release using tools such as [mysqldump](https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html). + - Reuse the PVC used to hold the master data on your previous release. To do so, use the `primary.persistence.existingClaim` parameter. The following example assumes that the release name is `mysql`: + +```bash +$ helm install mysql bitnami/mysql --set auth.rootPassword=[ROOT_PASSWORD] --set primary.persistence.existingClaim=[EXISTING_PVC] +``` + +| Note: you need to substitute the placeholder _[EXISTING_PVC]_ with the name of the PVC used on your previous release, and _[ROOT_PASSWORD]_ with the root password used in your previous release. + +### To 7.0.0 + +[On November 13, 2020, Helm v2 support formally ended](https://github.com/helm/charts#status-of-the-project). This major version is the result of the required changes applied to the Helm Chart to be able to incorporate the different features added in Helm v3 and to be consistent with the Helm project itself regarding the Helm v2 EOL. + +[Learn more about this change and related upgrade considerations](https://docs.bitnami.com/kubernetes/infrastructure/mysql/administration/upgrade-helm3/). + +### To 3.0.0 + +Backwards compatibility is not guaranteed unless you modify the labels used on the chart's deployments. +Use the workaround below to upgrade from versions previous to 3.0.0. The following example assumes that the release name is mysql: + +```console +$ kubectl delete statefulset mysql-master --cascade=false +$ kubectl delete statefulset mysql-slave --cascade=false +``` + +## License + +Copyright © 2022 Bitnami + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. \ No newline at end of file diff --git a/pkg/scanners/helm/test/mysql/charts/common/.helmignore b/pkg/scanners/helm/test/mysql/charts/common/.helmignore new file mode 100644 index 000000000000..50af03172541 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/.helmignore @@ -0,0 +1,22 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/pkg/scanners/helm/test/mysql/charts/common/Chart.yaml b/pkg/scanners/helm/test/mysql/charts/common/Chart.yaml new file mode 100644 index 000000000000..87226649a57c --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/Chart.yaml @@ -0,0 +1,23 @@ +annotations: + category: Infrastructure +apiVersion: v2 +appVersion: 1.11.1 +description: A Library Helm Chart for grouping common logic between bitnami charts. + This chart is not deployable by itself. 
home: https://github.com/bitnami/charts/tree/master/bitnami/common +icon: https://bitnami.com/downloads/logos/bitnami-mark.png +keywords: +- common +- helper +- template +- function +- bitnami +maintainers: +- email: containers@bitnami.com + name: Bitnami +name: common +sources: +- https://github.com/bitnami/charts +- https://www.bitnami.com/ +type: library +version: 1.11.1 diff --git a/pkg/scanners/helm/test/mysql/charts/common/README.md b/pkg/scanners/helm/test/mysql/charts/common/README.md new file mode 100644 index 000000000000..da84c426d0db --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/README.md @@ -0,0 +1,345 @@ +# Bitnami Common Library Chart + +A [Helm Library Chart](https://helm.sh/docs/topics/library_charts/#helm) for grouping common logic between bitnami charts. + +## TL;DR + +```yaml +dependencies: + - name: common + version: 0.x.x + repository: https://charts.bitnami.com/bitnami +``` + +```bash +$ helm dependency update +``` + +```yaml +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "common.names.fullname" . }} +data: + myvalue: "Hello World" +``` + +## Introduction + +This chart provides common template helpers which can be used to develop new charts using [Helm](https://helm.sh) package manager. + +Bitnami charts can be used with [Kubeapps](https://kubeapps.com/) for deployment and management of Helm Charts in clusters. This Helm chart has been tested on top of [Bitnami Kubernetes Production Runtime](https://kubeprod.io/) (BKPR). Deploy BKPR to get automated TLS certificates, logging and monitoring for your applications. + +## Prerequisites + +- Kubernetes 1.19+ +- Helm 3.2.0+ + +## Parameters + +The following table lists the helpers available in the library which are scoped in different sections. 
+ +### Affinities + +| Helper identifier | Description | Expected Input | +|-------------------------------|------------------------------------------------------|------------------------------------------------| +| `common.affinities.node.soft` | Return a soft nodeAffinity definition | `dict "key" "FOO" "values" (list "BAR" "BAZ")` | +| `common.affinities.node.hard` | Return a hard nodeAffinity definition | `dict "key" "FOO" "values" (list "BAR" "BAZ")` | +| `common.affinities.pod.soft` | Return a soft podAffinity/podAntiAffinity definition | `dict "component" "FOO" "context" $` | +| `common.affinities.pod.hard` | Return a hard podAffinity/podAntiAffinity definition | `dict "component" "FOO" "context" $` | + +### Capabilities + +| Helper identifier | Description | Expected Input | +|------------------------------------------------|------------------------------------------------------------------------------------------------|-------------------| +| `common.capabilities.kubeVersion` | Return the target Kubernetes version (using client default if .Values.kubeVersion is not set). | `.` Chart context | +| `common.capabilities.cronjob.apiVersion` | Return the appropriate apiVersion for cronjob. | `.` Chart context | +| `common.capabilities.deployment.apiVersion` | Return the appropriate apiVersion for deployment. | `.` Chart context | +| `common.capabilities.statefulset.apiVersion` | Return the appropriate apiVersion for statefulset. | `.` Chart context | +| `common.capabilities.ingress.apiVersion` | Return the appropriate apiVersion for ingress. | `.` Chart context | +| `common.capabilities.rbac.apiVersion` | Return the appropriate apiVersion for RBAC resources. | `.` Chart context | +| `common.capabilities.crd.apiVersion` | Return the appropriate apiVersion for CRDs. | `.` Chart context | +| `common.capabilities.policy.apiVersion` | Return the appropriate apiVersion for podsecuritypolicy. 
| `.` Chart context | +| `common.capabilities.networkPolicy.apiVersion` | Return the appropriate apiVersion for networkpolicy. | `.` Chart context | +| `common.capabilities.supportsHelmVersion` | Returns true if the used Helm version is 3.3+ | `.` Chart context | + +### Errors + +| Helper identifier | Description | Expected Input | +|-----------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------| +| `common.errors.upgrade.passwords.empty` | It will ensure required passwords are given when we are upgrading a chart. If `validationErrors` is not empty it will throw an error and will stop the upgrade action. | `dict "validationErrors" (list $validationError00 $validationError01) "context" $` | + +### Images + +| Helper identifier | Description | Expected Input | +|-----------------------------|------------------------------------------------------|---------------------------------------------------------------------------------------------------------| +| `common.images.image` | Return the proper and full image name | `dict "imageRoot" .Values.path.to.the.image "global" $`, see [ImageRoot](#imageroot) for the structure. 
| +| `common.images.pullSecrets` | Return the proper Docker Image Registry Secret Names (deprecated: use common.images.renderPullSecrets instead) | `dict "images" (list .Values.path.to.the.image1, .Values.path.to.the.image2) "global" .Values.global` | +| `common.images.renderPullSecrets` | Return the proper Docker Image Registry Secret Names (evaluates values as templates) | `dict "images" (list .Values.path.to.the.image1, .Values.path.to.the.image2) "context" $` | + +### Ingress + +| Helper identifier | Description | Expected Input | +|-------------------------------------------|-------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `common.ingress.backend` | Generate a proper Ingress backend entry depending on the API version | `dict "serviceName" "foo" "servicePort" "bar"`, see the [Ingress deprecation notice](https://kubernetes.io/blog/2019/07/18/api-deprecations-in-1-16/) for the syntax differences | +| `common.ingress.supportsPathType` | Prints "true" if the pathType field is supported | `.` Chart context | +| `common.ingress.supportsIngressClassname` | Prints "true" if the ingressClassname field is supported | `.` Chart context | +| `common.ingress.certManagerRequest` | Prints "true" if required cert-manager annotations for TLS signed certificates are set in the Ingress annotations | `dict "annotations" .Values.path.to.the.ingress.annotations` | + +### Labels + +| Helper identifier | Description | Expected Input | +|-----------------------------|------------------------------------------------------|-------------------| +| `common.labels.standard` | Return Kubernetes standard labels | `.` Chart context | +| `common.labels.matchLabels` | Return the proper Docker Image Registry Secret Names | `.` Chart context | + +### 
Names + +| Helper identifier | Description | Expected Input | +|-------------------------|------------------------------------------------------------|-------------------| +| `common.names.name` | Expand the name of the chart or use `.Values.nameOverride` | `.` Chart context | +| `common.names.fullname` | Create a default fully qualified app name. | `.` Chart context | +| `common.names.chart` | Chart name plus version | `.` Chart context | + +### Secrets + +| Helper identifier | Description | Expected Input | +|---------------------------|--------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `common.secrets.name` | Generate the name of the secret. | `dict "existingSecret" .Values.path.to.the.existingSecret "defaultNameSuffix" "mySuffix" "context" $` see [ExistingSecret](#existingsecret) for the structure. | +| `common.secrets.key` | Generate secret key. | `dict "existingSecret" .Values.path.to.the.existingSecret "key" "keyName"` see [ExistingSecret](#existingsecret) for the structure. | +| `common.passwords.manage` | Generate secret password or retrieve one if already created. | `dict "secret" "secret-name" "key" "keyName" "providedValues" (list "path.to.password1" "path.to.password2") "length" 10 "strong" false "chartName" "chartName" "context" $`, length, strong and chartNAme fields are optional. | +| `common.secrets.exists` | Returns whether a previous generated secret already exists. 
| `dict "secret" "secret-name" "context" $` | + +### Storage + +| Helper identifier | Description | Expected Input | +|-------------------------------|---------------------------------------|---------------------------------------------------------------------------------------------------------------------| +| `common.storage.class` | Return the proper Storage Class | `dict "persistence" .Values.path.to.the.persistence "global" $`, see [Persistence](#persistence) for the structure. | + +### TplValues + +| Helper identifier | Description | Expected Input | +|---------------------------|----------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------| +| `common.tplvalues.render` | Renders a value that contains template | `dict "value" .Values.path.to.the.Value "context" $`, value is the value should rendered as template, context frequently is the chart context `$` or `.` | + +### Utils + +| Helper identifier | Description | Expected Input | +|--------------------------------|------------------------------------------------------------------------------------------|------------------------------------------------------------------------| +| `common.utils.fieldToEnvVar` | Build environment variable name given a field. | `dict "field" "my-password"` | +| `common.utils.secret.getvalue` | Print instructions to get a secret value. 
| `dict "secret" "secret-name" "field" "secret-value-field" "context" $` | +| `common.utils.getValueFromKey` | Gets a value from `.Values` object given its key path | `dict "key" "path.to.key" "context" $` | +| `common.utils.getKeyFromList` | Returns first `.Values` key with a defined value or first of the list if all non-defined | `dict "keys" (list "path.to.key1" "path.to.key2") "context" $` | + +### Validations + +| Helper identifier | Description | Expected Input | +|--------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `common.validations.values.single.empty` | Validate a value must not be empty. | `dict "valueKey" "path.to.value" "secret" "secret.name" "field" "my-password" "subchart" "subchart" "context" $` secret, field and subchart are optional. In case they are given, the helper will generate a how to get instruction. See [ValidateValue](#validatevalue) | +| `common.validations.values.multiple.empty` | Validate a multiple values must not be empty. It returns a shared error for all the values. | `dict "required" (list $validateValueConf00 $validateValueConf01) "context" $`. See [ValidateValue](#validatevalue) | +| `common.validations.values.mariadb.passwords` | This helper will ensure required password for MariaDB are not empty. It returns a shared error for all the values. | `dict "secret" "mariadb-secret" "subchart" "true" "context" $` subchart field is optional and could be true or false it depends on where you will use mariadb chart and the helper. 
| +| `common.validations.values.postgresql.passwords` | This helper will ensure required password for PostgreSQL are not empty. It returns a shared error for all the values. | `dict "secret" "postgresql-secret" "subchart" "true" "context" $` subchart field is optional and could be true or false it depends on where you will use postgresql chart and the helper. | +| `common.validations.values.redis.passwords` | This helper will ensure required password for Redis™ are not empty. It returns a shared error for all the values. | `dict "secret" "redis-secret" "subchart" "true" "context" $` subchart field is optional and could be true or false it depends on where you will use redis chart and the helper. | +| `common.validations.values.cassandra.passwords` | This helper will ensure required password for Cassandra are not empty. It returns a shared error for all the values. | `dict "secret" "cassandra-secret" "subchart" "true" "context" $` subchart field is optional and could be true or false it depends on where you will use cassandra chart and the helper. | +| `common.validations.values.mongodb.passwords` | This helper will ensure required password for MongoDB® are not empty. It returns a shared error for all the values. | `dict "secret" "mongodb-secret" "subchart" "true" "context" $` subchart field is optional and could be true or false it depends on where you will use mongodb chart and the helper. | + +### Warnings + +| Helper identifier | Description | Expected Input | +|------------------------------|----------------------------------|------------------------------------------------------------| +| `common.warnings.rollingTag` | Warning about using rolling tag. | `ImageRoot` see [ImageRoot](#imageroot) for the structure. 
| + +## Special input schemas + +### ImageRoot + +```yaml +registry: + type: string + description: Docker registry where the image is located + example: docker.io + +repository: + type: string + description: Repository and image name + example: bitnami/nginx + +tag: + type: string + description: image tag + example: 1.16.1-debian-10-r63 + +pullPolicy: + type: string + description: Specify a imagePullPolicy. Defaults to 'Always' if image tag is 'latest', else set to 'IfNotPresent' + +pullSecrets: + type: array + items: + type: string + description: Optionally specify an array of imagePullSecrets (evaluated as templates). + +debug: + type: boolean + description: Set to true if you would like to see extra information on logs + example: false + +## An instance would be: +# registry: docker.io +# repository: bitnami/nginx +# tag: 1.16.1-debian-10-r63 +# pullPolicy: IfNotPresent +# debug: false +``` + +### Persistence + +```yaml +enabled: + type: boolean + description: Whether enable persistence. + example: true + +storageClass: + type: string + description: Ghost data Persistent Volume Storage Class, If set to "-", storageClassName: "" which disables dynamic provisioning. + example: "-" + +accessMode: + type: string + description: Access mode for the Persistent Volume Storage. + example: ReadWriteOnce + +size: + type: string + description: Size the Persistent Volume Storage. + example: 8Gi + +path: + type: string + description: Path to be persisted. + example: /bitnami + +## An instance would be: +# enabled: true +# storageClass: "-" +# accessMode: ReadWriteOnce +# size: 8Gi +# path: /bitnami +``` + +### ExistingSecret + +```yaml +name: + type: string + description: Name of the existing secret. + example: mySecret +keyMapping: + description: Mapping between the expected key name and the name of the key in the existing secret. 
+ type: object + +## An instance would be: +# name: mySecret +# keyMapping: +# password: myPasswordKey +``` + +#### Example of use + +When we store sensitive data for a deployment in a secret, some times we want to give to users the possibility of using theirs existing secrets. + +```yaml +# templates/secret.yaml +--- +apiVersion: v1 +kind: Secret +metadata: + name: {{ include "common.names.fullname" . }} + labels: + app: {{ include "common.names.fullname" . }} +type: Opaque +data: + password: {{ .Values.password | b64enc | quote }} + +# templates/dpl.yaml +--- +... + env: + - name: PASSWORD + valueFrom: + secretKeyRef: + name: {{ include "common.secrets.name" (dict "existingSecret" .Values.existingSecret "context" $) }} + key: {{ include "common.secrets.key" (dict "existingSecret" .Values.existingSecret "key" "password") }} +... + +# values.yaml +--- +name: mySecret +keyMapping: + password: myPasswordKey +``` + +### ValidateValue + +#### NOTES.txt + +```console +{{- $validateValueConf00 := (dict "valueKey" "path.to.value00" "secret" "secretName" "field" "password-00") -}} +{{- $validateValueConf01 := (dict "valueKey" "path.to.value01" "secret" "secretName" "field" "password-01") -}} + +{{ include "common.validations.values.multiple.empty" (dict "required" (list $validateValueConf00 $validateValueConf01) "context" $) }} +``` + +If we force those values to be empty we will see some alerts + +```console +$ helm install test mychart --set path.to.value00="",path.to.value01="" + 'path.to.value00' must not be empty, please add '--set path.to.value00=$PASSWORD_00' to the command. To get the current value: + + export PASSWORD_00=$(kubectl get secret --namespace default secretName -o jsonpath="{.data.password-00}" | base64 --decode) + + 'path.to.value01' must not be empty, please add '--set path.to.value01=$PASSWORD_01' to the command. 
To get the current value: + + export PASSWORD_01=$(kubectl get secret --namespace default secretName -o jsonpath="{.data.password-01}" | base64 --decode) +``` + +## Upgrading + +### To 1.0.0 + +[On November 13, 2020, Helm v2 support was formally finished](https://github.com/helm/charts#status-of-the-project), this major version is the result of the required changes applied to the Helm Chart to be able to incorporate the different features added in Helm v3 and to be consistent with the Helm project itself regarding the Helm v2 EOL. + +**What changes were introduced in this major version?** + +- Previous versions of this Helm Chart use `apiVersion: v1` (installable by both Helm 2 and 3), this Helm Chart was updated to `apiVersion: v2` (installable by Helm 3 only). [Here](https://helm.sh/docs/topics/charts/#the-apiversion-field) you can find more information about the `apiVersion` field. +- Use `type: library`. [Here](https://v3.helm.sh/docs/faq/#library-chart-support) you can find more information. 
+- The different fields present in the *Chart.yaml* file have been ordered alphabetically in a homogeneous way for all the Bitnami Helm Charts
+
+**Considerations when upgrading to this version**
+
+- If you want to upgrade to this version from a previous one installed with Helm v3, you shouldn't face any issues
+- If you want to upgrade to this version using Helm v2, this scenario is not supported as this version doesn't support Helm v2 anymore
+- If you installed the previous version with Helm v2 and want to upgrade to this version with Helm v3, please refer to the [official Helm documentation](https://helm.sh/docs/topics/v2_v3_migration/#migration-use-cases) about migrating from Helm v2 to v3
+
+**Useful links**
+
+- https://docs.bitnami.com/tutorials/resolve-helm2-helm3-post-migration-issues/
+- https://helm.sh/docs/topics/v2_v3_migration/
+- https://helm.sh/blog/migrate-from-helm-v2-to-helm-v3/
+
+## License
+
+Copyright © 2022 Bitnami
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_affinities.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/_affinities.tpl new file mode 100644 index 000000000000..189ea403d558 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/_affinities.tpl @@ -0,0 +1,102 @@ +{{/* vim: set filetype=mustache: */}} + +{{/* +Return a soft nodeAffinity definition +{{ include "common.affinities.nodes.soft" (dict "key" "FOO" "values" (list "BAR" "BAZ")) -}} +*/}} +{{- define "common.affinities.nodes.soft" -}} +preferredDuringSchedulingIgnoredDuringExecution: + - preference: + matchExpressions: + - key: {{ .key }} + operator: In + values: + {{- range .values }} + - {{ . | quote }} + {{- end }} + weight: 1 +{{- end -}} + +{{/* +Return a hard nodeAffinity definition +{{ include "common.affinities.nodes.hard" (dict "key" "FOO" "values" (list "BAR" "BAZ")) -}} +*/}} +{{- define "common.affinities.nodes.hard" -}} +requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: {{ .key }} + operator: In + values: + {{- range .values }} + - {{ . | quote }} + {{- end }} +{{- end -}} + +{{/* +Return a nodeAffinity definition +{{ include "common.affinities.nodes" (dict "type" "soft" "key" "FOO" "values" (list "BAR" "BAZ")) -}} +*/}} +{{- define "common.affinities.nodes" -}} + {{- if eq .type "soft" }} + {{- include "common.affinities.nodes.soft" . -}} + {{- else if eq .type "hard" }} + {{- include "common.affinities.nodes.hard" . 
-}} + {{- end -}} +{{- end -}} + +{{/* +Return a soft podAffinity/podAntiAffinity definition +{{ include "common.affinities.pods.soft" (dict "component" "FOO" "extraMatchLabels" .Values.extraMatchLabels "context" $) -}} +*/}} +{{- define "common.affinities.pods.soft" -}} +{{- $component := default "" .component -}} +{{- $extraMatchLabels := default (dict) .extraMatchLabels -}} +preferredDuringSchedulingIgnoredDuringExecution: + - podAffinityTerm: + labelSelector: + matchLabels: {{- (include "common.labels.matchLabels" .context) | nindent 10 }} + {{- if not (empty $component) }} + {{ printf "app.kubernetes.io/component: %s" $component }} + {{- end }} + {{- range $key, $value := $extraMatchLabels }} + {{ $key }}: {{ $value | quote }} + {{- end }} + namespaces: + - {{ .context.Release.Namespace | quote }} + topologyKey: kubernetes.io/hostname + weight: 1 +{{- end -}} + +{{/* +Return a hard podAffinity/podAntiAffinity definition +{{ include "common.affinities.pods.hard" (dict "component" "FOO" "extraMatchLabels" .Values.extraMatchLabels "context" $) -}} +*/}} +{{- define "common.affinities.pods.hard" -}} +{{- $component := default "" .component -}} +{{- $extraMatchLabels := default (dict) .extraMatchLabels -}} +requiredDuringSchedulingIgnoredDuringExecution: + - labelSelector: + matchLabels: {{- (include "common.labels.matchLabels" .context) | nindent 8 }} + {{- if not (empty $component) }} + {{ printf "app.kubernetes.io/component: %s" $component }} + {{- end }} + {{- range $key, $value := $extraMatchLabels }} + {{ $key }}: {{ $value | quote }} + {{- end }} + namespaces: + - {{ .context.Release.Namespace | quote }} + topologyKey: kubernetes.io/hostname +{{- end -}} + +{{/* +Return a podAffinity/podAntiAffinity definition +{{ include "common.affinities.pods" (dict "type" "soft" "key" "FOO" "values" (list "BAR" "BAZ")) -}} +*/}} +{{- define "common.affinities.pods" -}} + {{- if eq .type "soft" }} + {{- include "common.affinities.pods.soft" . 
-}} + {{- else if eq .type "hard" }} + {{- include "common.affinities.pods.hard" . -}} + {{- end -}} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_capabilities.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/_capabilities.tpl new file mode 100644 index 000000000000..b94212bbe77c --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/_capabilities.tpl @@ -0,0 +1,128 @@ +{{/* vim: set filetype=mustache: */}} + +{{/* +Return the target Kubernetes version +*/}} +{{- define "common.capabilities.kubeVersion" -}} +{{- if .Values.global }} + {{- if .Values.global.kubeVersion }} + {{- .Values.global.kubeVersion -}} + {{- else }} + {{- default .Capabilities.KubeVersion.Version .Values.kubeVersion -}} + {{- end -}} +{{- else }} +{{- default .Capabilities.KubeVersion.Version .Values.kubeVersion -}} +{{- end -}} +{{- end -}} + +{{/* +Return the appropriate apiVersion for poddisruptionbudget. +*/}} +{{- define "common.capabilities.policy.apiVersion" -}} +{{- if semverCompare "<1.21-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "policy/v1beta1" -}} +{{- else -}} +{{- print "policy/v1" -}} +{{- end -}} +{{- end -}} + +{{/* +Return the appropriate apiVersion for networkpolicy. +*/}} +{{- define "common.capabilities.networkPolicy.apiVersion" -}} +{{- if semverCompare "<1.7-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "extensions/v1beta1" -}} +{{- else -}} +{{- print "networking.k8s.io/v1" -}} +{{- end -}} +{{- end -}} + +{{/* +Return the appropriate apiVersion for cronjob. +*/}} +{{- define "common.capabilities.cronjob.apiVersion" -}} +{{- if semverCompare "<1.21-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "batch/v1beta1" -}} +{{- else -}} +{{- print "batch/v1" -}} +{{- end -}} +{{- end -}} + +{{/* +Return the appropriate apiVersion for deployment. 
+*/}} +{{- define "common.capabilities.deployment.apiVersion" -}} +{{- if semverCompare "<1.14-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "extensions/v1beta1" -}} +{{- else -}} +{{- print "apps/v1" -}} +{{- end -}} +{{- end -}} + +{{/* +Return the appropriate apiVersion for statefulset. +*/}} +{{- define "common.capabilities.statefulset.apiVersion" -}} +{{- if semverCompare "<1.14-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "apps/v1beta1" -}} +{{- else -}} +{{- print "apps/v1" -}} +{{- end -}} +{{- end -}} + +{{/* +Return the appropriate apiVersion for ingress. +*/}} +{{- define "common.capabilities.ingress.apiVersion" -}} +{{- if .Values.ingress -}} +{{- if .Values.ingress.apiVersion -}} +{{- .Values.ingress.apiVersion -}} +{{- else if semverCompare "<1.14-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "extensions/v1beta1" -}} +{{- else if semverCompare "<1.19-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "networking.k8s.io/v1beta1" -}} +{{- else -}} +{{- print "networking.k8s.io/v1" -}} +{{- end }} +{{- else if semverCompare "<1.14-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "extensions/v1beta1" -}} +{{- else if semverCompare "<1.19-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "networking.k8s.io/v1beta1" -}} +{{- else -}} +{{- print "networking.k8s.io/v1" -}} +{{- end -}} +{{- end -}} + +{{/* +Return the appropriate apiVersion for RBAC resources. +*/}} +{{- define "common.capabilities.rbac.apiVersion" -}} +{{- if semverCompare "<1.17-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "rbac.authorization.k8s.io/v1beta1" -}} +{{- else -}} +{{- print "rbac.authorization.k8s.io/v1" -}} +{{- end -}} +{{- end -}} + +{{/* +Return the appropriate apiVersion for CRDs. +*/}} +{{- define "common.capabilities.crd.apiVersion" -}} +{{- if semverCompare "<1.19-0" (include "common.capabilities.kubeVersion" .) 
-}} +{{- print "apiextensions.k8s.io/v1beta1" -}} +{{- else -}} +{{- print "apiextensions.k8s.io/v1" -}} +{{- end -}} +{{- end -}} + +{{/* +Returns true if the used Helm version is 3.3+. +A way to check the used Helm version was not introduced until version 3.3.0 with .Capabilities.HelmVersion, which contains an additional "{}}" structure. +This check is introduced as a regexMatch instead of {{ if .Capabilities.HelmVersion }} because checking for the key HelmVersion in <3.3 results in a "interface not found" error. +**To be removed when the catalog's minimun Helm version is 3.3** +*/}} +{{- define "common.capabilities.supportsHelmVersion" -}} +{{- if regexMatch "{(v[0-9])*[^}]*}}$" (.Capabilities | toString ) }} + {{- true -}} +{{- end -}} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_errors.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/_errors.tpl new file mode 100644 index 000000000000..a79cc2e322e0 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/_errors.tpl @@ -0,0 +1,23 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Through error when upgrading using empty passwords values that must not be empty. + +Usage: +{{- $validationError00 := include "common.validations.values.single.empty" (dict "valueKey" "path.to.password00" "secret" "secretName" "field" "password-00") -}} +{{- $validationError01 := include "common.validations.values.single.empty" (dict "valueKey" "path.to.password01" "secret" "secretName" "field" "password-01") -}} +{{ include "common.errors.upgrade.passwords.empty" (dict "validationErrors" (list $validationError00 $validationError01) "context" $) }} + +Required password params: + - validationErrors - String - Required. List of validation strings to be return, if it is empty it won't throw error. + - context - Context - Required. Parent context. 
+*/}} +{{- define "common.errors.upgrade.passwords.empty" -}} + {{- $validationErrors := join "" .validationErrors -}} + {{- if and $validationErrors .context.Release.IsUpgrade -}} + {{- $errorString := "\nPASSWORDS ERROR: You must provide your current passwords when upgrading the release." -}} + {{- $errorString = print $errorString "\n Note that even after reinstallation, old credentials may be needed as they may be kept in persistent volume claims." -}} + {{- $errorString = print $errorString "\n Further information can be obtained at https://docs.bitnami.com/general/how-to/troubleshoot-helm-chart-issues/#credential-errors-while-upgrading-chart-releases" -}} + {{- $errorString = print $errorString "\n%s" -}} + {{- printf $errorString $validationErrors | fail -}} + {{- end -}} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_images.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/_images.tpl new file mode 100644 index 000000000000..42ffbc7227eb --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/_images.tpl @@ -0,0 +1,75 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Return the proper image name +{{ include "common.images.image" ( dict "imageRoot" .Values.path.to.the.image "global" $) }} +*/}} +{{- define "common.images.image" -}} +{{- $registryName := .imageRoot.registry -}} +{{- $repositoryName := .imageRoot.repository -}} +{{- $tag := .imageRoot.tag | toString -}} +{{- if .global }} + {{- if .global.imageRegistry }} + {{- $registryName = .global.imageRegistry -}} + {{- end -}} +{{- end -}} +{{- if $registryName }} +{{- printf "%s/%s:%s" $registryName $repositoryName $tag -}} +{{- else -}} +{{- printf "%s:%s" $repositoryName $tag -}} +{{- end -}} +{{- end -}} + +{{/* +Return the proper Docker Image Registry Secret Names (deprecated: use common.images.renderPullSecrets instead) +{{ include "common.images.pullSecrets" ( dict "images" (list .Values.path.to.the.image1, .Values.path.to.the.image2) 
"global" .Values.global) }} +*/}} +{{- define "common.images.pullSecrets" -}} + {{- $pullSecrets := list }} + + {{- if .global }} + {{- range .global.imagePullSecrets -}} + {{- $pullSecrets = append $pullSecrets . -}} + {{- end -}} + {{- end -}} + + {{- range .images -}} + {{- range .pullSecrets -}} + {{- $pullSecrets = append $pullSecrets . -}} + {{- end -}} + {{- end -}} + + {{- if (not (empty $pullSecrets)) }} +imagePullSecrets: + {{- range $pullSecrets }} + - name: {{ . }} + {{- end }} + {{- end }} +{{- end -}} + +{{/* +Return the proper Docker Image Registry Secret Names evaluating values as templates +{{ include "common.images.renderPullSecrets" ( dict "images" (list .Values.path.to.the.image1, .Values.path.to.the.image2) "context" $) }} +*/}} +{{- define "common.images.renderPullSecrets" -}} + {{- $pullSecrets := list }} + {{- $context := .context }} + + {{- if $context.Values.global }} + {{- range $context.Values.global.imagePullSecrets -}} + {{- $pullSecrets = append $pullSecrets (include "common.tplvalues.render" (dict "value" . "context" $context)) -}} + {{- end -}} + {{- end -}} + + {{- range .images -}} + {{- range .pullSecrets -}} + {{- $pullSecrets = append $pullSecrets (include "common.tplvalues.render" (dict "value" . "context" $context)) -}} + {{- end -}} + {{- end -}} + + {{- if (not (empty $pullSecrets)) }} +imagePullSecrets: + {{- range $pullSecrets }} + - name: {{ . }} + {{- end }} + {{- end }} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_ingress.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/_ingress.tpl new file mode 100644 index 000000000000..8caf73a61082 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/_ingress.tpl @@ -0,0 +1,68 @@ +{{/* vim: set filetype=mustache: */}} + +{{/* +Generate backend entry that is compatible with all Kubernetes API versions. 
+ +Usage: +{{ include "common.ingress.backend" (dict "serviceName" "backendName" "servicePort" "backendPort" "context" $) }} + +Params: + - serviceName - String. Name of an existing service backend + - servicePort - String/Int. Port name (or number) of the service. It will be translated to different yaml depending if it is a string or an integer. + - context - Dict - Required. The context for the template evaluation. +*/}} +{{- define "common.ingress.backend" -}} +{{- $apiVersion := (include "common.capabilities.ingress.apiVersion" .context) -}} +{{- if or (eq $apiVersion "extensions/v1beta1") (eq $apiVersion "networking.k8s.io/v1beta1") -}} +serviceName: {{ .serviceName }} +servicePort: {{ .servicePort }} +{{- else -}} +service: + name: {{ .serviceName }} + port: + {{- if typeIs "string" .servicePort }} + name: {{ .servicePort }} + {{- else if or (typeIs "int" .servicePort) (typeIs "float64" .servicePort) }} + number: {{ .servicePort | int }} + {{- end }} +{{- end -}} +{{- end -}} + +{{/* +Print "true" if the API pathType field is supported +Usage: +{{ include "common.ingress.supportsPathType" . }} +*/}} +{{- define "common.ingress.supportsPathType" -}} +{{- if (semverCompare "<1.18-0" (include "common.capabilities.kubeVersion" .)) -}} +{{- print "false" -}} +{{- else -}} +{{- print "true" -}} +{{- end -}} +{{- end -}} + +{{/* +Returns true if the ingressClassname field is supported +Usage: +{{ include "common.ingress.supportsIngressClassname" . }} +*/}} +{{- define "common.ingress.supportsIngressClassname" -}} +{{- if semverCompare "<1.18-0" (include "common.capabilities.kubeVersion" .) 
-}} +{{- print "false" -}} +{{- else -}} +{{- print "true" -}} +{{- end -}} +{{- end -}} + +{{/* +Return true if cert-manager required annotations for TLS signed +certificates are set in the Ingress annotations +Ref: https://cert-manager.io/docs/usage/ingress/#supported-annotations +Usage: +{{ include "common.ingress.certManagerRequest" ( dict "annotations" .Values.path.to.the.ingress.annotations ) }} +*/}} +{{- define "common.ingress.certManagerRequest" -}} +{{ if or (hasKey .annotations "cert-manager.io/cluster-issuer") (hasKey .annotations "cert-manager.io/issuer") }} + {{- true -}} +{{- end -}} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_labels.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/_labels.tpl new file mode 100644 index 000000000000..252066c7e2b3 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/_labels.tpl @@ -0,0 +1,18 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Kubernetes standard labels +*/}} +{{- define "common.labels.standard" -}} +app.kubernetes.io/name: {{ include "common.names.name" . }} +helm.sh/chart: {{ include "common.names.chart" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end -}} + +{{/* +Labels to use on deploy.spec.selector.matchLabels and svc.spec.selector +*/}} +{{- define "common.labels.matchLabels" -}} +app.kubernetes.io/name: {{ include "common.names.name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_names.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/_names.tpl new file mode 100644 index 000000000000..cf0323171f39 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/_names.tpl @@ -0,0 +1,52 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Expand the name of the chart. 
+*/}} +{{- define "common.names.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "common.names.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "common.names.fullname" -}} +{{- if .Values.fullnameOverride -}} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- $name := default .Chart.Name .Values.nameOverride -}} +{{- if contains $name .Release.Name -}} +{{- .Release.Name | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} +{{- end -}} +{{- end -}} +{{- end -}} + +{{/* +Create a default fully qualified dependency name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. 
+Usage: +{{ include "common.names.dependency.fullname" (dict "chartName" "dependency-chart-name" "chartValues" .Values.dependency-chart "context" $) }} +*/}} +{{- define "common.names.dependency.fullname" -}} +{{- if .chartValues.fullnameOverride -}} +{{- .chartValues.fullnameOverride | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- $name := default .chartName .chartValues.nameOverride -}} +{{- if contains $name .context.Release.Name -}} +{{- .context.Release.Name | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- printf "%s-%s" .context.Release.Name $name | trunc 63 | trimSuffix "-" -}} +{{- end -}} +{{- end -}} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_secrets.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/_secrets.tpl new file mode 100644 index 000000000000..a1afc1195996 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/_secrets.tpl @@ -0,0 +1,131 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Generate secret name. + +Usage: +{{ include "common.secrets.name" (dict "existingSecret" .Values.path.to.the.existingSecret "defaultNameSuffix" "mySuffix" "context" $) }} + +Params: + - existingSecret - ExistingSecret/String - Optional. The path to the existing secrets in the values.yaml given by the user + to be used instead of the default one. Allows for it to be of type String (just the secret name) for backwards compatibility. + +info: https://github.com/bitnami/charts/tree/master/bitnami/common#existingsecret + - defaultNameSuffix - String - Optional. It is used only if we have several secrets in the same deployment. + - context - Dict - Required. The context for the template evaluation. +*/}} +{{- define "common.secrets.name" -}} +{{- $name := (include "common.names.fullname" .context) -}} + +{{- if .defaultNameSuffix -}} +{{- $name = printf "%s-%s" $name .defaultNameSuffix | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{- with .existingSecret -}} +{{- if not (typeIs "string" .) 
-}} +{{- with .name -}} +{{- $name = . -}} +{{- end -}} +{{- else -}} +{{- $name = . -}} +{{- end -}} +{{- end -}} + +{{- printf "%s" $name -}} +{{- end -}} + +{{/* +Generate secret key. + +Usage: +{{ include "common.secrets.key" (dict "existingSecret" .Values.path.to.the.existingSecret "key" "keyName") }} + +Params: + - existingSecret - ExistingSecret/String - Optional. The path to the existing secrets in the values.yaml given by the user + to be used instead of the default one. Allows for it to be of type String (just the secret name) for backwards compatibility. + +info: https://github.com/bitnami/charts/tree/master/bitnami/common#existingsecret + - key - String - Required. Name of the key in the secret. +*/}} +{{- define "common.secrets.key" -}} +{{- $key := .key -}} + +{{- if .existingSecret -}} + {{- if not (typeIs "string" .existingSecret) -}} + {{- if .existingSecret.keyMapping -}} + {{- $key = index .existingSecret.keyMapping $.key -}} + {{- end -}} + {{- end }} +{{- end -}} + +{{- printf "%s" $key -}} +{{- end -}} + +{{/* +Generate secret password or retrieve one if already created. + +Usage: +{{ include "common.secrets.passwords.manage" (dict "secret" "secret-name" "key" "keyName" "providedValues" (list "path.to.password1" "path.to.password2") "length" 10 "strong" false "chartName" "chartName" "context" $) }} + +Params: + - secret - String - Required - Name of the 'Secret' resource where the password is stored. + - key - String - Required - Name of the key in the secret. + - providedValues - List - Required - The path to the validating value in the values.yaml, e.g: "mysql.password". Will pick first parameter with a defined value. + - length - int - Optional - Length of the generated random password. + - strong - Boolean - Optional - Whether to add symbols to the generated random password. + - chartName - String - Optional - Name of the chart used when said chart is deployed as a subchart. + - context - Context - Required - Parent context. 
+*/}} +{{- define "common.secrets.passwords.manage" -}} + +{{- $password := "" }} +{{- $subchart := "" }} +{{- $chartName := default "" .chartName }} +{{- $passwordLength := default 10 .length }} +{{- $providedPasswordKey := include "common.utils.getKeyFromList" (dict "keys" .providedValues "context" $.context) }} +{{- $providedPasswordValue := include "common.utils.getValueFromKey" (dict "key" $providedPasswordKey "context" $.context) }} +{{- $secretData := (lookup "v1" "Secret" $.context.Release.Namespace .secret).data }} +{{- if $secretData }} + {{- if hasKey $secretData .key }} + {{- $password = index $secretData .key }} + {{- else }} + {{- printf "\nPASSWORDS ERROR: The secret \"%s\" does not contain the key \"%s\"\n" .secret .key | fail -}} + {{- end -}} +{{- else if $providedPasswordValue }} + {{- $password = $providedPasswordValue | toString | b64enc | quote }} +{{- else }} + + {{- if .context.Values.enabled }} + {{- $subchart = $chartName }} + {{- end -}} + + {{- $requiredPassword := dict "valueKey" $providedPasswordKey "secret" .secret "field" .key "subchart" $subchart "context" $.context -}} + {{- $requiredPasswordError := include "common.validations.values.single.empty" $requiredPassword -}} + {{- $passwordValidationErrors := list $requiredPasswordError -}} + {{- include "common.errors.upgrade.passwords.empty" (dict "validationErrors" $passwordValidationErrors "context" $.context) -}} + + {{- if .strong }} + {{- $subStr := list (lower (randAlpha 1)) (randNumeric 1) (upper (randAlpha 1)) | join "_" }} + {{- $password = randAscii $passwordLength }} + {{- $password = regexReplaceAllLiteral "\\W" $password "@" | substr 5 $passwordLength }} + {{- $password = printf "%s%s" $subStr $password | toString | shuffle | b64enc | quote }} + {{- else }} + {{- $password = randAlphaNum $passwordLength | b64enc | quote }} + {{- end }} +{{- end -}} +{{- printf "%s" $password -}} +{{- end -}} + +{{/* +Returns whether a previous generated secret already exists + +Usage: +{{ 
include "common.secrets.exists" (dict "secret" "secret-name" "context" $) }} + +Params: + - secret - String - Required - Name of the 'Secret' resource where the password is stored. + - context - Context - Required - Parent context. +*/}} +{{- define "common.secrets.exists" -}} +{{- $secret := (lookup "v1" "Secret" $.context.Release.Namespace .secret) }} +{{- if $secret }} + {{- true -}} +{{- end -}} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_storage.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/_storage.tpl new file mode 100644 index 000000000000..60e2a844f6eb --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/_storage.tpl @@ -0,0 +1,23 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Return the proper Storage Class +{{ include "common.storage.class" ( dict "persistence" .Values.path.to.the.persistence "global" $) }} +*/}} +{{- define "common.storage.class" -}} + +{{- $storageClass := .persistence.storageClass -}} +{{- if .global -}} + {{- if .global.storageClass -}} + {{- $storageClass = .global.storageClass -}} + {{- end -}} +{{- end -}} + +{{- if $storageClass -}} + {{- if (eq "-" $storageClass) -}} + {{- printf "storageClassName: \"\"" -}} + {{- else }} + {{- printf "storageClassName: %s" $storageClass -}} + {{- end -}} +{{- end -}} + +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_tplvalues.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/_tplvalues.tpl new file mode 100644 index 000000000000..2db166851bb5 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/_tplvalues.tpl @@ -0,0 +1,13 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Renders a value that contains template. 
+Usage: +{{ include "common.tplvalues.render" ( dict "value" .Values.path.to.the.Value "context" $) }} +*/}} +{{- define "common.tplvalues.render" -}} + {{- if typeIs "string" .value }} + {{- tpl .value .context }} + {{- else }} + {{- tpl (.value | toYaml) .context }} + {{- end }} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_utils.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/_utils.tpl new file mode 100644 index 000000000000..ea083a249f80 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/_utils.tpl @@ -0,0 +1,62 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Print instructions to get a secret value. +Usage: +{{ include "common.utils.secret.getvalue" (dict "secret" "secret-name" "field" "secret-value-field" "context" $) }} +*/}} +{{- define "common.utils.secret.getvalue" -}} +{{- $varname := include "common.utils.fieldToEnvVar" . -}} +export {{ $varname }}=$(kubectl get secret --namespace {{ .context.Release.Namespace | quote }} {{ .secret }} -o jsonpath="{.data.{{ .field }}}" | base64 --decode) +{{- end -}} + +{{/* +Build env var name given a field +Usage: +{{ include "common.utils.fieldToEnvVar" dict "field" "my-password" }} +*/}} +{{- define "common.utils.fieldToEnvVar" -}} + {{- $fieldNameSplit := splitList "-" .field -}} + {{- $upperCaseFieldNameSplit := list -}} + + {{- range $fieldNameSplit -}} + {{- $upperCaseFieldNameSplit = append $upperCaseFieldNameSplit ( upper . ) -}} + {{- end -}} + + {{ join "_" $upperCaseFieldNameSplit }} +{{- end -}} + +{{/* +Gets a value from .Values given +Usage: +{{ include "common.utils.getValueFromKey" (dict "key" "path.to.key" "context" $) }} +*/}} +{{- define "common.utils.getValueFromKey" -}} +{{- $splitKey := splitList "." 
.key -}} +{{- $value := "" -}} +{{- $latestObj := $.context.Values -}} +{{- range $splitKey -}} + {{- if not $latestObj -}} + {{- printf "please review the entire path of '%s' exists in values" $.key | fail -}} + {{- end -}} + {{- $value = ( index $latestObj . ) -}} + {{- $latestObj = $value -}} +{{- end -}} +{{- printf "%v" (default "" $value) -}} +{{- end -}} + +{{/* +Returns first .Values key with a defined value or first of the list if all non-defined +Usage: +{{ include "common.utils.getKeyFromList" (dict "keys" (list "path.to.key1" "path.to.key2") "context" $) }} +*/}} +{{- define "common.utils.getKeyFromList" -}} +{{- $key := first .keys -}} +{{- $reverseKeys := reverse .keys }} +{{- range $reverseKeys }} + {{- $value := include "common.utils.getValueFromKey" (dict "key" . "context" $.context ) }} + {{- if $value -}} + {{- $key = . }} + {{- end -}} +{{- end -}} +{{- printf "%s" $key -}} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_warnings.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/_warnings.tpl new file mode 100644 index 000000000000..ae10fa41ee7d --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/_warnings.tpl @@ -0,0 +1,14 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Warning about using rolling tag. +Usage: +{{ include "common.warnings.rollingTag" .Values.path.to.the.imageRoot }} +*/}} +{{- define "common.warnings.rollingTag" -}} + +{{- if and (contains "bitnami/" .repository) (not (.tag | toString | regexFind "-r\\d+$|sha256:")) }} +WARNING: Rolling tag detected ({{ .repository }}:{{ .tag }}), please note that it is strongly recommended to avoid using rolling tags in a production environment. 
++info https://docs.bitnami.com/containers/how-to/understand-rolling-tags-containers/ +{{- end }} + +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_cassandra.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_cassandra.tpl new file mode 100644 index 000000000000..ded1ae3bcad7 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_cassandra.tpl @@ -0,0 +1,72 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Validate Cassandra required passwords are not empty. + +Usage: +{{ include "common.validations.values.cassandra.passwords" (dict "secret" "secretName" "subchart" false "context" $) }} +Params: + - secret - String - Required. Name of the secret where Cassandra values are stored, e.g: "cassandra-passwords-secret" + - subchart - Boolean - Optional. Whether Cassandra is used as subchart or not. Default: false +*/}} +{{- define "common.validations.values.cassandra.passwords" -}} + {{- $existingSecret := include "common.cassandra.values.existingSecret" . -}} + {{- $enabled := include "common.cassandra.values.enabled" . -}} + {{- $dbUserPrefix := include "common.cassandra.values.key.dbUser" . -}} + {{- $valueKeyPassword := printf "%s.password" $dbUserPrefix -}} + + {{- if and (or (not $existingSecret) (eq $existingSecret "\"\"")) (eq $enabled "true") -}} + {{- $requiredPasswords := list -}} + + {{- $requiredPassword := dict "valueKey" $valueKeyPassword "secret" .secret "field" "cassandra-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredPassword -}} + + {{- include "common.validations.values.multiple.empty" (dict "required" $requiredPasswords "context" .context) -}} + + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for existingSecret. + +Usage: +{{ include "common.cassandra.values.existingSecret" (dict "context" $) }} +Params: + - subchart - Boolean - Optional. Whether Cassandra is used as subchart or not. 
Default: false +*/}} +{{- define "common.cassandra.values.existingSecret" -}} + {{- if .subchart -}} + {{- .context.Values.cassandra.dbUser.existingSecret | quote -}} + {{- else -}} + {{- .context.Values.dbUser.existingSecret | quote -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for enabled cassandra. + +Usage: +{{ include "common.cassandra.values.enabled" (dict "context" $) }} +*/}} +{{- define "common.cassandra.values.enabled" -}} + {{- if .subchart -}} + {{- printf "%v" .context.Values.cassandra.enabled -}} + {{- else -}} + {{- printf "%v" (not .context.Values.enabled) -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for the key dbUser + +Usage: +{{ include "common.cassandra.values.key.dbUser" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether Cassandra is used as subchart or not. Default: false +*/}} +{{- define "common.cassandra.values.key.dbUser" -}} + {{- if .subchart -}} + cassandra.dbUser + {{- else -}} + dbUser + {{- end -}} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_mariadb.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_mariadb.tpl new file mode 100644 index 000000000000..b6906ff77b72 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_mariadb.tpl @@ -0,0 +1,103 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Validate MariaDB required passwords are not empty. + +Usage: +{{ include "common.validations.values.mariadb.passwords" (dict "secret" "secretName" "subchart" false "context" $) }} +Params: + - secret - String - Required. Name of the secret where MariaDB values are stored, e.g: "mysql-passwords-secret" + - subchart - Boolean - Optional. Whether MariaDB is used as subchart or not. 
Default: false +*/}} +{{- define "common.validations.values.mariadb.passwords" -}} + {{- $existingSecret := include "common.mariadb.values.auth.existingSecret" . -}} + {{- $enabled := include "common.mariadb.values.enabled" . -}} + {{- $architecture := include "common.mariadb.values.architecture" . -}} + {{- $authPrefix := include "common.mariadb.values.key.auth" . -}} + {{- $valueKeyRootPassword := printf "%s.rootPassword" $authPrefix -}} + {{- $valueKeyUsername := printf "%s.username" $authPrefix -}} + {{- $valueKeyPassword := printf "%s.password" $authPrefix -}} + {{- $valueKeyReplicationPassword := printf "%s.replicationPassword" $authPrefix -}} + + {{- if and (or (not $existingSecret) (eq $existingSecret "\"\"")) (eq $enabled "true") -}} + {{- $requiredPasswords := list -}} + + {{- $requiredRootPassword := dict "valueKey" $valueKeyRootPassword "secret" .secret "field" "mariadb-root-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredRootPassword -}} + + {{- $valueUsername := include "common.utils.getValueFromKey" (dict "key" $valueKeyUsername "context" .context) }} + {{- if not (empty $valueUsername) -}} + {{- $requiredPassword := dict "valueKey" $valueKeyPassword "secret" .secret "field" "mariadb-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredPassword -}} + {{- end -}} + + {{- if (eq $architecture "replication") -}} + {{- $requiredReplicationPassword := dict "valueKey" $valueKeyReplicationPassword "secret" .secret "field" "mariadb-replication-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredReplicationPassword -}} + {{- end -}} + + {{- include "common.validations.values.multiple.empty" (dict "required" $requiredPasswords "context" .context) -}} + + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for existingSecret. + +Usage: +{{ include "common.mariadb.values.auth.existingSecret" (dict "context" $) }} +Params: + - subchart - Boolean - Optional. 
Whether MariaDB is used as subchart or not. Default: false +*/}} +{{- define "common.mariadb.values.auth.existingSecret" -}} + {{- if .subchart -}} + {{- .context.Values.mariadb.auth.existingSecret | quote -}} + {{- else -}} + {{- .context.Values.auth.existingSecret | quote -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for enabled mariadb. + +Usage: +{{ include "common.mariadb.values.enabled" (dict "context" $) }} +*/}} +{{- define "common.mariadb.values.enabled" -}} + {{- if .subchart -}} + {{- printf "%v" .context.Values.mariadb.enabled -}} + {{- else -}} + {{- printf "%v" (not .context.Values.enabled) -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for architecture + +Usage: +{{ include "common.mariadb.values.architecture" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether MariaDB is used as subchart or not. Default: false +*/}} +{{- define "common.mariadb.values.architecture" -}} + {{- if .subchart -}} + {{- .context.Values.mariadb.architecture -}} + {{- else -}} + {{- .context.Values.architecture -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for the key auth + +Usage: +{{ include "common.mariadb.values.key.auth" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether MariaDB is used as subchart or not. 
Default: false +*/}} +{{- define "common.mariadb.values.key.auth" -}} + {{- if .subchart -}} + mariadb.auth + {{- else -}} + auth + {{- end -}} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_mongodb.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_mongodb.tpl new file mode 100644 index 000000000000..a071ea4d3127 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_mongodb.tpl @@ -0,0 +1,108 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Validate MongoDB® required passwords are not empty. + +Usage: +{{ include "common.validations.values.mongodb.passwords" (dict "secret" "secretName" "subchart" false "context" $) }} +Params: + - secret - String - Required. Name of the secret where MongoDB® values are stored, e.g: "mongodb-passwords-secret" + - subchart - Boolean - Optional. Whether MongoDB® is used as subchart or not. Default: false +*/}} +{{- define "common.validations.values.mongodb.passwords" -}} + {{- $existingSecret := include "common.mongodb.values.auth.existingSecret" . -}} + {{- $enabled := include "common.mongodb.values.enabled" . -}} + {{- $authPrefix := include "common.mongodb.values.key.auth" . -}} + {{- $architecture := include "common.mongodb.values.architecture" . 
-}} + {{- $valueKeyRootPassword := printf "%s.rootPassword" $authPrefix -}} + {{- $valueKeyUsername := printf "%s.username" $authPrefix -}} + {{- $valueKeyDatabase := printf "%s.database" $authPrefix -}} + {{- $valueKeyPassword := printf "%s.password" $authPrefix -}} + {{- $valueKeyReplicaSetKey := printf "%s.replicaSetKey" $authPrefix -}} + {{- $valueKeyAuthEnabled := printf "%s.enabled" $authPrefix -}} + + {{- $authEnabled := include "common.utils.getValueFromKey" (dict "key" $valueKeyAuthEnabled "context" .context) -}} + + {{- if and (or (not $existingSecret) (eq $existingSecret "\"\"")) (eq $enabled "true") (eq $authEnabled "true") -}} + {{- $requiredPasswords := list -}} + + {{- $requiredRootPassword := dict "valueKey" $valueKeyRootPassword "secret" .secret "field" "mongodb-root-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredRootPassword -}} + + {{- $valueUsername := include "common.utils.getValueFromKey" (dict "key" $valueKeyUsername "context" .context) }} + {{- $valueDatabase := include "common.utils.getValueFromKey" (dict "key" $valueKeyDatabase "context" .context) }} + {{- if and $valueUsername $valueDatabase -}} + {{- $requiredPassword := dict "valueKey" $valueKeyPassword "secret" .secret "field" "mongodb-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredPassword -}} + {{- end -}} + + {{- if (eq $architecture "replicaset") -}} + {{- $requiredReplicaSetKey := dict "valueKey" $valueKeyReplicaSetKey "secret" .secret "field" "mongodb-replica-set-key" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredReplicaSetKey -}} + {{- end -}} + + {{- include "common.validations.values.multiple.empty" (dict "required" $requiredPasswords "context" .context) -}} + + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for existingSecret. + +Usage: +{{ include "common.mongodb.values.auth.existingSecret" (dict "context" $) }} +Params: + - subchart - Boolean - Optional. 
Whether MongoDb is used as subchart or not. Default: false +*/}} +{{- define "common.mongodb.values.auth.existingSecret" -}} + {{- if .subchart -}} + {{- .context.Values.mongodb.auth.existingSecret | quote -}} + {{- else -}} + {{- .context.Values.auth.existingSecret | quote -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for enabled mongodb. + +Usage: +{{ include "common.mongodb.values.enabled" (dict "context" $) }} +*/}} +{{- define "common.mongodb.values.enabled" -}} + {{- if .subchart -}} + {{- printf "%v" .context.Values.mongodb.enabled -}} + {{- else -}} + {{- printf "%v" (not .context.Values.enabled) -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for the key auth + +Usage: +{{ include "common.mongodb.values.key.auth" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether MongoDB® is used as subchart or not. Default: false +*/}} +{{- define "common.mongodb.values.key.auth" -}} + {{- if .subchart -}} + mongodb.auth + {{- else -}} + auth + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for architecture + +Usage: +{{ include "common.mongodb.values.architecture" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether MariaDB is used as subchart or not. 
Default: false +*/}} +{{- define "common.mongodb.values.architecture" -}} + {{- if .subchart -}} + {{- .context.Values.mongodb.architecture -}} + {{- else -}} + {{- .context.Values.architecture -}} + {{- end -}} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_postgresql.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_postgresql.tpl new file mode 100644 index 000000000000..164ec0d01252 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_postgresql.tpl @@ -0,0 +1,129 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Validate PostgreSQL required passwords are not empty. + +Usage: +{{ include "common.validations.values.postgresql.passwords" (dict "secret" "secretName" "subchart" false "context" $) }} +Params: + - secret - String - Required. Name of the secret where postgresql values are stored, e.g: "postgresql-passwords-secret" + - subchart - Boolean - Optional. Whether postgresql is used as subchart or not. Default: false +*/}} +{{- define "common.validations.values.postgresql.passwords" -}} + {{- $existingSecret := include "common.postgresql.values.existingSecret" . -}} + {{- $enabled := include "common.postgresql.values.enabled" . -}} + {{- $valueKeyPostgresqlPassword := include "common.postgresql.values.key.postgressPassword" . -}} + {{- $valueKeyPostgresqlReplicationEnabled := include "common.postgresql.values.key.replicationPassword" . -}} + {{- if and (or (not $existingSecret) (eq $existingSecret "\"\"")) (eq $enabled "true") -}} + {{- $requiredPasswords := list -}} + {{- $requiredPostgresqlPassword := dict "valueKey" $valueKeyPostgresqlPassword "secret" .secret "field" "postgresql-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredPostgresqlPassword -}} + + {{- $enabledReplication := include "common.postgresql.values.enabled.replication" . 
-}} + {{- if (eq $enabledReplication "true") -}} + {{- $requiredPostgresqlReplicationPassword := dict "valueKey" $valueKeyPostgresqlReplicationEnabled "secret" .secret "field" "postgresql-replication-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredPostgresqlReplicationPassword -}} + {{- end -}} + + {{- include "common.validations.values.multiple.empty" (dict "required" $requiredPasswords "context" .context) -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to decide whether evaluate global values. + +Usage: +{{ include "common.postgresql.values.use.global" (dict "key" "key-of-global" "context" $) }} +Params: + - key - String - Required. Field to be evaluated within global, e.g: "existingSecret" +*/}} +{{- define "common.postgresql.values.use.global" -}} + {{- if .context.Values.global -}} + {{- if .context.Values.global.postgresql -}} + {{- index .context.Values.global.postgresql .key | quote -}} + {{- end -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for existingSecret. + +Usage: +{{ include "common.postgresql.values.existingSecret" (dict "context" $) }} +*/}} +{{- define "common.postgresql.values.existingSecret" -}} + {{- $globalValue := include "common.postgresql.values.use.global" (dict "key" "existingSecret" "context" .context) -}} + + {{- if .subchart -}} + {{- default (.context.Values.postgresql.existingSecret | quote) $globalValue -}} + {{- else -}} + {{- default (.context.Values.existingSecret | quote) $globalValue -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for enabled postgresql. 
+ +Usage: +{{ include "common.postgresql.values.enabled" (dict "context" $) }} +*/}} +{{- define "common.postgresql.values.enabled" -}} + {{- if .subchart -}} + {{- printf "%v" .context.Values.postgresql.enabled -}} + {{- else -}} + {{- printf "%v" (not .context.Values.enabled) -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for the key postgressPassword. + +Usage: +{{ include "common.postgresql.values.key.postgressPassword" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether postgresql is used as subchart or not. Default: false +*/}} +{{- define "common.postgresql.values.key.postgressPassword" -}} + {{- $globalValue := include "common.postgresql.values.use.global" (dict "key" "postgresqlUsername" "context" .context) -}} + + {{- if not $globalValue -}} + {{- if .subchart -}} + postgresql.postgresqlPassword + {{- else -}} + postgresqlPassword + {{- end -}} + {{- else -}} + global.postgresql.postgresqlPassword + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for enabled.replication. + +Usage: +{{ include "common.postgresql.values.enabled.replication" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether postgresql is used as subchart or not. Default: false +*/}} +{{- define "common.postgresql.values.enabled.replication" -}} + {{- if .subchart -}} + {{- printf "%v" .context.Values.postgresql.replication.enabled -}} + {{- else -}} + {{- printf "%v" .context.Values.replication.enabled -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for the key replication.password. + +Usage: +{{ include "common.postgresql.values.key.replicationPassword" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether postgresql is used as subchart or not. 
Default: false +*/}} +{{- define "common.postgresql.values.key.replicationPassword" -}} + {{- if .subchart -}} + postgresql.replication.password + {{- else -}} + replication.password + {{- end -}} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_redis.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_redis.tpl new file mode 100644 index 000000000000..5d72959b9eee --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_redis.tpl @@ -0,0 +1,76 @@ + +{{/* vim: set filetype=mustache: */}} +{{/* +Validate Redis™ required passwords are not empty. + +Usage: +{{ include "common.validations.values.redis.passwords" (dict "secret" "secretName" "subchart" false "context" $) }} +Params: + - secret - String - Required. Name of the secret where redis values are stored, e.g: "redis-passwords-secret" + - subchart - Boolean - Optional. Whether redis is used as subchart or not. Default: false +*/}} +{{- define "common.validations.values.redis.passwords" -}} + {{- $enabled := include "common.redis.values.enabled" . -}} + {{- $valueKeyPrefix := include "common.redis.values.keys.prefix" . -}} + {{- $standarizedVersion := include "common.redis.values.standarized.version" . 
}} + + {{- $existingSecret := ternary (printf "%s%s" $valueKeyPrefix "auth.existingSecret") (printf "%s%s" $valueKeyPrefix "existingSecret") (eq $standarizedVersion "true") }} + {{- $existingSecretValue := include "common.utils.getValueFromKey" (dict "key" $existingSecret "context" .context) }} + + {{- $valueKeyRedisPassword := ternary (printf "%s%s" $valueKeyPrefix "auth.password") (printf "%s%s" $valueKeyPrefix "password") (eq $standarizedVersion "true") }} + {{- $valueKeyRedisUseAuth := ternary (printf "%s%s" $valueKeyPrefix "auth.enabled") (printf "%s%s" $valueKeyPrefix "usePassword") (eq $standarizedVersion "true") }} + + {{- if and (or (not $existingSecret) (eq $existingSecret "\"\"")) (eq $enabled "true") -}} + {{- $requiredPasswords := list -}} + + {{- $useAuth := include "common.utils.getValueFromKey" (dict "key" $valueKeyRedisUseAuth "context" .context) -}} + {{- if eq $useAuth "true" -}} + {{- $requiredRedisPassword := dict "valueKey" $valueKeyRedisPassword "secret" .secret "field" "redis-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredRedisPassword -}} + {{- end -}} + + {{- include "common.validations.values.multiple.empty" (dict "required" $requiredPasswords "context" .context) -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for enabled redis. + +Usage: +{{ include "common.redis.values.enabled" (dict "context" $) }} +*/}} +{{- define "common.redis.values.enabled" -}} + {{- if .subchart -}} + {{- printf "%v" .context.Values.redis.enabled -}} + {{- else -}} + {{- printf "%v" (not .context.Values.enabled) -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right prefix path for the values + +Usage: +{{ include "common.redis.values.key.prefix" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether redis is used as subchart or not. 
Default: false +*/}} +{{- define "common.redis.values.keys.prefix" -}} + {{- if .subchart -}}redis.{{- else -}}{{- end -}} +{{- end -}} + +{{/* +Checks whether the redis chart's includes the standarizations (version >= 14) + +Usage: +{{ include "common.redis.values.standarized.version" (dict "context" $) }} +*/}} +{{- define "common.redis.values.standarized.version" -}} + + {{- $standarizedAuth := printf "%s%s" (include "common.redis.values.keys.prefix" .) "auth" -}} + {{- $standarizedAuthValues := include "common.utils.getValueFromKey" (dict "key" $standarizedAuth "context" .context) }} + + {{- if $standarizedAuthValues -}} + {{- true -}} + {{- end -}} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_validations.tpl b/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_validations.tpl new file mode 100644 index 000000000000..9a814cf40dcb --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_validations.tpl @@ -0,0 +1,46 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Validate values must not be empty. + +Usage: +{{- $validateValueConf00 := (dict "valueKey" "path.to.value" "secret" "secretName" "field" "password-00") -}} +{{- $validateValueConf01 := (dict "valueKey" "path.to.value" "secret" "secretName" "field" "password-01") -}} +{{ include "common.validations.values.empty" (dict "required" (list $validateValueConf00 $validateValueConf01) "context" $) }} + +Validate value params: + - valueKey - String - Required. The path to the validating value in the values.yaml, e.g: "mysql.password" + - secret - String - Optional. Name of the secret where the validating value is generated/stored, e.g: "mysql-passwords-secret" + - field - String - Optional. 
Name of the field in the secret data, e.g: "mysql-password" +*/}} +{{- define "common.validations.values.multiple.empty" -}} + {{- range .required -}} + {{- include "common.validations.values.single.empty" (dict "valueKey" .valueKey "secret" .secret "field" .field "context" $.context) -}} + {{- end -}} +{{- end -}} + +{{/* +Validate a value must not be empty. + +Usage: +{{ include "common.validations.value.empty" (dict "valueKey" "mariadb.password" "secret" "secretName" "field" "my-password" "subchart" "subchart" "context" $) }} + +Validate value params: + - valueKey - String - Required. The path to the validating value in the values.yaml, e.g: "mysql.password" + - secret - String - Optional. Name of the secret where the validating value is generated/stored, e.g: "mysql-passwords-secret" + - field - String - Optional. Name of the field in the secret data, e.g: "mysql-password" + - subchart - String - Optional - Name of the subchart that the validated password is part of. +*/}} +{{- define "common.validations.values.single.empty" -}} + {{- $value := include "common.utils.getValueFromKey" (dict "key" .valueKey "context" .context) }} + {{- $subchart := ternary "" (printf "%s." .subchart) (empty .subchart) }} + + {{- if not $value -}} + {{- $varname := "my-value" -}} + {{- $getCurrentValue := "" -}} + {{- if and .secret .field -}} + {{- $varname = include "common.utils.fieldToEnvVar" . -}} + {{- $getCurrentValue = printf " To get the current value:\n\n %s\n" (include "common.utils.secret.getvalue" .) 
-}} + {{- end -}} + {{- printf "\n '%s' must not be empty, please add '--set %s%s=$%s' to the command.%s" .valueKey $subchart .valueKey $varname $getCurrentValue -}} + {{- end -}} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/charts/common/values.yaml b/pkg/scanners/helm/test/mysql/charts/common/values.yaml new file mode 100644 index 000000000000..f2df68e5e6af --- /dev/null +++ b/pkg/scanners/helm/test/mysql/charts/common/values.yaml @@ -0,0 +1,5 @@ +## bitnami/common +## It is required by CI/CD tools and processes. +## @skip exampleValue +## +exampleValue: common-chart diff --git a/pkg/scanners/helm/test/mysql/ci/values-production-with-rbac.yaml b/pkg/scanners/helm/test/mysql/ci/values-production-with-rbac.yaml new file mode 100644 index 000000000000..d3370c931113 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/ci/values-production-with-rbac.yaml @@ -0,0 +1,30 @@ +# Test values file for generating all of the yaml and check that +# the rendering is correct + +architecture: replication +auth: + usePasswordFiles: true + +primary: + extraEnvVars: + - name: TEST + value: "3" + podDisruptionBudget: + create: true + +secondary: + replicaCount: 2 + extraEnvVars: + - name: TEST + value: "2" + podDisruptionBudget: + create: true + +serviceAccount: + create: true + name: mysql-service-account +rbac: + create: true + +metrics: + enabled: true diff --git a/pkg/scanners/helm/test/mysql/templates/NOTES.txt b/pkg/scanners/helm/test/mysql/templates/NOTES.txt new file mode 100644 index 000000000000..1b8b6d5ea7d2 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/NOTES.txt @@ -0,0 +1,102 @@ +CHART NAME: {{ .Chart.Name }} +CHART VERSION: {{ .Chart.Version }} +APP VERSION: {{ .Chart.AppVersion }} + +** Please be patient while the chart is being deployed ** + +{{- if .Values.diagnosticMode.enabled }} +The chart has been deployed in diagnostic mode. 
All probes have been disabled and the command has been overwritten with: + + command: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.command "context" $) | nindent 4 }} + args: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.args "context" $) | nindent 4 }} + +Get the list of pods by executing: + + kubectl get pods --namespace {{ .Release.Namespace }} -l app.kubernetes.io/instance={{ .Release.Name }} + +Access the pod you want to debug by executing + + kubectl exec --namespace {{ .Release.Namespace }} -ti -- bash + +In order to replicate the container startup scripts execute this command: + + /opt/bitnami/scripts/mysql/entrypoint.sh /opt/bitnami/scripts/mysql/run.sh + +{{- else }} + +Tip: + + Watch the deployment status using the command: kubectl get pods -w --namespace {{ .Release.Namespace }} + +Services: + + echo Primary: {{ include "mysql.primary.fullname" . }}.{{ .Release.Namespace }}.svc.{{ .Values.clusterDomain }}:{{ .Values.primary.service.port }} +{{- if eq .Values.architecture "replication" }} + echo Secondary: {{ include "mysql.secondary.fullname" . }}.{{ .Release.Namespace }}.svc.{{ .Values.clusterDomain }}:{{ .Values.secondary.service.port }} +{{- end }} + +Execute the following to get the administrator credentials: + + echo Username: root + MYSQL_ROOT_PASSWORD=$(kubectl get secret --namespace {{ .Release.Namespace }} {{ template "mysql.secretName" . }} -o jsonpath="{.data.mysql-root-password}" | base64 --decode) + +To connect to your database: + + 1. Run a pod that you can use as a client: + + kubectl run {{ include "common.names.fullname" . }}-client --rm --tty -i --restart='Never' --image {{ template "mysql.image" . }} --namespace {{ .Release.Namespace }} --command -- bash + + 2. To connect to primary service (read/write): + + mysql -h {{ include "mysql.primary.fullname" . 
}}.{{ .Release.Namespace }}.svc.{{ .Values.clusterDomain }} -uroot -p"$MYSQL_ROOT_PASSWORD" + +{{- if eq .Values.architecture "replication" }} + + 3. To connect to secondary service (read-only): + + mysql -h {{ include "mysql.secondary.fullname" . }}.{{ .Release.Namespace }}.svc.{{ .Values.clusterDomain }} -uroot -p"$MYSQL_ROOT_PASSWORD" +{{- end }} + +{{ if and (.Values.networkPolicy.enabled) (not .Values.networkPolicy.allowExternal) }} +Note: Since NetworkPolicy is enabled, only pods with label {{ template "common.names.fullname" . }}-client=true" will be able to connect to MySQL. +{{- end }} + +{{- if .Values.metrics.enabled }} + +To access the MySQL Prometheus metrics from outside the cluster execute the following commands: + + kubectl port-forward --namespace {{ .Release.Namespace }} svc/{{ printf "%s-metrics" (include "common.names.fullname" .) }} {{ .Values.metrics.service.port }}:{{ .Values.metrics.service.port }} & + curl http://127.0.0.1:{{ .Values.metrics.service.port }}/metrics + +{{- end }} + +To upgrade this helm chart: + + 1. Obtain the password as described on the 'Administrator credentials' section and set the 'root.password' parameter as shown below: + + ROOT_PASSWORD=$(kubectl get secret --namespace {{ .Release.Namespace }} {{ include "common.names.fullname" . }} -o jsonpath="{.data.mysql-root-password}" | base64 --decode) + helm upgrade --namespace {{ .Release.Namespace }} {{ .Release.Name }} bitnami/mysql --set auth.rootPassword=$ROOT_PASSWORD + +{{ include "mysql.validateValues" . }} +{{ include "mysql.checkRollingTags" . }} +{{- if and (not .Values.auth.existingSecret) (not .Values.auth.customPasswordFiles) -}} + {{- $secretName := include "mysql.secretName" . 
-}} + {{- $requiredPasswords := list -}} + + {{- $requiredRootPassword := dict "valueKey" "auth.rootPassword" "secret" $secretName "field" "mysql-root-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredRootPassword -}} + + {{- if not (empty .Values.auth.username) -}} + {{- $requiredPassword := dict "valueKey" "auth.password" "secret" $secretName "field" "mysql-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredPassword -}} + {{- end -}} + + {{- if (eq .Values.architecture "replication") -}} + {{- $requiredReplicationPassword := dict "valueKey" "auth.replicationPassword" "secret" $secretName "field" "mysql-replication-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredReplicationPassword -}} + {{- end -}} + + {{- $mysqlPasswordValidationErrors := include "common.validations.values.multiple.empty" (dict "required" $requiredPasswords "context" $) -}} + {{- include "common.errors.upgrade.passwords.empty" (dict "validationErrors" $mysqlPasswordValidationErrors "context" $) -}} +{{- end }} +{{- end }} diff --git a/pkg/scanners/helm/test/mysql/templates/_helpers.tpl b/pkg/scanners/helm/test/mysql/templates/_helpers.tpl new file mode 100644 index 000000000000..6c2bcff81398 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/_helpers.tpl @@ -0,0 +1,192 @@ +{{/* vim: set filetype=mustache: */}} + +{{- define "mysql.primary.fullname" -}} +{{- if eq .Values.architecture "replication" }} +{{- printf "%s-%s" (include "common.names.fullname" .) "primary" | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- include "common.names.fullname" . -}} +{{- end -}} +{{- end -}} + +{{- define "mysql.secondary.fullname" -}} +{{- printf "%s-%s" (include "common.names.fullname" .) 
"secondary" | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Return the proper MySQL image name +*/}} +{{- define "mysql.image" -}} +{{ include "common.images.image" (dict "imageRoot" .Values.image "global" .Values.global) }} +{{- end -}} + +{{/* +Return the proper metrics image name +*/}} +{{- define "mysql.metrics.image" -}} +{{ include "common.images.image" (dict "imageRoot" .Values.metrics.image "global" .Values.global) }} +{{- end -}} + +{{/* +Return the proper image name (for the init container volume-permissions image) +*/}} +{{- define "mysql.volumePermissions.image" -}} +{{ include "common.images.image" (dict "imageRoot" .Values.volumePermissions.image "global" .Values.global) }} +{{- end -}} + +{{/* +Return the proper Docker Image Registry Secret Names +*/}} +{{- define "mysql.imagePullSecrets" -}} +{{ include "common.images.pullSecrets" (dict "images" (list .Values.image .Values.metrics.image .Values.volumePermissions.image) "global" .Values.global) }} +{{- end -}} + +{{ template "mysql.initdbScriptsCM" . }} +{{/* +Get the initialization scripts ConfigMap name. +*/}} +{{- define "mysql.initdbScriptsCM" -}} +{{- if .Values.initdbScriptsConfigMap -}} + {{- printf "%s" .Values.initdbScriptsConfigMap -}} +{{- else -}} + {{- printf "%s-init-scripts" (include "mysql.primary.fullname" .) -}} +{{- end -}} +{{- end -}} + +{{/* + Returns the proper service account name depending if an explicit service account name is set + in the values file. If the name is not set it will default to either mysql.fullname if serviceAccount.create + is true or default otherwise. +*/}} +{{- define "mysql.serviceAccountName" -}} + {{- if .Values.serviceAccount.create -}} + {{ default (include "common.names.fullname" .) 
.Values.serviceAccount.name }} + {{- else -}} + {{ default "default" .Values.serviceAccount.name }} + {{- end -}} +{{- end -}} + +{{/* +Return the configmap with the MySQL Primary configuration +*/}} +{{- define "mysql.primary.configmapName" -}} +{{- if .Values.primary.existingConfigmap -}} + {{- printf "%s" (tpl .Values.primary.existingConfigmap $) -}} +{{- else -}} + {{- printf "%s" (include "mysql.primary.fullname" .) -}} +{{- end -}} +{{- end -}} + +{{/* +Return true if a configmap object should be created for MySQL Secondary +*/}} +{{- define "mysql.primary.createConfigmap" -}} +{{- if and .Values.primary.configuration (not .Values.primary.existingConfigmap) }} + {{- true -}} +{{- else -}} +{{- end -}} +{{- end -}} + +{{/* +Return the configmap with the MySQL Primary configuration +*/}} +{{- define "mysql.secondary.configmapName" -}} +{{- if .Values.secondary.existingConfigmap -}} + {{- printf "%s" (tpl .Values.secondary.existingConfigmap $) -}} +{{- else -}} + {{- printf "%s" (include "mysql.secondary.fullname" .) -}} +{{- end -}} +{{- end -}} + +{{/* +Return true if a configmap object should be created for MySQL Secondary +*/}} +{{- define "mysql.secondary.createConfigmap" -}} +{{- if and (eq .Values.architecture "replication") .Values.secondary.configuration (not .Values.secondary.existingConfigmap) }} + {{- true -}} +{{- else -}} +{{- end -}} +{{- end -}} + +{{/* +Return the secret with MySQL credentials +*/}} +{{- define "mysql.secretName" -}} + {{- if .Values.auth.existingSecret -}} + {{- printf "%s" .Values.auth.existingSecret -}} + {{- else -}} + {{- printf "%s" (include "common.names.fullname" .) 
-}} + {{- end -}} +{{- end -}} + +{{/* +Return true if a secret object should be created for MySQL +*/}} +{{- define "mysql.createSecret" -}} +{{- if and (not .Values.auth.existingSecret) (not .Values.auth.customPasswordFiles) }} + {{- true -}} +{{- end -}} +{{- end -}} + +{{/* +Returns the available value for certain key in an existing secret (if it exists), +otherwise it generates a random value. +*/}} +{{- define "getValueFromSecret" }} + {{- $len := (default 16 .Length) | int -}} + {{- $obj := (lookup "v1" "Secret" .Namespace .Name).data -}} + {{- if $obj }} + {{- index $obj .Key | b64dec -}} + {{- else -}} + {{- randAlphaNum $len -}} + {{- end -}} +{{- end }} + +{{- define "mysql.root.password" -}} + {{- if not (empty .Values.auth.rootPassword) }} + {{- .Values.auth.rootPassword }} + {{- else if (not .Values.auth.forcePassword) }} + {{- include "getValueFromSecret" (dict "Namespace" .Release.Namespace "Name" (include "common.names.fullname" .) "Length" 10 "Key" "mysql-root-password") }} + {{- else }} + {{- required "A MySQL Root Password is required!" .Values.auth.rootPassword }} + {{- end }} +{{- end -}} + +{{- define "mysql.password" -}} + {{- if and (not (empty .Values.auth.username)) (not (empty .Values.auth.password)) }} + {{- .Values.auth.password }} + {{- else if (not .Values.auth.forcePassword) }} + {{- include "getValueFromSecret" (dict "Namespace" .Release.Namespace "Name" (include "common.names.fullname" .) "Length" 10 "Key" "mysql-password") }} + {{- else }} + {{- required "A MySQL Database Password is required!" .Values.auth.password }} + {{- end }} +{{- end -}} + +{{- define "mysql.replication.password" -}} + {{- if not (empty .Values.auth.replicationPassword) }} + {{- .Values.auth.replicationPassword }} + {{- else if (not .Values.auth.forcePassword) }} + {{- include "getValueFromSecret" (dict "Namespace" .Release.Namespace "Name" (include "common.names.fullname" .) 
"Length" 10 "Key" "mysql-replication-password") }} + {{- else }} + {{- required "A MySQL Replication Password is required!" .Values.auth.replicationPassword }} + {{- end }} +{{- end -}} + +{{/* Check if there are rolling tags in the images */}} +{{- define "mysql.checkRollingTags" -}} +{{- include "common.warnings.rollingTag" .Values.image }} +{{- include "common.warnings.rollingTag" .Values.metrics.image }} +{{- include "common.warnings.rollingTag" .Values.volumePermissions.image }} +{{- end -}} + +{{/* +Compile all warnings into a single message, and call fail. +*/}} +{{- define "mysql.validateValues" -}} +{{- $messages := list -}} +{{- $messages := without $messages "" -}} +{{- $message := join "\n" $messages -}} + +{{- if $message -}} +{{- printf "\nVALUES VALIDATION:\n%s" $message | fail -}} +{{- end -}} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/templates/extra-list.yaml b/pkg/scanners/helm/test/mysql/templates/extra-list.yaml new file mode 100644 index 000000000000..9ac65f9e16f4 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/extra-list.yaml @@ -0,0 +1,4 @@ +{{- range .Values.extraDeploy }} +--- +{{ include "common.tplvalues.render" (dict "value" . "context" $) }} +{{- end }} diff --git a/pkg/scanners/helm/test/mysql/templates/metrics-svc.yaml b/pkg/scanners/helm/test/mysql/templates/metrics-svc.yaml new file mode 100644 index 000000000000..fb0d9d761dc6 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/metrics-svc.yaml @@ -0,0 +1,29 @@ +{{- if .Values.metrics.enabled }} +apiVersion: v1 +kind: Service +metadata: + name: {{ printf "%s-metrics" (include "common.names.fullname" .) }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . 
| nindent 4 }} + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + app.kubernetes.io/component: metrics + {{- if or .Values.metrics.service.annotations .Values.commonAnnotations }} + annotations: + {{- if .Values.metrics.service.annotations }} + {{- include "common.tplvalues.render" (dict "value" .Values.metrics.service.annotations "context" $) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} + {{- end }} +spec: + type: {{ .Values.metrics.service.type }} + ports: + - port: {{ .Values.metrics.service.port }} + targetPort: metrics + protocol: TCP + name: metrics + selector: {{- include "common.labels.matchLabels" $ | nindent 4 }} +{{- end }} diff --git a/pkg/scanners/helm/test/mysql/templates/networkpolicy.yaml b/pkg/scanners/helm/test/mysql/templates/networkpolicy.yaml new file mode 100644 index 000000000000..a0d1d01d4079 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/networkpolicy.yaml @@ -0,0 +1,38 @@ +{{- if .Values.networkPolicy.enabled }} +kind: NetworkPolicy +apiVersion: {{ template "common.capabilities.networkPolicy.apiVersion" . }} +metadata: + name: {{ template "common.names.fullname" . }} + labels: + {{- include "common.labels.standard" . | nindent 4 }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} + namespace: {{ .Release.Namespace }} +spec: + podSelector: + matchLabels: + {{- include "common.labels.matchLabels" . | nindent 6 }} + ingress: + # Allow inbound connections + - ports: + - port: {{ .Values.primary.service.port }} + {{- if not .Values.networkPolicy.allowExternal }} + from: + - podSelector: + matchLabels: + {{ template "common.names.fullname" . 
}}-client: "true" + {{- if .Values.networkPolicy.explicitNamespacesSelector }} + namespaceSelector: +{{ toYaml .Values.networkPolicy.explicitNamespacesSelector | indent 12 }} + {{- end }} + - podSelector: + matchLabels: + {{- include "common.labels.matchLabels" . | nindent 14 }} + {{- end }} + {{- if .Values.metrics.enabled }} + # Allow prometheus scrapes + - ports: + - port: 9104 + {{- end }} +{{- end }} diff --git a/pkg/scanners/helm/test/mysql/templates/primary/configmap.yaml b/pkg/scanners/helm/test/mysql/templates/primary/configmap.yaml new file mode 100644 index 000000000000..540b7b9072e9 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/primary/configmap.yaml @@ -0,0 +1,18 @@ +{{- if (include "mysql.primary.createConfigmap" .) }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "mysql.primary.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: primary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +data: + my.cnf: |- + {{ .Values.primary.configuration | nindent 4 }} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/templates/primary/initialization-configmap.yaml b/pkg/scanners/helm/test/mysql/templates/primary/initialization-configmap.yaml new file mode 100644 index 000000000000..83cbaea74883 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/primary/initialization-configmap.yaml @@ -0,0 +1,14 @@ +{{- if and .Values.initdbScripts (not .Values.initdbScriptsConfigMap) }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ printf "%s-init-scripts" (include "mysql.primary.fullname" .) 
}} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: primary + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +data: +{{- include "common.tplvalues.render" (dict "value" .Values.initdbScripts "context" .) | nindent 2 }} +{{ end }} diff --git a/pkg/scanners/helm/test/mysql/templates/primary/pdb.yaml b/pkg/scanners/helm/test/mysql/templates/primary/pdb.yaml new file mode 100644 index 000000000000..106ad5207e5a --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/primary/pdb.yaml @@ -0,0 +1,25 @@ +{{- if .Values.primary.pdb.enabled }} +apiVersion: {{ include "common.capabilities.policy.apiVersion" . }} +kind: PodDisruptionBudget +metadata: + name: {{ include "mysql.primary.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: primary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + {{- if .Values.primary.pdb.minAvailable }} + minAvailable: {{ .Values.primary.pdb.minAvailable }} + {{- end }} + {{- if .Values.primary.pdb.maxUnavailable }} + maxUnavailable: {{ .Values.primary.pdb.maxUnavailable }} + {{- end }} + selector: + matchLabels: {{ include "common.labels.matchLabels" . 
| nindent 6 }} + app.kubernetes.io/component: primary +{{- end }} diff --git a/pkg/scanners/helm/test/mysql/templates/primary/statefulset.yaml b/pkg/scanners/helm/test/mysql/templates/primary/statefulset.yaml new file mode 100644 index 000000000000..6f9c99ea66d9 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/primary/statefulset.yaml @@ -0,0 +1,368 @@ +apiVersion: {{ include "common.capabilities.statefulset.apiVersion" . }} +kind: StatefulSet +metadata: + name: {{ include "mysql.primary.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: primary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.primary.podLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.primary.podLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + replicas: 1 + selector: + matchLabels: {{ include "common.labels.matchLabels" . | nindent 6 }} + app.kubernetes.io/component: primary + serviceName: {{ include "mysql.primary.fullname" . }} + updateStrategy: + type: {{ .Values.primary.updateStrategy }} + {{- if (eq "Recreate" .Values.primary.updateStrategy) }} + rollingUpdate: null + {{- else if .Values.primary.rollingUpdatePartition }} + rollingUpdate: + partition: {{ .Values.primary.rollingUpdatePartition }} + {{- end }} + template: + metadata: + annotations: + {{- if (include "mysql.primary.createConfigmap" .) }} + checksum/configuration: {{ include (print $.Template.BasePath "/primary/configmap.yaml") . 
| sha256sum }} + {{- end }} + {{- if .Values.primary.podAnnotations }} + {{- include "common.tplvalues.render" (dict "value" .Values.primary.podAnnotations "context" $) | nindent 8 }} + {{- end }} + labels: {{- include "common.labels.standard" . | nindent 8 }} + app.kubernetes.io/component: primary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 8 }} + {{- end }} + {{- if .Values.primary.podLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.primary.podLabels "context" $ ) | nindent 8 }} + {{- end }} + spec: + {{- include "mysql.imagePullSecrets" . | nindent 6 }} + {{- if .Values.primary.hostAliases }} + hostAliases: {{- include "common.tplvalues.render" (dict "value" .Values.primary.hostAliases "context" $) | nindent 8 }} + {{- end }} + {{- if .Values.schedulerName }} + schedulerName: {{ .Values.schedulerName | quote }} + {{- end }} + serviceAccountName: {{ template "mysql.serviceAccountName" . 
}} + {{- if .Values.primary.affinity }} + affinity: {{- include "common.tplvalues.render" (dict "value" .Values.primary.affinity "context" $) | nindent 8 }} + {{- else }} + affinity: + podAffinity: {{- include "common.affinities.pods" (dict "type" .Values.primary.podAffinityPreset "component" "primary" "context" $) | nindent 10 }} + podAntiAffinity: {{- include "common.affinities.pods" (dict "type" .Values.primary.podAntiAffinityPreset "component" "primary" "context" $) | nindent 10 }} + nodeAffinity: {{- include "common.affinities.nodes" (dict "type" .Values.primary.nodeAffinityPreset.type "key" .Values.primary.nodeAffinityPreset.key "values" .Values.primary.nodeAffinityPreset.values) | nindent 10 }} + {{- end }} + {{- if .Values.primary.nodeSelector }} + nodeSelector: {{- include "common.tplvalues.render" (dict "value" .Values.primary.nodeSelector "context" $) | nindent 8 }} + {{- end }} + {{- if .Values.primary.tolerations }} + tolerations: {{- include "common.tplvalues.render" (dict "value" .Values.primary.tolerations "context" $) | nindent 8 }} + {{- end }} + {{- if .Values.priorityClassName }} + priorityClassName: {{ .Values.priorityClassName | quote }} + {{- end }} + {{- if .Values.primary.podSecurityContext.enabled }} + securityContext: {{- omit .Values.primary.podSecurityContext "enabled" | toYaml | nindent 8 }} + {{- end }} + {{- if or .Values.primary.initContainers (and .Values.primary.podSecurityContext.enabled .Values.volumePermissions.enabled .Values.primary.persistence.enabled) }} + initContainers: + {{- if .Values.primary.initContainers }} + {{- include "common.tplvalues.render" (dict "value" .Values.primary.initContainers "context" $) | nindent 8 }} + {{- end }} + {{- if and .Values.primary.podSecurityContext.enabled .Values.volumePermissions.enabled .Values.primary.persistence.enabled }} + - name: volume-permissions + image: {{ include "mysql.volumePermissions.image" . 
}} + imagePullPolicy: {{ .Values.volumePermissions.image.pullPolicy | quote }} + command: + - /bin/bash + - -ec + - | + chown -R {{ .Values.primary.containerSecurityContext.runAsUser }}:{{ .Values.primary.podSecurityContext.fsGroup }} /bitnami/mysql + securityContext: + runAsUser: 0 + {{- if .Values.volumePermissions.resources }} + resources: {{- toYaml .Values.volumePermissions.resources | nindent 12 }} + {{- end }} + volumeMounts: + - name: data + mountPath: /bitnami/mysql + {{- end }} + {{- end }} + containers: + - name: mysql + image: {{ include "mysql.image" . }} + imagePullPolicy: {{ .Values.image.pullPolicy | quote }} + {{- if .Values.primary.containerSecurityContext.enabled }} + securityContext: {{- omit .Values.primary.containerSecurityContext "enabled" | toYaml | nindent 12 }} + {{- end }} + {{- if .Values.diagnosticMode.enabled }} + command: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.command "context" $) | nindent 12 }} + {{- else if .Values.primary.command }} + command: {{- include "common.tplvalues.render" (dict "value" .Values.primary.command "context" $) | nindent 12 }} + {{- end }} + {{- if .Values.diagnosticMode.enabled }} + args: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.args "context" $) | nindent 12 }} + {{- else if .Values.primary.args }} + args: {{- include "common.tplvalues.render" (dict "value" .Values.primary.args "context" $) | nindent 12 }} + {{- end }} + env: + - name: BITNAMI_DEBUG + value: {{ ternary "true" "false" (or .Values.image.debug .Values.diagnosticMode.enabled) | quote }} + {{- if .Values.auth.usePasswordFiles }} + - name: MYSQL_ROOT_PASSWORD_FILE + value: {{ default "/opt/bitnami/mysql/secrets/mysql-root-password" .Values.auth.customPasswordFiles.root }} + {{- else }} + - name: MYSQL_ROOT_PASSWORD + valueFrom: + secretKeyRef: + name: {{ template "mysql.secretName" . 
}} + key: mysql-root-password + {{- end }} + {{- if not (empty .Values.auth.username) }} + - name: MYSQL_USER + value: {{ .Values.auth.username | quote }} + {{- if .Values.auth.usePasswordFiles }} + - name: MYSQL_PASSWORD_FILE + value: {{ default "/opt/bitnami/mysql/secrets/mysql-password" .Values.auth.customPasswordFiles.user }} + {{- else }} + - name: MYSQL_PASSWORD + valueFrom: + secretKeyRef: + name: {{ template "mysql.secretName" . }} + key: mysql-password + {{- end }} + {{- end }} + - name: MYSQL_DATABASE + value: {{ .Values.auth.database | quote }} + {{- if eq .Values.architecture "replication" }} + - name: MYSQL_REPLICATION_MODE + value: "master" + - name: MYSQL_REPLICATION_USER + value: {{ .Values.auth.replicationUser | quote }} + {{- if .Values.auth.usePasswordFiles }} + - name: MYSQL_REPLICATION_PASSWORD_FILE + value: {{ default "/opt/bitnami/mysql/secrets/mysql-replication-password" .Values.auth.customPasswordFiles.replicator }} + {{- else }} + - name: MYSQL_REPLICATION_PASSWORD + valueFrom: + secretKeyRef: + name: {{ template "mysql.secretName" . 
}} + key: mysql-replication-password + {{- end }} + {{- end }} + {{- if .Values.primary.extraFlags }} + - name: MYSQL_EXTRA_FLAGS + value: "{{ .Values.primary.extraFlags }}" + {{- end }} + {{- if .Values.primary.extraEnvVars }} + {{- include "common.tplvalues.render" (dict "value" .Values.primary.extraEnvVars "context" $) | nindent 12 }} + {{- end }} + {{- if or .Values.primary.extraEnvVarsCM .Values.primary.extraEnvVarsSecret }} + envFrom: + {{- if .Values.primary.extraEnvVarsCM }} + - configMapRef: + name: {{ .Values.primary.extraEnvVarsCM }} + {{- end }} + {{- if .Values.primary.extraEnvVarsSecret }} + - secretRef: + name: {{ .Values.primary.extraEnvVarsSecret }} + {{- end }} + {{- end }} + ports: + - name: mysql + containerPort: 3306 + {{- if not .Values.diagnosticMode.enabled }} + {{- if .Values.primary.livenessProbe.enabled }} + livenessProbe: {{- omit .Values.primary.livenessProbe "enabled" | toYaml | nindent 12 }} + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + {{- else if .Values.primary.customLivenessProbe }} + livenessProbe: {{- include "common.tplvalues.render" (dict "value" .Values.primary.customLivenessProbe "context" $) | nindent 12 }} + {{- end }} + {{- if .Values.primary.readinessProbe.enabled }} + readinessProbe: {{- omit .Values.primary.readinessProbe "enabled" | toYaml | nindent 12 }} + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + {{- else if .Values.primary.customReadinessProbe }} + readinessProbe: {{- include "common.tplvalues.render" (dict "value" .Values.primary.customReadinessProbe "context" $) | nindent 12 }} + {{- end }} + {{- if 
.Values.primary.startupProbe.enabled }} + startupProbe: {{- omit .Values.primary.startupProbe "enabled" | toYaml | nindent 12 }} + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + {{- else if .Values.primary.customStartupProbe }} + startupProbe: {{- include "common.tplvalues.render" (dict "value" .Values.primary.customStartupProbe "context" $) | nindent 12 }} + {{- end }} + {{- end }} + {{- if .Values.primary.resources }} + resources: {{ toYaml .Values.primary.resources | nindent 12 }} + {{- end }} + volumeMounts: + - name: data + mountPath: /bitnami/mysql + {{- if or .Values.initdbScriptsConfigMap .Values.initdbScripts }} + - name: custom-init-scripts + mountPath: /docker-entrypoint-initdb.d + {{- end }} + {{- if or .Values.primary.configuration .Values.primary.existingConfigmap }} + - name: config + mountPath: /opt/bitnami/mysql/conf/my.cnf + subPath: my.cnf + {{- end }} + {{- if and .Values.auth.usePasswordFiles (not .Values.auth.customPasswordFiles) }} + - name: mysql-credentials + mountPath: /opt/bitnami/mysql/secrets/ + {{- end }} + {{- if .Values.primary.extraVolumeMounts }} + {{- include "common.tplvalues.render" (dict "value" .Values.primary.extraVolumeMounts "context" $) | nindent 12 }} + {{- end }} + {{- if .Values.metrics.enabled }} + - name: metrics + image: {{ include "mysql.metrics.image" . }} + imagePullPolicy: {{ .Values.metrics.image.pullPolicy | quote }} + env: + {{- if .Values.auth.usePasswordFiles }} + - name: MYSQL_ROOT_PASSWORD_FILE + value: {{ default "/opt/bitnami/mysqld-exporter/secrets/mysql-root-password" .Values.auth.customPasswordFiles.root }} + {{- else }} + - name: MYSQL_ROOT_PASSWORD + valueFrom: + secretKeyRef: + name: {{ include "mysql.secretName" . 
}} + key: mysql-root-password + {{- end }} + {{- if .Values.diagnosticMode.enabled }} + command: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.command "context" $) | nindent 12 }} + args: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.args "context" $) | nindent 12 }} + {{- else }} + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE") + fi + DATA_SOURCE_NAME="root:${password_aux}@(localhost:3306)/" /bin/mysqld_exporter {{- range .Values.metrics.extraArgs.primary }} {{ . }} {{- end }} + {{- end }} + ports: + - name: metrics + containerPort: 9104 + {{- if not .Values.diagnosticMode.enabled }} + {{- if .Values.metrics.livenessProbe.enabled }} + livenessProbe: {{- omit .Values.metrics.livenessProbe "enabled" | toYaml | nindent 12 }} + httpGet: + path: /metrics + port: metrics + {{- end }} + {{- if .Values.metrics.readinessProbe.enabled }} + readinessProbe: {{- omit .Values.metrics.readinessProbe "enabled" | toYaml | nindent 12 }} + httpGet: + path: /metrics + port: metrics + {{- end }} + {{- end }} + {{- if .Values.metrics.resources }} + resources: {{- toYaml .Values.metrics.resources | nindent 12 }} + {{- end }} + {{- if and .Values.auth.usePasswordFiles (not .Values.auth.customPasswordFiles) }} + volumeMounts: + - name: mysql-credentials + mountPath: /opt/bitnami/mysqld-exporter/secrets/ + {{- end }} + {{- end }} + {{- if .Values.primary.sidecars }} + {{- include "common.tplvalues.render" (dict "value" .Values.primary.sidecars "context" $) | nindent 8 }} + {{- end }} + volumes: + {{- if or .Values.primary.configuration .Values.primary.existingConfigmap }} + - name: config + configMap: + name: {{ include "mysql.primary.configmapName" . 
}} + {{- end }} + {{- if or .Values.initdbScriptsConfigMap .Values.initdbScripts }} + - name: custom-init-scripts + configMap: + name: {{ include "mysql.initdbScriptsCM" . }} + {{- end }} + {{- if and .Values.auth.usePasswordFiles (not .Values.auth.customPasswordFiles) }} + - name: mysql-credentials + secret: + secretName: {{ include "mysql.secretName" . }} + items: + - key: mysql-root-password + path: mysql-root-password + - key: mysql-password + path: mysql-password + {{- if eq .Values.architecture "replication" }} + - key: mysql-replication-password + path: mysql-replication-password + {{- end }} + {{- end }} + {{- if .Values.primary.extraVolumes }} + {{- include "common.tplvalues.render" (dict "value" .Values.primary.extraVolumes "context" $) | nindent 8 }} + {{- end }} + {{- if and .Values.primary.persistence.enabled .Values.primary.persistence.existingClaim }} + - name: data + persistentVolumeClaim: + claimName: {{ tpl .Values.primary.persistence.existingClaim . }} + {{- else if not .Values.primary.persistence.enabled }} + - name: data + emptyDir: {} + {{- else if and .Values.primary.persistence.enabled (not .Values.primary.persistence.existingClaim) }} + volumeClaimTemplates: + - metadata: + name: data + labels: {{ include "common.labels.matchLabels" . | nindent 10 }} + app.kubernetes.io/component: primary + {{- if .Values.primary.persistence.annotations }} + annotations: + {{- toYaml .Values.primary.persistence.annotations | nindent 10 }} + {{- end }} + spec: + accessModes: + {{- range .Values.primary.persistence.accessModes }} + - {{ . 
| quote }} + {{- end }} + resources: + requests: + storage: {{ .Values.primary.persistence.size | quote }} + {{ include "common.storage.class" (dict "persistence" .Values.primary.persistence "global" .Values.global) }} + {{- if .Values.primary.persistence.selector }} + selector: {{- include "common.tplvalues.render" (dict "value" .Values.primary.persistence.selector "context" $) | nindent 10 }} + {{- end -}} + {{- end }} diff --git a/pkg/scanners/helm/test/mysql/templates/primary/svc-headless.yaml b/pkg/scanners/helm/test/mysql/templates/primary/svc-headless.yaml new file mode 100644 index 000000000000..49e6e5798783 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/primary/svc-headless.yaml @@ -0,0 +1,24 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ include "mysql.primary.fullname" . }}-headless + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: primary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + annotations: + {{- if .Values.commonAnnotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + type: ClusterIP + clusterIP: None + publishNotReadyAddresses: true + ports: + - name: mysql + port: {{ .Values.primary.service.port }} + targetPort: mysql + selector: {{ include "common.labels.matchLabels" . | nindent 4 }} + app.kubernetes.io/component: primary diff --git a/pkg/scanners/helm/test/mysql/templates/primary/svc.yaml b/pkg/scanners/helm/test/mysql/templates/primary/svc.yaml new file mode 100644 index 000000000000..b46e6faa8149 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/primary/svc.yaml @@ -0,0 +1,41 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ include "mysql.primary.fullname" . 
}} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: primary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + annotations: + {{- if .Values.commonAnnotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.primary.service.annotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.primary.service.annotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + type: {{ .Values.primary.service.type }} + {{- if and (eq .Values.primary.service.type "ClusterIP") .Values.primary.service.clusterIP }} + clusterIP: {{ .Values.primary.service.clusterIP }} + {{- end }} + {{- if and .Values.primary.service.loadBalancerIP (eq .Values.primary.service.type "LoadBalancer") }} + loadBalancerIP: {{ .Values.primary.service.loadBalancerIP }} + externalTrafficPolicy: {{ .Values.primary.service.externalTrafficPolicy | quote }} + {{- end }} + {{- if and (eq .Values.primary.service.type "LoadBalancer") .Values.primary.service.loadBalancerSourceRanges }} + loadBalancerSourceRanges: {{- toYaml .Values.primary.service.loadBalancerSourceRanges | nindent 4 }} + {{- end }} + ports: + - name: mysql + port: {{ .Values.primary.service.port }} + protocol: TCP + targetPort: mysql + {{- if (and (or (eq .Values.primary.service.type "NodePort") (eq .Values.primary.service.type "LoadBalancer")) .Values.primary.service.nodePort) }} + nodePort: {{ .Values.primary.service.nodePort }} + {{- else if eq .Values.primary.service.type "ClusterIP" }} + nodePort: null + {{- end }} + selector: {{ include "common.labels.matchLabels" . 
| nindent 4 }} + app.kubernetes.io/component: primary diff --git a/pkg/scanners/helm/test/mysql/templates/role.yaml b/pkg/scanners/helm/test/mysql/templates/role.yaml new file mode 100644 index 000000000000..4cbdd5c9ff20 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/role.yaml @@ -0,0 +1,21 @@ +{{- if and .Values.serviceAccount.create .Values.rbac.create }} +apiVersion: {{ include "common.capabilities.rbac.apiVersion" . }} +kind: Role +metadata: + name: {{ include "common.names.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +rules: + - apiGroups: + - "" + resources: + - endpoints + verbs: + - get +{{- end }} diff --git a/pkg/scanners/helm/test/mysql/templates/rolebinding.yaml b/pkg/scanners/helm/test/mysql/templates/rolebinding.yaml new file mode 100644 index 000000000000..90ede32f5fc7 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/rolebinding.yaml @@ -0,0 +1,21 @@ +{{- if and .Values.serviceAccount.create .Values.rbac.create }} +kind: RoleBinding +apiVersion: {{ include "common.capabilities.rbac.apiVersion" . }} +metadata: + name: {{ include "common.names.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . 
| nindent 4 }} + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +subjects: + - kind: ServiceAccount + name: {{ include "mysql.serviceAccountName" . }} +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: {{ include "common.names.fullname" . -}} +{{- end }} diff --git a/pkg/scanners/helm/test/mysql/templates/secondary/configmap.yaml b/pkg/scanners/helm/test/mysql/templates/secondary/configmap.yaml new file mode 100644 index 000000000000..682e3e19ba96 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/secondary/configmap.yaml @@ -0,0 +1,18 @@ +{{- if (include "mysql.secondary.createConfigmap" .) }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "mysql.secondary.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: secondary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +data: + my.cnf: |- + {{ .Values.secondary.configuration | nindent 4 }} +{{- end -}} diff --git a/pkg/scanners/helm/test/mysql/templates/secondary/pdb.yaml b/pkg/scanners/helm/test/mysql/templates/secondary/pdb.yaml new file mode 100644 index 000000000000..49c7e167c0a2 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/secondary/pdb.yaml @@ -0,0 +1,25 @@ +{{- if and (eq .Values.architecture "replication") .Values.secondary.pdb.enabled }} +apiVersion: {{ include "common.capabilities.policy.apiVersion" . 
}} +kind: PodDisruptionBudget +metadata: + name: {{ include "mysql.secondary.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: secondary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + {{- if .Values.secondary.pdb.minAvailable }} + minAvailable: {{ .Values.secondary.pdb.minAvailable }} + {{- end }} + {{- if .Values.secondary.pdb.maxUnavailable }} + maxUnavailable: {{ .Values.secondary.pdb.maxUnavailable }} + {{- end }} + selector: + matchLabels: {{ include "common.labels.matchLabels" . | nindent 6 }} + app.kubernetes.io/component: secondary +{{- end }} diff --git a/pkg/scanners/helm/test/mysql/templates/secondary/statefulset.yaml b/pkg/scanners/helm/test/mysql/templates/secondary/statefulset.yaml new file mode 100644 index 000000000000..ef196ebf6df0 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/secondary/statefulset.yaml @@ -0,0 +1,338 @@ +{{- if eq .Values.architecture "replication" }} +apiVersion: {{ include "common.capabilities.statefulset.apiVersion" . }} +kind: StatefulSet +metadata: + name: {{ include "mysql.secondary.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . 
| nindent 4 }} + app.kubernetes.io/component: secondary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.secondary.podLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.secondary.podLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + replicas: {{ .Values.secondary.replicaCount }} + selector: + matchLabels: {{ include "common.labels.matchLabels" . | nindent 6 }} + app.kubernetes.io/component: secondary + serviceName: {{ include "mysql.secondary.fullname" . }} + updateStrategy: + type: {{ .Values.secondary.updateStrategy }} + {{- if (eq "Recreate" .Values.secondary.updateStrategy) }} + rollingUpdate: null + {{- else if .Values.secondary.rollingUpdatePartition }} + rollingUpdate: + partition: {{ .Values.secondary.rollingUpdatePartition }} + {{- end }} + template: + metadata: + annotations: + {{- if (include "mysql.secondary.createConfigmap" .) }} + checksum/configuration: {{ include (print $.Template.BasePath "/secondary/configmap.yaml") . | sha256sum }} + {{- end }} + {{- if .Values.secondary.podAnnotations }} + {{- include "common.tplvalues.render" (dict "value" .Values.secondary.podAnnotations "context" $) | nindent 8 }} + {{- end }} + labels: {{- include "common.labels.standard" . | nindent 8 }} + app.kubernetes.io/component: secondary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 8 }} + {{- end }} + {{- if .Values.secondary.podLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.secondary.podLabels "context" $ ) | nindent 8 }} + {{- end }} + spec: + {{- include "mysql.imagePullSecrets" . 
| nindent 6 }} + {{- if .Values.secondary.hostAliases }} + hostAliases: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.hostAliases "context" $) | nindent 8 }} + {{- end }} + {{- if .Values.schedulerName }} + schedulerName: {{ .Values.schedulerName | quote }} + {{- end }} + serviceAccountName: {{ include "mysql.serviceAccountName" . }} + {{- if .Values.secondary.affinity }} + affinity: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.affinity "context" $) | nindent 8 }} + {{- else }} + affinity: + podAffinity: {{- include "common.affinities.pods" (dict "type" .Values.secondary.podAffinityPreset "component" "secondary" "context" $) | nindent 10 }} + podAntiAffinity: {{- include "common.affinities.pods" (dict "type" .Values.secondary.podAntiAffinityPreset "component" "secondary" "context" $) | nindent 10 }} + nodeAffinity: {{- include "common.affinities.nodes" (dict "type" .Values.secondary.nodeAffinityPreset.type "key" .Values.secondary.nodeAffinityPreset.key "values" .Values.secondary.nodeAffinityPreset.values) | nindent 10 }} + {{- end }} + {{- if .Values.secondary.nodeSelector }} + nodeSelector: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.nodeSelector "context" $) | nindent 8 }} + {{- end }} + {{- if .Values.secondary.tolerations }} + tolerations: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.tolerations "context" $) | nindent 8 }} + {{- end }} + {{- if .Values.priorityClassName }} + priorityClassName: {{ .Values.priorityClassName | quote }} + {{- end }} + {{- if .Values.secondary.podSecurityContext.enabled }} + securityContext: {{- omit .Values.secondary.podSecurityContext "enabled" | toYaml | nindent 8 }} + {{- end }} + {{- if or .Values.secondary.initContainers (and .Values.secondary.podSecurityContext.enabled .Values.volumePermissions.enabled .Values.secondary.persistence.enabled) }} + initContainers: + {{- if .Values.secondary.initContainers }} + {{- include 
"common.tplvalues.render" (dict "value" .Values.secondary.initContainers "context" $) | nindent 8 }} + {{- end }} + {{- if and .Values.secondary.podSecurityContext.enabled .Values.volumePermissions.enabled .Values.secondary.persistence.enabled }} + - name: volume-permissions + image: {{ include "mysql.volumePermissions.image" . }} + imagePullPolicy: {{ .Values.volumePermissions.image.pullPolicy | quote }} + command: + - /bin/bash + - -ec + - | + chown -R {{ .Values.secondary.containerSecurityContext.runAsUser }}:{{ .Values.secondary.podSecurityContext.fsGroup }} /bitnami/mysql + securityContext: + runAsUser: 0 + {{- if .Values.volumePermissions.resources }} + resources: {{- toYaml .Values.volumePermissions.resources | nindent 12 }} + {{- end }} + volumeMounts: + - name: data + mountPath: /bitnami/mysql + {{- end }} + {{- end }} + containers: + - name: mysql + image: {{ include "mysql.image" . }} + imagePullPolicy: {{ .Values.image.pullPolicy | quote }} + {{- if .Values.secondary.containerSecurityContext.enabled }} + securityContext: {{- omit .Values.secondary.containerSecurityContext "enabled" | toYaml | nindent 12 }} + {{- end }} + {{- if .Values.diagnosticMode.enabled }} + command: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.command "context" $) | nindent 12 }} + {{- else if .Values.secondary.command }} + command: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.command "context" $) | nindent 12 }} + {{- end }} + {{- if .Values.diagnosticMode.enabled }} + args: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.args "context" $) | nindent 12 }} + {{- else if .Values.secondary.args }} + args: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.args "context" $) | nindent 12 }} + {{- end }} + env: + - name: BITNAMI_DEBUG + value: {{ ternary "true" "false" (or .Values.image.debug .Values.diagnosticMode.enabled) | quote }} + - name: MYSQL_REPLICATION_MODE + value: "slave" + 
- name: MYSQL_MASTER_HOST + value: {{ include "mysql.primary.fullname" . }} + - name: MYSQL_MASTER_PORT_NUMBER + value: {{ .Values.primary.service.port | quote }} + - name: MYSQL_MASTER_ROOT_USER + value: "root" + - name: MYSQL_REPLICATION_USER + value: {{ .Values.auth.replicationUser | quote }} + {{- if .Values.auth.usePasswordFiles }} + - name: MYSQL_MASTER_ROOT_PASSWORD_FILE + value: {{ default "/opt/bitnami/mysql/secrets/mysql-root-password" .Values.auth.customPasswordFiles.root }} + - name: MYSQL_REPLICATION_PASSWORD_FILE + value: {{ default "/opt/bitnami/mysql/secrets/mysql-replication-password" .Values.auth.customPasswordFiles.replicator }} + {{- else }} + - name: MYSQL_MASTER_ROOT_PASSWORD + valueFrom: + secretKeyRef: + name: {{ template "mysql.secretName" . }} + key: mysql-root-password + - name: MYSQL_REPLICATION_PASSWORD + valueFrom: + secretKeyRef: + name: {{ template "mysql.secretName" . }} + key: mysql-replication-password + {{- end }} + {{- if .Values.secondary.extraFlags }} + - name: MYSQL_EXTRA_FLAGS + value: "{{ .Values.secondary.extraFlags }}" + {{- end }} + {{- if .Values.secondary.extraEnvVars }} + {{- include "common.tplvalues.render" (dict "value" .Values.secondary.extraEnvVars "context" $) | nindent 12 }} + {{- end }} + {{- if or .Values.secondary.extraEnvVarsCM .Values.secondary.extraEnvVarsSecret }} + envFrom: + {{- if .Values.secondary.extraEnvVarsCM }} + - configMapRef: + name: {{ .Values.secondary.extraEnvVarsCM }} + {{- end }} + {{- if .Values.secondary.extraEnvVarsSecret }} + - secretRef: + name: {{ .Values.secondary.extraEnvVarsSecret }} + {{- end }} + {{- end }} + ports: + - name: mysql + containerPort: 3306 + {{- if not .Values.diagnosticMode.enabled }} + {{- if .Values.secondary.livenessProbe.enabled }} + livenessProbe: {{- omit .Values.secondary.livenessProbe "enabled" | toYaml | nindent 12 }} + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_MASTER_ROOT_PASSWORD:-}" + if [[ -f 
"${MYSQL_MASTER_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_MASTER_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + {{- else if .Values.secondary.customLivenessProbe }} + livenessProbe: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.customLivenessProbe "context" $) | nindent 12 }} + {{- end }} + {{- if .Values.secondary.readinessProbe.enabled }} + readinessProbe: {{- omit .Values.secondary.readinessProbe "enabled" | toYaml | nindent 12 }} + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_MASTER_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_MASTER_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_MASTER_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + {{- else if .Values.secondary.customReadinessProbe }} + readinessProbe: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.customReadinessProbe "context" $) | nindent 12 }} + {{- end }} + {{- if .Values.secondary.startupProbe.enabled }} + startupProbe: {{- omit .Values.secondary.startupProbe "enabled" | toYaml | nindent 12 }} + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_MASTER_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_MASTER_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_MASTER_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + {{- else if .Values.secondary.customStartupProbe }} + startupProbe: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.customStartupProbe "context" $) | nindent 12 }} + {{- end }} + {{- end }} + {{- if .Values.secondary.resources }} + resources: {{ toYaml .Values.secondary.resources | nindent 12 }} + {{- end }} + volumeMounts: + - name: data + mountPath: /bitnami/mysql + {{- if or .Values.secondary.configuration .Values.secondary.existingConfigmap }} + - name: config + mountPath: /opt/bitnami/mysql/conf/my.cnf + subPath: my.cnf + {{- end }} + {{- if and .Values.auth.usePasswordFiles 
(not .Values.auth.customPasswordFiles) }} + - name: mysql-credentials + mountPath: /opt/bitnami/mysql/secrets/ + {{- end }} + {{- if .Values.secondary.extraVolumeMounts }} + {{- include "common.tplvalues.render" (dict "value" .Values.secondary.extraVolumeMounts "context" $) | nindent 12 }} + {{- end }} + {{- if .Values.metrics.enabled }} + - name: metrics + image: {{ include "mysql.metrics.image" . }} + imagePullPolicy: {{ .Values.metrics.image.pullPolicy | quote }} + env: + {{- if .Values.auth.usePasswordFiles }} + - name: MYSQL_ROOT_PASSWORD_FILE + value: {{ default "/opt/bitnami/mysqld-exporter/secrets/mysql-root-password" .Values.auth.customPasswordFiles.root }} + {{- else }} + - name: MYSQL_ROOT_PASSWORD + valueFrom: + secretKeyRef: + name: {{ template "mysql.secretName" . }} + key: mysql-root-password + {{- end }} + {{- if .Values.diagnosticMode.enabled }} + command: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.command "context" $) | nindent 12 }} + args: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.args "context" $) | nindent 12 }} + {{- else }} + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE") + fi + DATA_SOURCE_NAME="root:${password_aux}@(localhost:3306)/" /bin/mysqld_exporter {{- range .Values.metrics.extraArgs.secondary }} {{ . 
}} {{- end }} + {{- end }} + ports: + - name: metrics + containerPort: 9104 + {{- if not .Values.diagnosticMode.enabled }} + {{- if .Values.metrics.livenessProbe.enabled }} + livenessProbe: {{- omit .Values.metrics.livenessProbe "enabled" | toYaml | nindent 12 }} + httpGet: + path: /metrics + port: metrics + {{- end }} + {{- if .Values.metrics.readinessProbe.enabled }} + readinessProbe: {{- omit .Values.metrics.readinessProbe "enabled" | toYaml | nindent 12 }} + httpGet: + path: /metrics + port: metrics + {{- end }} + {{- end }} + {{- if .Values.metrics.resources }} + resources: {{- toYaml .Values.metrics.resources | nindent 12 }} + {{- end }} + {{- if and .Values.auth.usePasswordFiles (not .Values.auth.customPasswordFiles) }} + volumeMounts: + - name: mysql-credentials + mountPath: /opt/bitnami/mysqld-exporter/secrets/ + {{- end }} + {{- end }} + {{- if .Values.secondary.sidecars }} + {{- include "common.tplvalues.render" (dict "value" .Values.secondary.sidecars "context" $) | nindent 8 }} + {{- end }} + volumes: + {{- if or .Values.secondary.configuration .Values.secondary.existingConfigmap }} + - name: config + configMap: + name: {{ include "mysql.secondary.configmapName" . }} + {{- end }} + {{- if and .Values.auth.usePasswordFiles (not .Values.auth.customPasswordFiles) }} + - name: mysql-credentials + secret: + secretName: {{ template "mysql.secretName" . }} + items: + - key: mysql-root-password + path: mysql-root-password + - key: mysql-replication-password + path: mysql-replication-password + {{- end }} + {{- if .Values.secondary.extraVolumes }} + {{- include "common.tplvalues.render" (dict "value" .Values.secondary.extraVolumes "context" $) | nindent 8 }} + {{- end }} + {{- if not .Values.secondary.persistence.enabled }} + - name: data + emptyDir: {} + {{- else }} + volumeClaimTemplates: + - metadata: + name: data + labels: {{ include "common.labels.matchLabels" . 
| nindent 10 }} + app.kubernetes.io/component: secondary + {{- if .Values.secondary.persistence.annotations }} + annotations: + {{- toYaml .Values.secondary.persistence.annotations | nindent 10 }} + {{- end }} + spec: + accessModes: + {{- range .Values.secondary.persistence.accessModes }} + - {{ . | quote }} + {{- end }} + resources: + requests: + storage: {{ .Values.secondary.persistence.size | quote }} + {{ include "common.storage.class" (dict "persistence" .Values.secondary.persistence "global" .Values.global) }} + {{- if .Values.secondary.persistence.selector }} + selector: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.persistence.selector "context" $) | nindent 10 }} + {{- end -}} + {{- end }} +{{- end }} diff --git a/pkg/scanners/helm/test/mysql/templates/secondary/svc-headless.yaml b/pkg/scanners/helm/test/mysql/templates/secondary/svc-headless.yaml new file mode 100644 index 000000000000..703d8e747b75 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/secondary/svc-headless.yaml @@ -0,0 +1,26 @@ +{{- if eq .Values.architecture "replication" }} +apiVersion: v1 +kind: Service +metadata: + name: {{ include "mysql.secondary.fullname" . }}-headless + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: secondary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + annotations: + {{- if .Values.commonAnnotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + type: ClusterIP + clusterIP: None + publishNotReadyAddresses: true + ports: + - name: mysql + port: {{ .Values.secondary.service.port }} + targetPort: mysql + selector: {{ include "common.labels.matchLabels" . 
| nindent 4 }} + app.kubernetes.io/component: secondary +{{- end }} diff --git a/pkg/scanners/helm/test/mysql/templates/secondary/svc.yaml b/pkg/scanners/helm/test/mysql/templates/secondary/svc.yaml new file mode 100644 index 000000000000..74a4c6ef5fb8 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/secondary/svc.yaml @@ -0,0 +1,43 @@ +{{- if eq .Values.architecture "replication" }} +apiVersion: v1 +kind: Service +metadata: + name: {{ include "mysql.secondary.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: secondary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + annotations: + {{- if .Values.commonAnnotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.secondary.service.annotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.secondary.service.annotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + type: {{ .Values.secondary.service.type }} + {{- if and (eq .Values.secondary.service.type "ClusterIP") .Values.secondary.service.clusterIP }} + clusterIP: {{ .Values.secondary.service.clusterIP }} + {{- end }} + {{- if and .Values.secondary.service.loadBalancerIP (eq .Values.secondary.service.type "LoadBalancer") }} + loadBalancerIP: {{ .Values.secondary.service.loadBalancerIP }} + externalTrafficPolicy: {{ .Values.secondary.service.externalTrafficPolicy | quote }} + {{- end }} + {{- if and (eq .Values.secondary.service.type "LoadBalancer") .Values.secondary.service.loadBalancerSourceRanges }} + loadBalancerSourceRanges: {{- toYaml .Values.secondary.service.loadBalancerSourceRanges | nindent 4 }} + {{- end }} + ports: + - name: mysql + port: {{ .Values.secondary.service.port }} + protocol: TCP + targetPort: mysql + {{- if (and (or 
(eq .Values.secondary.service.type "NodePort") (eq .Values.secondary.service.type "LoadBalancer")) .Values.secondary.service.nodePort) }} + nodePort: {{ .Values.secondary.service.nodePort }} + {{- else if eq .Values.secondary.service.type "ClusterIP" }} + nodePort: null + {{- end }} + selector: {{ include "common.labels.matchLabels" . | nindent 4 }} + app.kubernetes.io/component: secondary +{{- end }} diff --git a/pkg/scanners/helm/test/mysql/templates/secrets.yaml b/pkg/scanners/helm/test/mysql/templates/secrets.yaml new file mode 100644 index 000000000000..9412fc35a5bc --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/secrets.yaml @@ -0,0 +1,21 @@ +{{- if eq (include "mysql.createSecret" .) "true" }} +apiVersion: v1 +kind: Secret +metadata: + name: {{ include "common.names.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +type: Opaque +data: + mysql-root-password: {{ include "mysql.root.password" . | b64enc | quote }} + mysql-password: {{ include "mysql.password" . | b64enc | quote }} + {{- if eq .Values.architecture "replication" }} + mysql-replication-password: {{ include "mysql.replication.password" . | b64enc | quote }} + {{- end }} +{{- end }} diff --git a/pkg/scanners/helm/test/mysql/templates/serviceaccount.yaml b/pkg/scanners/helm/test/mysql/templates/serviceaccount.yaml new file mode 100644 index 000000000000..59eb10409d91 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/serviceaccount.yaml @@ -0,0 +1,22 @@ +{{- if .Values.serviceAccount.create }} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "mysql.serviceAccountName" . 
}} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + annotations: + {{- if .Values.serviceAccount.annotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.serviceAccount.annotations "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +{{- if (not .Values.auth.customPasswordFiles) }} +secrets: + - name: {{ template "mysql.secretName" . }} +{{- end }} +{{- end }} diff --git a/pkg/scanners/helm/test/mysql/templates/servicemonitor.yaml b/pkg/scanners/helm/test/mysql/templates/servicemonitor.yaml new file mode 100644 index 000000000000..f082dd5409d6 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/templates/servicemonitor.yaml @@ -0,0 +1,42 @@ +{{- if and .Values.metrics.enabled .Values.metrics.serviceMonitor.enabled }} +apiVersion: monitoring.coreos.com/v1 +kind: ServiceMonitor +metadata: + name: {{ include "common.names.fullname" . }} + {{- if .Values.metrics.serviceMonitor.namespace }} + namespace: {{ .Values.metrics.serviceMonitor.namespace }} + {{- else }} + namespace: {{ .Release.Namespace }} + {{- end }} + labels: {{- include "common.labels.standard" . 
| nindent 4 }} + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.metrics.serviceMonitor.additionalLabels }} + {{- include "common.tplvalues.render" (dict "value" .Values.metrics.serviceMonitor.additionalLabels "context" $) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + endpoints: + - port: metrics + {{- if .Values.metrics.serviceMonitor.interval }} + interval: {{ .Values.metrics.serviceMonitor.interval }} + {{- end }} + {{- if .Values.metrics.serviceMonitor.scrapeTimeout }} + scrapeTimeout: {{ .Values.metrics.serviceMonitor.scrapeTimeout }} + {{- end }} + {{- if .Values.metrics.serviceMonitor.honorLabels }} + honorLabels: {{ .Values.metrics.serviceMonitor.honorLabels }} + {{- end }} + {{- if .Values.metrics.serviceMonitor.relabellings }} + metricRelabelings: {{- toYaml .Values.metrics.serviceMonitor.relabellings | nindent 6 }} + {{- end }} + namespaceSelector: + matchNames: + - {{ .Release.Namespace }} + selector: + matchLabels: {{- include "common.labels.matchLabels" . 
| nindent 6 }} + app.kubernetes.io/component: metrics +{{- end }} diff --git a/pkg/scanners/helm/test/mysql/values.schema.json b/pkg/scanners/helm/test/mysql/values.schema.json new file mode 100644 index 000000000000..8021a4603600 --- /dev/null +++ b/pkg/scanners/helm/test/mysql/values.schema.json @@ -0,0 +1,178 @@ +{ + "$schema": "http://json-schema.org/schema#", + "type": "object", + "properties": { + "architecture": { + "type": "string", + "title": "MySQL architecture", + "form": true, + "description": "Allowed values: `standalone` or `replication`", + "enum": ["standalone", "replication"] + }, + "auth": { + "type": "object", + "title": "Authentication configuration", + "form": true, + "required": ["database", "username", "password"], + "properties": { + "rootPassword": { + "type": "string", + "title": "MySQL root password", + "description": "Defaults to a random 10-character alphanumeric string if not set" + }, + "database": { + "type": "string", + "title": "MySQL custom database name" + }, + "username": { + "type": "string", + "title": "MySQL custom username" + }, + "password": { + "type": "string", + "title": "MySQL custom password" + }, + "replicationUser": { + "type": "string", + "title": "MySQL replication username" + }, + "replicationPassword": { + "type": "string", + "title": "MySQL replication password" + } + } + }, + "primary": { + "type": "object", + "title": "Primary database configuration", + "form": true, + "properties": { + "podSecurityContext": { + "type": "object", + "title": "MySQL primary Pod security context", + "properties": { + "enabled": { + "type": "boolean", + "default": false + }, + "fsGroup": { + "type": "integer", + "default": 1001, + "hidden": { + "value": false, + "path": "primary/podSecurityContext/enabled" + } + } + } + }, + "containerSecurityContext": { + "type": "object", + "title": "MySQL primary container security context", + "properties": { + "enabled": { + "type": "boolean", + "default": false + }, + "runAsUser": { + "type": 
"integer", + "default": 1001, + "hidden": { + "value": false, + "path": "primary/containerSecurityContext/enabled" + } + } + } + }, + "persistence": { + "type": "object", + "title": "Enable persistence using Persistent Volume Claims", + "properties": { + "enabled": { + "type": "boolean", + "default": true, + "title": "If true, use a Persistent Volume Claim, If false, use emptyDir" + }, + "size": { + "type": "string", + "title": "Persistent Volume Size", + "form": true, + "render": "slider", + "sliderMin": 1, + "sliderUnit": "Gi", + "hidden": { + "value": false, + "path": "primary/persistence/enabled" + } + } + } + } + } + }, + "secondary": { + "type": "object", + "title": "Secondary database configuration", + "form": true, + "properties": { + "podSecurityContext": { + "type": "object", + "title": "MySQL secondary Pod security context", + "properties": { + "enabled": { + "type": "boolean", + "default": false + }, + "fsGroup": { + "type": "integer", + "default": 1001, + "hidden": { + "value": false, + "path": "secondary/podSecurityContext/enabled" + } + } + } + }, + "containerSecurityContext": { + "type": "object", + "title": "MySQL secondary container security context", + "properties": { + "enabled": { + "type": "boolean", + "default": false + }, + "runAsUser": { + "type": "integer", + "default": 1001, + "hidden": { + "value": false, + "path": "secondary/containerSecurityContext/enabled" + } + } + } + }, + "persistence": { + "type": "object", + "title": "Enable persistence using Persistent Volume Claims", + "properties": { + "enabled": { + "type": "boolean", + "default": true, + "title": "If true, use a Persistent Volume Claim, If false, use emptyDir" + }, + "size": { + "type": "string", + "title": "Persistent Volume Size", + "form": true, + "render": "slider", + "sliderMin": 1, + "sliderUnit": "Gi", + "hidden": { + "value": false, + "path": "secondary/persistence/enabled" + } + } + } + } + } + } + } +} \ No newline at end of file diff --git 
a/pkg/scanners/helm/test/mysql/values.yaml b/pkg/scanners/helm/test/mysql/values.yaml new file mode 100644 index 000000000000..3900e865955c --- /dev/null +++ b/pkg/scanners/helm/test/mysql/values.yaml @@ -0,0 +1,1020 @@ +## @section Global parameters +## Global Docker image parameters +## Please, note that this will override the image parameters, including dependencies, configured to use the global value +## Current available global Docker image parameters: imageRegistry, imagePullSecrets and storageClass + +## @param global.imageRegistry Global Docker image registry +## @param global.imagePullSecrets [array] Global Docker registry secret names as an array +## @param global.storageClass Global StorageClass for Persistent Volume(s) +## +global: + imageRegistry: "" + ## E.g. + ## imagePullSecrets: + ## - myRegistryKeySecretName + ## + imagePullSecrets: [] + storageClass: "" + +## @section Common parameters + +## @param nameOverride String to partially override common.names.fullname template (will maintain the release name) +## +nameOverride: "" +## @param fullnameOverride String to fully override common.names.fullname template +## +fullnameOverride: "" +## @param clusterDomain Cluster domain +## +clusterDomain: cluster.local +## @param commonAnnotations [object] Common annotations to add to all MySQL resources (sub-charts are not considered). Evaluated as a template +## +commonAnnotations: {} +## @param commonLabels [object] Common labels to add to all MySQL resources (sub-charts are not considered). Evaluated as a template +## +commonLabels: {} +## @param extraDeploy [array] Array with extra yaml to deploy with the chart. Evaluated as a template +## +extraDeploy: [] +## @param schedulerName Use an alternate scheduler, e.g. "stork". 
+## ref: https://kubernetes.io/docs/tasks/administer-cluster/configure-multiple-schedulers/ +## +schedulerName: "" + +## Enable diagnostic mode in the deployment +## +diagnosticMode: + ## @param diagnosticMode.enabled Enable diagnostic mode (all probes will be disabled and the command will be overridden) + ## + enabled: false + ## @param diagnosticMode.command Command to override all containers in the deployment + ## + command: + - sleep + ## @param diagnosticMode.args Args to override all containers in the deployment + ## + args: + - infinity + +## @section MySQL common parameters + +## Bitnami MySQL image +## ref: https://hub.docker.com/r/bitnami/mysql/tags/ +## @param image.registry MySQL image registry +## @param image.repository MySQL image repository +## @param image.tag MySQL image tag (immutable tags are recommended) +## @param image.pullPolicy MySQL image pull policy +## @param image.pullSecrets [array] Specify docker-registry secret names as an array +## @param image.debug Specify if debug logs should be enabled +## +image: + registry: docker.io + repository: bitnami/mysql + tag: 8.0.28-debian-10-r23 + ## Specify a imagePullPolicy + ## Defaults to 'Always' if image tag is 'latest', else set to 'IfNotPresent' + ## ref: https://kubernetes.io/docs/user-guide/images/#pre-pulling-images + ## + pullPolicy: IfNotPresent + ## Optionally specify an array of imagePullSecrets (secrets must be manually created in the namespace) + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/ + ## Example: + ## pullSecrets: + ## - myRegistryKeySecretName + ## + pullSecrets: [] + ## Set to true if you would like to see extra information on logs + ## It turns BASH and/or NAMI debugging in the image + ## + debug: false +## @param architecture MySQL architecture (`standalone` or `replication`) +## +architecture: standalone +## MySQL Authentication parameters +## +auth: + ## @param auth.rootPassword Password for the `root` user. 
Ignored if existing secret is provided + ## ref: https://github.com/bitnami/bitnami-docker-mysql#setting-the-root-password-on-first-run + ## + rootPassword: "" + ## @param auth.database Name for a custom database to create + ## ref: https://github.com/bitnami/bitnami-docker-mysql/blob/master/README.md#creating-a-database-on-first-run + ## + database: my_database + ## @param auth.username Name for a custom user to create + ## ref: https://github.com/bitnami/bitnami-docker-mysql/blob/master/README.md#creating-a-database-user-on-first-run + ## + username: "" + ## @param auth.password Password for the new user. Ignored if existing secret is provided + ## + password: "" + ## @param auth.replicationUser MySQL replication user + ## ref: https://github.com/bitnami/bitnami-docker-mysql#setting-up-a-replication-cluster + ## + replicationUser: replicator + ## @param auth.replicationPassword MySQL replication user password. Ignored if existing secret is provided + ## + replicationPassword: "" + ## @param auth.existingSecret Use existing secret for password details. The secret has to contain the keys `mysql-root-password`, `mysql-replication-password` and `mysql-password` + ## NOTE: When it's set the auth.rootPassword, auth.password, auth.replicationPassword are ignored. + ## + existingSecret: "" + ## @param auth.forcePassword Force users to specify required passwords + ## + forcePassword: false + ## @param auth.usePasswordFiles Mount credentials as files instead of using an environment variable + ## + usePasswordFiles: false + ## @param auth.customPasswordFiles [object] Use custom password files when `auth.usePasswordFiles` is set to `true`. 
Define path for keys `root` and `user`, also define `replicator` if `architecture` is set to `replication` + ## Example: + ## customPasswordFiles: + ## root: /vault/secrets/mysql-root + ## user: /vault/secrets/mysql-user + ## replicator: /vault/secrets/mysql-replicator + ## + customPasswordFiles: {} +## @param initdbScripts [object] Dictionary of initdb scripts +## Specify dictionary of scripts to be run at first boot +## Example: +## initdbScripts: +## my_init_script.sh: | +## #!/bin/bash +## echo "Do something." +## +initdbScripts: {} +## @param initdbScriptsConfigMap ConfigMap with the initdb scripts (Note: Overrides `initdbScripts`) +## +initdbScriptsConfigMap: "" + +## @section MySQL Primary parameters + +primary: + ## @param primary.command [array] Override default container command on MySQL Primary container(s) (useful when using custom images) + ## + command: [] + ## @param primary.args [array] Override default container args on MySQL Primary container(s) (useful when using custom images) + ## + args: [] + ## @param primary.hostAliases [array] Deployment pod host aliases + ## https://kubernetes.io/docs/concepts/services-networking/add-entries-to-pod-etc-hosts-with-host-aliases/ + ## + hostAliases: [] + ## @param primary.configuration [string] Configure MySQL Primary with a custom my.cnf file + ## ref: https://mysql.com/kb/en/mysql/configuring-mysql-with-mycnf/#example-of-configuration-file + ## + configuration: |- + [mysqld] + default_authentication_plugin=mysql_native_password + skip-name-resolve + explicit_defaults_for_timestamp + basedir=/opt/bitnami/mysql + plugin_dir=/opt/bitnami/mysql/lib/plugin + port=3306 + socket=/opt/bitnami/mysql/tmp/mysql.sock + datadir=/bitnami/mysql/data + tmpdir=/opt/bitnami/mysql/tmp + max_allowed_packet=16M + bind-address=0.0.0.0 + pid-file=/opt/bitnami/mysql/tmp/mysqld.pid + log-error=/opt/bitnami/mysql/logs/mysqld.log + character-set-server=UTF8 + collation-server=utf8_general_ci + + [client] + port=3306 + 
socket=/opt/bitnami/mysql/tmp/mysql.sock + default-character-set=UTF8 + plugin_dir=/opt/bitnami/mysql/lib/plugin + + [manager] + port=3306 + socket=/opt/bitnami/mysql/tmp/mysql.sock + pid-file=/opt/bitnami/mysql/tmp/mysqld.pid + ## @param primary.existingConfigmap Name of existing ConfigMap with MySQL Primary configuration. + ## NOTE: When it's set the 'configuration' parameter is ignored + ## + existingConfigmap: "" + ## @param primary.updateStrategy Update strategy type for the MySQL primary statefulset + ## ref: https://kubernetes.io/docs/concepts/workloads/controllers/statefulset/#update-strategies + ## + updateStrategy: RollingUpdate + ## @param primary.rollingUpdatePartition Partition update strategy for MySQL Primary statefulset + ## https://kubernetes.io/docs/concepts/workloads/controllers/statefulset/#partitions + ## + rollingUpdatePartition: "" + ## @param primary.podAnnotations [object] Additional pod annotations for MySQL primary pods + ## ref: https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/ + ## + podAnnotations: {} + ## @param primary.podAffinityPreset MySQL primary pod affinity preset. Ignored if `primary.affinity` is set. Allowed values: `soft` or `hard` + ## ref: https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#inter-pod-affinity-and-anti-affinity + ## + podAffinityPreset: "" + ## @param primary.podAntiAffinityPreset MySQL primary pod anti-affinity preset. Ignored if `primary.affinity` is set. Allowed values: `soft` or `hard` + ## ref: https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#inter-pod-affinity-and-anti-affinity + ## + podAntiAffinityPreset: soft + ## MySQL Primary node affinity preset + ## ref: https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#node-affinity + ## + nodeAffinityPreset: + ## @param primary.nodeAffinityPreset.type MySQL primary node affinity preset type. Ignored if `primary.affinity` is set. 
Allowed values: `soft` or `hard` + ## + type: "" + ## @param primary.nodeAffinityPreset.key MySQL primary node label key to match Ignored if `primary.affinity` is set. + ## E.g. + ## key: "kubernetes.io/e2e-az-name" + ## + key: "" + ## @param primary.nodeAffinityPreset.values [array] MySQL primary node label values to match. Ignored if `primary.affinity` is set. + ## E.g. + ## values: + ## - e2e-az1 + ## - e2e-az2 + ## + values: [] + ## @param primary.affinity [object] Affinity for MySQL primary pods assignment + ## ref: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/#affinity-and-anti-affinity + ## Note: podAffinityPreset, podAntiAffinityPreset, and nodeAffinityPreset will be ignored when it's set + ## + affinity: {} + ## @param primary.nodeSelector [object] Node labels for MySQL primary pods assignment + ## ref: https://kubernetes.io/docs/user-guide/node-selection/ + ## + nodeSelector: {} + ## @param primary.tolerations [array] Tolerations for MySQL primary pods assignment + ## ref: https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/ + ## + tolerations: [] + ## MySQL primary Pod security context + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/security-context/#set-the-security-context-for-a-pod + ## @param primary.podSecurityContext.enabled Enable security context for MySQL primary pods + ## @param primary.podSecurityContext.fsGroup Group ID for the mounted volumes' filesystem + ## + podSecurityContext: + enabled: true + fsGroup: 1001 + ## MySQL primary container security context + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/security-context/#set-the-security-context-for-a-container + ## @param primary.containerSecurityContext.enabled MySQL primary container securityContext + ## @param primary.containerSecurityContext.runAsUser User ID for the MySQL primary container + ## + containerSecurityContext: + enabled: true + runAsUser: 1001 + ## MySQL primary container's resource requests and 
limits + ## ref: https://kubernetes.io/docs/user-guide/compute-resources/ + ## We usually recommend not to specify default resources and to leave this as a conscious + ## choice for the user. This also increases chances charts run on environments with little + ## resources, such as Minikube. If you do want to specify resources, uncomment the following + ## lines, adjust them as necessary, and remove the curly braces after 'resources:'. + ## @param primary.resources.limits [object] The resources limits for MySQL primary containers + ## @param primary.resources.requests [object] The requested resources for MySQL primary containers + ## + resources: + ## Example: + ## limits: + ## cpu: 250m + ## memory: 256Mi + limits: {} + ## Examples: + ## requests: + ## cpu: 250m + ## memory: 256Mi + requests: {} + ## Configure extra options for liveness probe + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/#configure-probes + ## @param primary.livenessProbe.enabled Enable livenessProbe + ## @param primary.livenessProbe.initialDelaySeconds Initial delay seconds for livenessProbe + ## @param primary.livenessProbe.periodSeconds Period seconds for livenessProbe + ## @param primary.livenessProbe.timeoutSeconds Timeout seconds for livenessProbe + ## @param primary.livenessProbe.failureThreshold Failure threshold for livenessProbe + ## @param primary.livenessProbe.successThreshold Success threshold for livenessProbe + ## + livenessProbe: + enabled: true + initialDelaySeconds: 5 + periodSeconds: 10 + timeoutSeconds: 1 + failureThreshold: 3 + successThreshold: 1 + ## Configure extra options for readiness probe + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/#configure-probes + ## @param primary.readinessProbe.enabled Enable readinessProbe + ## @param primary.readinessProbe.initialDelaySeconds Initial delay seconds for readinessProbe + ## @param primary.readinessProbe.periodSeconds 
Period seconds for readinessProbe + ## @param primary.readinessProbe.timeoutSeconds Timeout seconds for readinessProbe + ## @param primary.readinessProbe.failureThreshold Failure threshold for readinessProbe + ## @param primary.readinessProbe.successThreshold Success threshold for readinessProbe + ## + readinessProbe: + enabled: true + initialDelaySeconds: 5 + periodSeconds: 10 + timeoutSeconds: 1 + failureThreshold: 3 + successThreshold: 1 + ## Configure extra options for startupProbe probe + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/#configure-probes + ## @param primary.startupProbe.enabled Enable startupProbe + ## @param primary.startupProbe.initialDelaySeconds Initial delay seconds for startupProbe + ## @param primary.startupProbe.periodSeconds Period seconds for startupProbe + ## @param primary.startupProbe.timeoutSeconds Timeout seconds for startupProbe + ## @param primary.startupProbe.failureThreshold Failure threshold for startupProbe + ## @param primary.startupProbe.successThreshold Success threshold for startupProbe + ## + startupProbe: + enabled: true + initialDelaySeconds: 15 + periodSeconds: 10 + timeoutSeconds: 1 + failureThreshold: 10 + successThreshold: 1 + ## @param primary.customLivenessProbe [object] Override default liveness probe for MySQL primary containers + ## + customLivenessProbe: {} + ## @param primary.customReadinessProbe [object] Override default readiness probe for MySQL primary containers + ## + customReadinessProbe: {} + ## @param primary.customStartupProbe [object] Override default startup probe for MySQL primary containers + ## + customStartupProbe: {} + ## @param primary.extraFlags MySQL primary additional command line flags + ## Can be used to specify command line flags, for example: + ## E.g. 
+ ## extraFlags: "--max-connect-errors=1000 --max_connections=155" + ## + extraFlags: "" + ## @param primary.extraEnvVars [array] Extra environment variables to be set on MySQL primary containers + ## E.g. + ## extraEnvVars: + ## - name: TZ + ## value: "Europe/Paris" + ## + extraEnvVars: [] + ## @param primary.extraEnvVarsCM Name of existing ConfigMap containing extra env vars for MySQL primary containers + ## + extraEnvVarsCM: "" + ## @param primary.extraEnvVarsSecret Name of existing Secret containing extra env vars for MySQL primary containers + ## + extraEnvVarsSecret: "" + ## Enable persistence using Persistent Volume Claims + ## ref: https://kubernetes.io/docs/user-guide/persistent-volumes/ + ## + persistence: + ## @param primary.persistence.enabled Enable persistence on MySQL primary replicas using a `PersistentVolumeClaim`. If false, use emptyDir + ## + enabled: true + ## @param primary.persistence.existingClaim Name of an existing `PersistentVolumeClaim` for MySQL primary replicas + ## NOTE: When it's set the rest of persistence parameters are ignored + ## + existingClaim: "" + ## @param primary.persistence.storageClass MySQL primary persistent volume storage Class + ## If defined, storageClassName: + ## If set to "-", storageClassName: "", which disables dynamic provisioning + ## If undefined (the default) or set to null, no storageClassName spec is + ## set, choosing the default provisioner. 
(gp2 on AWS, standard on + ## GKE, AWS & OpenStack) + ## + storageClass: "" + ## @param primary.persistence.annotations [object] MySQL primary persistent volume claim annotations + ## + annotations: {} + ## @param primary.persistence.accessModes MySQL primary persistent volume access Modes + ## + accessModes: + - ReadWriteOnce + ## @param primary.persistence.size MySQL primary persistent volume size + ## + size: 8Gi + ## @param primary.persistence.selector [object] Selector to match an existing Persistent Volume + ## selector: + ## matchLabels: + ## app: my-app + ## + selector: {} + ## @param primary.extraVolumes [array] Optionally specify extra list of additional volumes to the MySQL Primary pod(s) + ## + extraVolumes: [] + ## @param primary.extraVolumeMounts [array] Optionally specify extra list of additional volumeMounts for the MySQL Primary container(s) + ## + extraVolumeMounts: [] + ## @param primary.initContainers [array] Add additional init containers for the MySQL Primary pod(s) + ## + initContainers: [] + ## @param primary.sidecars [array] Add additional sidecar containers for the MySQL Primary pod(s) + ## + sidecars: [] + ## MySQL Primary Service parameters + ## + service: + ## @param primary.service.type MySQL Primary K8s service type + ## + type: ClusterIP + ## @param primary.service.port MySQL Primary K8s service port + ## + port: 3306 + ## @param primary.service.nodePort MySQL Primary K8s service node port + ## ref: https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport + ## + nodePort: "" + ## @param primary.service.clusterIP MySQL Primary K8s service clusterIP IP + ## e.g: + ## clusterIP: None + ## + clusterIP: "" + ## @param primary.service.loadBalancerIP MySQL Primary loadBalancerIP if service type is `LoadBalancer` + ## Set the LoadBalancer service type to internal only + ## ref: https://kubernetes.io/docs/concepts/services-networking/service/#internal-load-balancer + ## + loadBalancerIP: "" + ## @param 
primary.service.externalTrafficPolicy Enable client source IP preservation + ## ref https://kubernetes.io/docs/tasks/access-application-cluster/create-external-load-balancer/#preserving-the-client-source-ip + ## + externalTrafficPolicy: Cluster + ## @param primary.service.loadBalancerSourceRanges [array] Addresses that are allowed when MySQL Primary service is LoadBalancer + ## https://kubernetes.io/docs/tasks/access-application-cluster/configure-cloud-provider-firewall/#restrict-access-for-loadbalancer-service + ## E.g. + ## loadBalancerSourceRanges: + ## - 10.10.10.0/24 + ## + loadBalancerSourceRanges: [] + ## @param primary.service.annotations [object] Provide any additional annotations which may be required + ## + annotations: {} + ## MySQL primary Pod Disruption Budget configuration + ## ref: https://kubernetes.io/docs/tasks/run-application/configure-pdb/ + ## + pdb: + ## @param primary.pdb.enabled Enable/disable a Pod Disruption Budget creation for MySQL primary pods + ## + enabled: false + ## @param primary.pdb.minAvailable Minimum number/percentage of MySQL primary pods that should remain scheduled + ## + minAvailable: 1 + ## @param primary.pdb.maxUnavailable Maximum number/percentage of MySQL primary pods that may be made unavailable + ## + maxUnavailable: "" + ## @param primary.podLabels [object] MySQL Primary pod label. 
If labels are same as commonLabels , this will take precedence + ## + podLabels: {} + +## @section MySQL Secondary parameters + +secondary: + ## @param secondary.replicaCount Number of MySQL secondary replicas + ## + replicaCount: 1 + ## @param secondary.hostAliases [array] Deployment pod host aliases + ## https://kubernetes.io/docs/concepts/services-networking/add-entries-to-pod-etc-hosts-with-host-aliases/ + ## + hostAliases: [] + ## @param secondary.command [array] Override default container command on MySQL Secondary container(s) (useful when using custom images) + ## + command: [] + ## @param secondary.args [array] Override default container args on MySQL Secondary container(s) (useful when using custom images) + ## + args: [] + ## @param secondary.configuration [string] Configure MySQL Secondary with a custom my.cnf file + ## ref: https://mysql.com/kb/en/mysql/configuring-mysql-with-mycnf/#example-of-configuration-file + ## + configuration: |- + [mysqld] + default_authentication_plugin=mysql_native_password + skip-name-resolve + explicit_defaults_for_timestamp + basedir=/opt/bitnami/mysql + port=3306 + socket=/opt/bitnami/mysql/tmp/mysql.sock + datadir=/bitnami/mysql/data + tmpdir=/opt/bitnami/mysql/tmp + max_allowed_packet=16M + bind-address=0.0.0.0 + pid-file=/opt/bitnami/mysql/tmp/mysqld.pid + log-error=/opt/bitnami/mysql/logs/mysqld.log + character-set-server=UTF8 + collation-server=utf8_general_ci + + [client] + port=3306 + socket=/opt/bitnami/mysql/tmp/mysql.sock + default-character-set=UTF8 + + [manager] + port=3306 + socket=/opt/bitnami/mysql/tmp/mysql.sock + pid-file=/opt/bitnami/mysql/tmp/mysqld.pid + ## @param secondary.existingConfigmap Name of existing ConfigMap with MySQL Secondary configuration. 
+ ## NOTE: When it's set the 'configuration' parameter is ignored + ## + existingConfigmap: "" + ## @param secondary.updateStrategy Update strategy type for the MySQL secondary statefulset + ## ref: https://kubernetes.io/docs/concepts/workloads/controllers/statefulset/#update-strategies + ## + updateStrategy: RollingUpdate + ## @param secondary.rollingUpdatePartition Partition update strategy for MySQL Secondary statefulset + ## https://kubernetes.io/docs/concepts/workloads/controllers/statefulset/#partitions + ## + rollingUpdatePartition: "" + ## @param secondary.podAnnotations [object] Additional pod annotations for MySQL secondary pods + ## ref: https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/ + ## + podAnnotations: {} + ## @param secondary.podAffinityPreset MySQL secondary pod affinity preset. Ignored if `secondary.affinity` is set. Allowed values: `soft` or `hard` + ## ref: https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#inter-pod-affinity-and-anti-affinity + ## + podAffinityPreset: "" + ## @param secondary.podAntiAffinityPreset MySQL secondary pod anti-affinity preset. Ignored if `secondary.affinity` is set. Allowed values: `soft` or `hard` + ## ref: https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#inter-pod-affinity-and-anti-affinity + ## Allowed values: soft, hard + ## + podAntiAffinityPreset: soft + ## MySQL Secondary node affinity preset + ## ref: https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#node-affinity + ## + nodeAffinityPreset: + ## @param secondary.nodeAffinityPreset.type MySQL secondary node affinity preset type. Ignored if `secondary.affinity` is set. Allowed values: `soft` or `hard` + ## + type: "" + ## @param secondary.nodeAffinityPreset.key MySQL secondary node label key to match Ignored if `secondary.affinity` is set. + ## E.g. 
+ ## key: "kubernetes.io/e2e-az-name" + ## + key: "" + ## @param secondary.nodeAffinityPreset.values [array] MySQL secondary node label values to match. Ignored if `secondary.affinity` is set. + ## E.g. + ## values: + ## - e2e-az1 + ## - e2e-az2 + ## + values: [] + ## @param secondary.affinity [object] Affinity for MySQL secondary pods assignment + ## ref: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/#affinity-and-anti-affinity + ## Note: podAffinityPreset, podAntiAffinityPreset, and nodeAffinityPreset will be ignored when it's set + ## + affinity: {} + ## @param secondary.nodeSelector [object] Node labels for MySQL secondary pods assignment + ## ref: https://kubernetes.io/docs/user-guide/node-selection/ + ## + nodeSelector: {} + ## @param secondary.tolerations [array] Tolerations for MySQL secondary pods assignment + ## ref: https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/ + ## + tolerations: [] + ## MySQL secondary Pod security context + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/security-context/#set-the-security-context-for-a-pod + ## @param secondary.podSecurityContext.enabled Enable security context for MySQL secondary pods + ## @param secondary.podSecurityContext.fsGroup Group ID for the mounted volumes' filesystem + ## + podSecurityContext: + enabled: true + fsGroup: 1001 + ## MySQL secondary container security context + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/security-context/#set-the-security-context-for-a-container + ## @param secondary.containerSecurityContext.enabled MySQL secondary container securityContext + ## @param secondary.containerSecurityContext.runAsUser User ID for the MySQL secondary container + ## + containerSecurityContext: + enabled: true + runAsUser: 1001 + ## MySQL secondary container's resource requests and limits + ## ref: https://kubernetes.io/docs/user-guide/compute-resources/ + ## We usually recommend not to specify default resources and to 
leave this as a conscious + ## choice for the user. This also increases chances charts run on environments with little + ## resources, such as Minikube. If you do want to specify resources, uncomment the following + ## lines, adjust them as necessary, and remove the curly braces after 'resources:'. + ## @param secondary.resources.limits [object] The resources limits for MySQL secondary containers + ## @param secondary.resources.requests [object] The requested resources for MySQL secondary containers + ## + resources: + ## Example: + ## limits: + ## cpu: 250m + ## memory: 256Mi + limits: {} + ## Examples: + ## requests: + ## cpu: 250m + ## memory: 256Mi + requests: {} + ## Configure extra options for liveness probe + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/#configure-probes + ## @param secondary.livenessProbe.enabled Enable livenessProbe + ## @param secondary.livenessProbe.initialDelaySeconds Initial delay seconds for livenessProbe + ## @param secondary.livenessProbe.periodSeconds Period seconds for livenessProbe + ## @param secondary.livenessProbe.timeoutSeconds Timeout seconds for livenessProbe + ## @param secondary.livenessProbe.failureThreshold Failure threshold for livenessProbe + ## @param secondary.livenessProbe.successThreshold Success threshold for livenessProbe + ## + livenessProbe: + enabled: true + initialDelaySeconds: 5 + periodSeconds: 10 + timeoutSeconds: 1 + failureThreshold: 3 + successThreshold: 1 + ## Configure extra options for readiness probe + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/#configure-probes + ## @param secondary.readinessProbe.enabled Enable readinessProbe + ## @param secondary.readinessProbe.initialDelaySeconds Initial delay seconds for readinessProbe + ## @param secondary.readinessProbe.periodSeconds Period seconds for readinessProbe + ## @param secondary.readinessProbe.timeoutSeconds Timeout seconds for 
readinessProbe + ## @param secondary.readinessProbe.failureThreshold Failure threshold for readinessProbe + ## @param secondary.readinessProbe.successThreshold Success threshold for readinessProbe + ## + readinessProbe: + enabled: true + initialDelaySeconds: 5 + periodSeconds: 10 + timeoutSeconds: 1 + failureThreshold: 3 + successThreshold: 1 + ## Configure extra options for startupProbe probe + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/#configure-probes + ## @param secondary.startupProbe.enabled Enable startupProbe + ## @param secondary.startupProbe.initialDelaySeconds Initial delay seconds for startupProbe + ## @param secondary.startupProbe.periodSeconds Period seconds for startupProbe + ## @param secondary.startupProbe.timeoutSeconds Timeout seconds for startupProbe + ## @param secondary.startupProbe.failureThreshold Failure threshold for startupProbe + ## @param secondary.startupProbe.successThreshold Success threshold for startupProbe + ## + startupProbe: + enabled: true + initialDelaySeconds: 15 + periodSeconds: 10 + timeoutSeconds: 1 + failureThreshold: 15 + successThreshold: 1 + ## @param secondary.customLivenessProbe [object] Override default liveness probe for MySQL secondary containers + ## + customLivenessProbe: {} + ## @param secondary.customReadinessProbe [object] Override default readiness probe for MySQL secondary containers + ## + customReadinessProbe: {} + ## @param secondary.customStartupProbe [object] Override default startup probe for MySQL secondary containers + ## + customStartupProbe: {} + ## @param secondary.extraFlags MySQL secondary additional command line flags + ## Can be used to specify command line flags, for example: + ## E.g. + ## extraFlags: "--max-connect-errors=1000 --max_connections=155" + ## + extraFlags: "" + ## @param secondary.extraEnvVars [array] An array to add extra environment variables on MySQL secondary containers + ## E.g. 
+ ## extraEnvVars: + ## - name: TZ + ## value: "Europe/Paris" + ## + extraEnvVars: [] + ## @param secondary.extraEnvVarsCM Name of existing ConfigMap containing extra env vars for MySQL secondary containers + ## + extraEnvVarsCM: "" + ## @param secondary.extraEnvVarsSecret Name of existing Secret containing extra env vars for MySQL secondary containers + ## + extraEnvVarsSecret: "" + ## Enable persistence using Persistent Volume Claims + ## ref: https://kubernetes.io/docs/user-guide/persistent-volumes/ + ## + persistence: + ## @param secondary.persistence.enabled Enable persistence on MySQL secondary replicas using a `PersistentVolumeClaim` + ## + enabled: true + ## @param secondary.persistence.storageClass MySQL secondary persistent volume storage Class + ## If defined, storageClassName: + ## If set to "-", storageClassName: "", which disables dynamic provisioning + ## If undefined (the default) or set to null, no storageClassName spec is + ## set, choosing the default provisioner. (gp2 on AWS, standard on + ## GKE, AWS & OpenStack) + ## + storageClass: "" + ## @param secondary.persistence.annotations [object] MySQL secondary persistent volume claim annotations + ## + annotations: {} + ## @param secondary.persistence.accessModes MySQL secondary persistent volume access Modes + ## + accessModes: + - ReadWriteOnce + ## @param secondary.persistence.size MySQL secondary persistent volume size + ## + size: 8Gi + ## @param secondary.persistence.selector [object] Selector to match an existing Persistent Volume + ## selector: + ## matchLabels: + ## app: my-app + ## + selector: {} + ## @param secondary.extraVolumes [array] Optionally specify extra list of additional volumes to the MySQL secondary pod(s) + ## + extraVolumes: [] + ## @param secondary.extraVolumeMounts [array] Optionally specify extra list of additional volumeMounts for the MySQL secondary container(s) + ## + extraVolumeMounts: [] + ## @param secondary.initContainers [array] Add additional init containers for 
the MySQL secondary pod(s) + ## + initContainers: [] + ## @param secondary.sidecars [array] Add additional sidecar containers for the MySQL secondary pod(s) + ## + sidecars: [] + ## MySQL Secondary Service parameters + ## + service: + ## @param secondary.service.type MySQL secondary Kubernetes service type + ## + type: ClusterIP + ## @param secondary.service.port MySQL secondary Kubernetes service port + ## + port: 3306 + ## @param secondary.service.nodePort MySQL secondary Kubernetes service node port + ## ref: https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport + ## + nodePort: "" + ## @param secondary.service.clusterIP MySQL secondary Kubernetes service clusterIP IP + ## e.g: + ## clusterIP: None + ## + clusterIP: "" + ## @param secondary.service.loadBalancerIP MySQL secondary loadBalancerIP if service type is `LoadBalancer` + ## Set the LoadBalancer service type to internal only + ## ref: https://kubernetes.io/docs/concepts/services-networking/service/#internal-load-balancer + ## + loadBalancerIP: "" + ## @param secondary.service.externalTrafficPolicy Enable client source IP preservation + ## ref https://kubernetes.io/docs/tasks/access-application-cluster/create-external-load-balancer/#preserving-the-client-source-ip + ## + externalTrafficPolicy: Cluster + ## @param secondary.service.loadBalancerSourceRanges [array] Addresses that are allowed when MySQL secondary service is LoadBalancer + ## https://kubernetes.io/docs/tasks/access-application-cluster/configure-cloud-provider-firewall/#restrict-access-for-loadbalancer-service + ## E.g. 
+ ## loadBalancerSourceRanges: + ## - 10.10.10.0/24 + ## + loadBalancerSourceRanges: [] + ## @param secondary.service.annotations [object] Provide any additional annotations which may be required + ## + annotations: {} + ## MySQL secondary Pod Disruption Budget configuration + ## ref: https://kubernetes.io/docs/tasks/run-application/configure-pdb/ + ## + pdb: + ## @param secondary.pdb.enabled Enable/disable a Pod Disruption Budget creation for MySQL secondary pods + ## + enabled: false + ## @param secondary.pdb.minAvailable Minimum number/percentage of MySQL secondary pods that should remain scheduled + ## + minAvailable: 1 + ## @param secondary.pdb.maxUnavailable Maximum number/percentage of MySQL secondary pods that may be made unavailable + ## + maxUnavailable: "" + ## @param secondary.podLabels [object] Additional pod labels for MySQL secondary pods + ## + podLabels: {} + +## @section RBAC parameters + +## MySQL pods ServiceAccount +## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/ +## +serviceAccount: + ## @param serviceAccount.create Enable the creation of a ServiceAccount for MySQL pods + ## + create: true + ## @param serviceAccount.name Name of the created ServiceAccount + ## If not set and create is true, a name is generated using the mysql.fullname template + ## + name: "" + ## @param serviceAccount.annotations [object] Annotations for MySQL Service Account + ## + annotations: {} +## Role Based Access +## ref: https://kubernetes.io/docs/admin/authorization/rbac/ +## +rbac: + ## @param rbac.create Whether to create & use RBAC resources or not + ## + create: false + +## @section Network Policy + +## MySQL Nework Policy configuration +## +networkPolicy: + ## @param networkPolicy.enabled Enable creation of NetworkPolicy resources + ## + enabled: false + ## @param networkPolicy.allowExternal The Policy model to apply. 
+ ## When set to false, only pods with the correct + ## client label will have network access to the port MySQL is listening + ## on. When true, MySQL will accept connections from any source + ## (with the correct destination port). + ## + allowExternal: true + ## @param networkPolicy.explicitNamespacesSelector [object] A Kubernetes LabelSelector to explicitly select namespaces from which ingress traffic could be allowed to MySQL + ## If explicitNamespacesSelector is missing or set to {}, only client Pods that are in the networkPolicy's namespace + ## and that match other criteria, the ones that have the good label, can reach the DB. + ## But sometimes, we want the DB to be accessible to clients from other namespaces, in this case, we can use this + ## LabelSelector to select these namespaces, note that the networkPolicy's namespace should also be explicitly added. + ## + ## Example: + ## explicitNamespacesSelector: + ## matchLabels: + ## role: frontend + ## matchExpressions: + ## - {key: role, operator: In, values: [frontend]} + ## + explicitNamespacesSelector: {} + +## @section Volume Permissions parameters + +## Init containers parameters: +## volumePermissions: Change the owner and group of the persistent volume mountpoint to runAsUser:fsGroup values from the securityContext section. 
+## +volumePermissions: + ## @param volumePermissions.enabled Enable init container that changes the owner and group of the persistent volume(s) mountpoint to `runAsUser:fsGroup` + ## + enabled: false + ## @param volumePermissions.image.registry Init container volume-permissions image registry + ## @param volumePermissions.image.repository Init container volume-permissions image repository + ## @param volumePermissions.image.tag Init container volume-permissions image tag (immutable tags are recommended) + ## @param volumePermissions.image.pullPolicy Init container volume-permissions image pull policy + ## @param volumePermissions.image.pullSecrets [array] Specify docker-registry secret names as an array + ## + image: + registry: docker.io + repository: bitnami/bitnami-shell + tag: 10-debian-10-r349 + pullPolicy: IfNotPresent + ## Optionally specify an array of imagePullSecrets. + ## Secrets must be manually created in the namespace. + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/ + ## e.g: + ## pullSecrets: + ## - myRegistryKeySecretName + ## + pullSecrets: [] + ## @param volumePermissions.resources [object] Init container volume-permissions resources + ## + resources: {} + +## @section Metrics parameters + +## Mysqld Prometheus exporter parameters +## +metrics: + ## @param metrics.enabled Start a side-car prometheus exporter + ## + enabled: false + ## @param metrics.image.registry Exporter image registry + ## @param metrics.image.repository Exporter image repository + ## @param metrics.image.tag Exporter image tag (immutable tags are recommended) + ## @param metrics.image.pullPolicy Exporter image pull policy + ## @param metrics.image.pullSecrets [array] Specify docker-registry secret names as an array + ## + image: + registry: docker.io + repository: bitnami/mysqld-exporter + tag: 0.13.0-debian-10-r256 + pullPolicy: IfNotPresent + ## Optionally specify an array of imagePullSecrets. 
+ ## Secrets must be manually created in the namespace. + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/ + ## e.g: + ## pullSecrets: + ## - myRegistryKeySecretName + ## + pullSecrets: [] + ## MySQL Prometheus exporter service parameters + ## Mysqld Prometheus exporter liveness and readiness probes + ## ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes + ## @param metrics.service.type Kubernetes service type for MySQL Prometheus Exporter + ## @param metrics.service.port MySQL Prometheus Exporter service port + ## @param metrics.service.annotations [object] Prometheus exporter service annotations + ## + service: + type: ClusterIP + port: 9104 + annotations: + prometheus.io/scrape: "true" + prometheus.io/port: "{{ .Values.metrics.service.port }}" + ## @param metrics.extraArgs.primary [array] Extra args to be passed to mysqld_exporter on Primary pods + ## @param metrics.extraArgs.secondary [array] Extra args to be passed to mysqld_exporter on Secondary pods + ## ref: https://github.com/prometheus/mysqld_exporter/ + ## E.g. 
+ ## - --collect.auto_increment.columns + ## - --collect.binlog_size + ## - --collect.engine_innodb_status + ## - --collect.engine_tokudb_status + ## - --collect.global_status + ## - --collect.global_variables + ## - --collect.info_schema.clientstats + ## - --collect.info_schema.innodb_metrics + ## - --collect.info_schema.innodb_tablespaces + ## - --collect.info_schema.innodb_cmp + ## - --collect.info_schema.innodb_cmpmem + ## - --collect.info_schema.processlist + ## - --collect.info_schema.processlist.min_time + ## - --collect.info_schema.query_response_time + ## - --collect.info_schema.tables + ## - --collect.info_schema.tables.databases + ## - --collect.info_schema.tablestats + ## - --collect.info_schema.userstats + ## - --collect.perf_schema.eventsstatements + ## - --collect.perf_schema.eventsstatements.digest_text_limit + ## - --collect.perf_schema.eventsstatements.limit + ## - --collect.perf_schema.eventsstatements.timelimit + ## - --collect.perf_schema.eventswaits + ## - --collect.perf_schema.file_events + ## - --collect.perf_schema.file_instances + ## - --collect.perf_schema.indexiowaits + ## - --collect.perf_schema.tableiowaits + ## - --collect.perf_schema.tablelocks + ## - --collect.perf_schema.replication_group_member_stats + ## - --collect.slave_status + ## - --collect.slave_hosts + ## - --collect.heartbeat + ## - --collect.heartbeat.database + ## - --collect.heartbeat.table + ## + extraArgs: + primary: [] + secondary: [] + ## Mysqld Prometheus exporter resource requests and limits + ## ref: https://kubernetes.io/docs/user-guide/compute-resources/ + ## We usually recommend not to specify default resources and to leave this as a conscious + ## choice for the user. This also increases chances charts run on environments with little + ## resources, such as Minikube. If you do want to specify resources, uncomment the following + ## lines, adjust them as necessary, and remove the curly braces after 'resources:'. 
+ ## @param metrics.resources.limits [object] The resources limits for MySQL prometheus exporter containers + ## @param metrics.resources.requests [object] The requested resources for MySQL prometheus exporter containers + ## + resources: + ## Example: + ## limits: + ## cpu: 100m + ## memory: 256Mi + limits: {} + ## Examples: + ## requests: + ## cpu: 100m + ## memory: 256Mi + requests: {} + ## Mysqld Prometheus exporter liveness probe + ## ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes + ## @param metrics.livenessProbe.enabled Enable livenessProbe + ## @param metrics.livenessProbe.initialDelaySeconds Initial delay seconds for livenessProbe + ## @param metrics.livenessProbe.periodSeconds Period seconds for livenessProbe + ## @param metrics.livenessProbe.timeoutSeconds Timeout seconds for livenessProbe + ## @param metrics.livenessProbe.failureThreshold Failure threshold for livenessProbe + ## @param metrics.livenessProbe.successThreshold Success threshold for livenessProbe + ## + livenessProbe: + enabled: true + initialDelaySeconds: 120 + periodSeconds: 10 + timeoutSeconds: 1 + successThreshold: 1 + failureThreshold: 3 + ## Mysqld Prometheus exporter readiness probe + ## ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes + ## @param metrics.readinessProbe.enabled Enable readinessProbe + ## @param metrics.readinessProbe.initialDelaySeconds Initial delay seconds for readinessProbe + ## @param metrics.readinessProbe.periodSeconds Period seconds for readinessProbe + ## @param metrics.readinessProbe.timeoutSeconds Timeout seconds for readinessProbe + ## @param metrics.readinessProbe.failureThreshold Failure threshold for readinessProbe + ## @param metrics.readinessProbe.successThreshold Success threshold for readinessProbe + ## + readinessProbe: + enabled: true + initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 1 + successThreshold: 1 + failureThreshold: 3 + ## Prometheus Service 
Monitor + ## ref: https://github.com/coreos/prometheus-operator + ## + serviceMonitor: + ## @param metrics.serviceMonitor.enabled Create ServiceMonitor Resource for scraping metrics using PrometheusOperator + ## + enabled: false + ## @param metrics.serviceMonitor.namespace Specify the namespace in which the serviceMonitor resource will be created + ## + namespace: "" + ## @param metrics.serviceMonitor.interval Specify the interval at which metrics should be scraped + ## + interval: 30s + ## @param metrics.serviceMonitor.scrapeTimeout Specify the timeout after which the scrape is ended + ## e.g: + ## scrapeTimeout: 30s + ## + scrapeTimeout: "" + ## @param metrics.serviceMonitor.relabellings [array] Specify Metric Relabellings to add to the scrape endpoint + ## + relabellings: [] + ## @param metrics.serviceMonitor.honorLabels Specify honorLabels parameter to add the scrape endpoint + ## + honorLabels: false + ## @param metrics.serviceMonitor.additionalLabels [object] Used to pass Labels that are used by the Prometheus installed in your cluster to select Service Monitors to work with + ## ref: https://github.com/coreos/prometheus-operator/blob/master/Documentation/api.md#prometheusspec + ## + additionalLabels: {} diff --git a/pkg/scanners/helm/test/option_test.go b/pkg/scanners/helm/test/option_test.go new file mode 100644 index 000000000000..bdce55341fa3 --- /dev/null +++ b/pkg/scanners/helm/test/option_test.go @@ -0,0 +1,167 @@ +package test + +import ( + "context" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/scanners/helm/parser" +) + +func Test_helm_parser_with_options_with_values_file(t *testing.T) { + + tests := []struct { + testName string + chartName string + valuesFile string + }{ + { + testName: "Parsing directory 'testchart'", + 
chartName: "testchart", + valuesFile: "values/values.yaml", + }, + } + + for _, test := range tests { + t.Run(test.testName, func(t *testing.T) { + chartName := test.chartName + + t.Logf("Running test: %s", test.testName) + + var opts []options.ParserOption + + if test.valuesFile != "" { + opts = append(opts, parser.OptionWithValuesFile(test.valuesFile)) + } + + helmParser := parser.New(chartName, opts...) + err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") + require.NoError(t, err) + manifests, err := helmParser.RenderedChartFiles() + require.NoError(t, err) + + assert.Len(t, manifests, 3) + + for _, manifest := range manifests { + expectedPath := filepath.Join("testdata", "expected", "options", chartName, manifest.TemplateFilePath) + + expectedContent, err := os.ReadFile(expectedPath) + require.NoError(t, err) + + cleanExpected := strings.ReplaceAll(string(expectedContent), "\r\n", "\n") + cleanActual := strings.ReplaceAll(manifest.ManifestContent, "\r\n", "\n") + + assert.Equal(t, cleanExpected, cleanActual) + } + }) + } +} + +func Test_helm_parser_with_options_with_set_value(t *testing.T) { + + tests := []struct { + testName string + chartName string + valuesFile string + values string + }{ + { + testName: "Parsing directory 'testchart'", + chartName: "testchart", + values: "securityContext.runAsUser=0", + }, + } + + for _, test := range tests { + t.Run(test.testName, func(t *testing.T) { + chartName := test.chartName + + t.Logf("Running test: %s", test.testName) + + var opts []options.ParserOption + + if test.valuesFile != "" { + opts = append(opts, parser.OptionWithValuesFile(test.valuesFile)) + } + + if test.values != "" { + opts = append(opts, parser.OptionWithValues(test.values)) + } + + helmParser := parser.New(chartName, opts...) 
+ err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") + require.NoError(t, err) + manifests, err := helmParser.RenderedChartFiles() + require.NoError(t, err) + + assert.Len(t, manifests, 3) + + for _, manifest := range manifests { + expectedPath := filepath.Join("testdata", "expected", "options", chartName, manifest.TemplateFilePath) + + expectedContent, err := os.ReadFile(expectedPath) + require.NoError(t, err) + + cleanExpected := strings.ReplaceAll(string(expectedContent), "\r\n", "\n") + cleanActual := strings.ReplaceAll(manifest.ManifestContent, "\r\n", "\n") + + assert.Equal(t, cleanExpected, cleanActual) + } + }) + } +} + +func Test_helm_parser_with_options_with_api_versions(t *testing.T) { + + tests := []struct { + testName string + chartName string + apiVersions []string + }{ + { + testName: "Parsing directory 'with-api-version'", + chartName: "with-api-version", + apiVersions: []string{"policy/v1/PodDisruptionBudget"}, + }, + } + + for _, test := range tests { + t.Run(test.testName, func(t *testing.T) { + chartName := test.chartName + + t.Logf("Running test: %s", test.testName) + + var opts []options.ParserOption + + if len(test.apiVersions) > 0 { + opts = append(opts, parser.OptionWithAPIVersions(test.apiVersions...)) + } + + helmParser := parser.New(chartName, opts...) 
+ err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") + require.NoError(t, err) + manifests, err := helmParser.RenderedChartFiles() + require.NoError(t, err) + + assert.Len(t, manifests, 1) + + for _, manifest := range manifests { + expectedPath := filepath.Join("testdata", "expected", "options", chartName, manifest.TemplateFilePath) + + expectedContent, err := os.ReadFile(expectedPath) + require.NoError(t, err) + + cleanExpected := strings.TrimSpace(strings.ReplaceAll(string(expectedContent), "\r\n", "\n")) + cleanActual := strings.TrimSpace(strings.ReplaceAll(manifest.ManifestContent, "\r\n", "\n")) + + assert.Equal(t, cleanExpected, cleanActual) + } + }) + } +} diff --git a/pkg/scanners/helm/test/parser_test.go b/pkg/scanners/helm/test/parser_test.go new file mode 100644 index 000000000000..24130d00a104 --- /dev/null +++ b/pkg/scanners/helm/test/parser_test.go @@ -0,0 +1,199 @@ +package test + +import ( + "context" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/aquasecurity/trivy/pkg/detection" + "github.com/aquasecurity/trivy/pkg/scanners/helm/parser" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_helm_parser(t *testing.T) { + + tests := []struct { + testName string + chartName string + }{ + { + testName: "Parsing directory 'testchart'", + chartName: "testchart", + }, + { + testName: "Parsing directory with tarred dependency", + chartName: "with-tarred-dep", + }, + } + + for _, test := range tests { + t.Run(test.testName, func(t *testing.T) { + chartName := test.chartName + + t.Logf("Running test: %s", test.testName) + + helmParser := parser.New(chartName) + err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") + require.NoError(t, err) + manifests, err := helmParser.RenderedChartFiles() + require.NoError(t, err) + + assert.Len(t, manifests, 3) + + for _, manifest := range manifests 
{ + expectedPath := filepath.Join("testdata", "expected", chartName, manifest.TemplateFilePath) + + expectedContent, err := os.ReadFile(expectedPath) + require.NoError(t, err) + + got := strings.ReplaceAll(manifest.ManifestContent, "\r\n", "\n") + assert.Equal(t, strings.ReplaceAll(string(expectedContent), "\r\n", "\n"), got) + } + }) + } +} + +func Test_helm_parser_where_name_non_string(t *testing.T) { + + tests := []struct { + testName string + chartName string + }{ + { + testName: "Scanning chart with integer for name", + chartName: "numberName", + }, + } + + for _, test := range tests { + chartName := test.chartName + + t.Logf("Running test: %s", test.testName) + + helmParser := parser.New(chartName) + err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") + require.NoError(t, err) + } +} + +func Test_tar_is_chart(t *testing.T) { + + tests := []struct { + testName string + archiveFile string + isHelmChart bool + }{ + { + testName: "standard tarball", + archiveFile: "mysql-8.8.26.tar", + isHelmChart: true, + }, + { + testName: "gzip tarball with tar.gz extension", + archiveFile: "mysql-8.8.26.tar.gz", + isHelmChart: true, + }, + { + testName: "broken gzip tarball with tar.gz extension", + archiveFile: "aws-cluster-autoscaler-bad.tar.gz", + isHelmChart: true, + }, + { + testName: "gzip tarball with tgz extension", + archiveFile: "mysql-8.8.26.tgz", + isHelmChart: true, + }, + { + testName: "gzip tarball that has nothing of interest in it", + archiveFile: "nope.tgz", + isHelmChart: false, + }, + } + + for _, test := range tests { + + t.Logf("Running test: %s", test.testName) + testPath := filepath.Join("testdata", test.archiveFile) + file, err := os.Open(testPath) + defer func() { _ = file.Close() }() + require.NoError(t, err) + + assert.Equal(t, test.isHelmChart, detection.IsHelmChartArchive(test.archiveFile, file)) + + _ = file.Close() + } +} + +func Test_helm_tarball_parser(t *testing.T) { + + tests := []struct { + 
testName string + chartName string + archiveFile string + }{ + { + testName: "standard tarball", + chartName: "mysql", + archiveFile: "mysql-8.8.26.tar", + }, + { + testName: "gzip tarball with tar.gz extension", + chartName: "mysql", + archiveFile: "mysql-8.8.26.tar.gz", + }, + { + testName: "gzip tarball with tgz extension", + chartName: "mysql", + archiveFile: "mysql-8.8.26.tgz", + }, + } + + for _, test := range tests { + + t.Logf("Running test: %s", test.testName) + + testPath := filepath.Join("testdata", test.archiveFile) + + testTemp := t.TempDir() + testFileName := filepath.Join(testTemp, test.archiveFile) + require.NoError(t, copyArchive(testPath, testFileName)) + + testFs := os.DirFS(testTemp) + + helmParser := parser.New(test.archiveFile) + err := helmParser.ParseFS(context.TODO(), testFs, ".") + require.NoError(t, err) + + manifests, err := helmParser.RenderedChartFiles() + require.NoError(t, err) + + assert.Len(t, manifests, 6) + + oneOf := []string{ + "configmap.yaml", + "statefulset.yaml", + "svc-headless.yaml", + "svc.yaml", + "secrets.yaml", + "serviceaccount.yaml", + } + + for _, manifest := range manifests { + filename := filepath.Base(manifest.TemplateFilePath) + assert.Contains(t, oneOf, filename) + + if strings.HasSuffix(manifest.TemplateFilePath, "secrets.yaml") { + continue + } + expectedPath := filepath.Join("testdata", "expected", test.chartName, manifest.TemplateFilePath) + + expectedContent, err := os.ReadFile(expectedPath) + require.NoError(t, err) + + assert.Equal(t, strings.ReplaceAll(string(expectedContent), "\r\n", "\n"), strings.ReplaceAll(manifest.ManifestContent, "\r\n", "\n")) + } + } +} diff --git a/pkg/scanners/helm/test/scanner_test.go b/pkg/scanners/helm/test/scanner_test.go new file mode 100644 index 000000000000..2c1d14d12ab1 --- /dev/null +++ b/pkg/scanners/helm/test/scanner_test.go @@ -0,0 +1,306 @@ +package test + +import ( + "context" + "io" + "os" + "path/filepath" + "sort" + "strings" + "testing" + + 
"github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/scanners/helm" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_helm_scanner_with_archive(t *testing.T) { + + tests := []struct { + testName string + chartName string + path string + archiveName string + }{ + { + testName: "Parsing tarball 'mysql-8.8.26.tar'", + chartName: "mysql", + path: filepath.Join("testdata", "mysql-8.8.26.tar"), + archiveName: "mysql-8.8.26.tar", + }, + } + + for _, test := range tests { + t.Logf("Running test: %s", test.testName) + + helmScanner := helm.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + + testTemp := t.TempDir() + testFileName := filepath.Join(testTemp, test.archiveName) + require.NoError(t, copyArchive(test.path, testFileName)) + + testFs := os.DirFS(testTemp) + results, err := helmScanner.ScanFS(context.TODO(), testFs, ".") + require.NoError(t, err) + require.NotNil(t, results) + + failed := results.GetFailed() + assert.Equal(t, 13, len(failed)) + + visited := make(map[string]bool) + var errorCodes []string + for _, result := range failed { + id := result.Flatten().RuleID + if _, exists := visited[id]; !exists { + visited[id] = true + errorCodes = append(errorCodes, id) + } + } + assert.Len(t, errorCodes, 13) + + sort.Strings(errorCodes) + + assert.Equal(t, []string{ + "AVD-KSV-0001", "AVD-KSV-0003", + "AVD-KSV-0011", "AVD-KSV-0012", "AVD-KSV-0014", + "AVD-KSV-0015", "AVD-KSV-0016", "AVD-KSV-0018", + "AVD-KSV-0020", "AVD-KSV-0021", "AVD-KSV-0030", + "AVD-KSV-0104", "AVD-KSV-0106", + }, errorCodes) + } +} + +func Test_helm_scanner_with_missing_name_can_recover(t *testing.T) { + + tests := []struct { + testName string + chartName string + path string + archiveName string + }{ + { + testName: "Parsing tarball 'aws-cluster-autoscaler-bad.tar.gz'", + chartName: "aws-cluster-autoscaler", + path: filepath.Join("testdata", 
"aws-cluster-autoscaler-bad.tar.gz"), + archiveName: "aws-cluster-autoscaler-bad.tar.gz", + }, + } + + for _, test := range tests { + t.Logf("Running test: %s", test.testName) + + helmScanner := helm.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + + testTemp := t.TempDir() + testFileName := filepath.Join(testTemp, test.archiveName) + require.NoError(t, copyArchive(test.path, testFileName)) + + testFs := os.DirFS(testTemp) + _, err := helmScanner.ScanFS(context.TODO(), testFs, ".") + require.NoError(t, err) + } +} + +func Test_helm_scanner_with_dir(t *testing.T) { + + tests := []struct { + testName string + chartName string + }{ + { + testName: "Parsing directory testchart'", + chartName: "testchart", + }, + } + + for _, test := range tests { + + t.Logf("Running test: %s", test.testName) + + helmScanner := helm.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + + testFs := os.DirFS(filepath.Join("testdata", test.chartName)) + results, err := helmScanner.ScanFS(context.TODO(), testFs, ".") + require.NoError(t, err) + require.NotNil(t, results) + + failed := results.GetFailed() + assert.Equal(t, 14, len(failed)) + + visited := make(map[string]bool) + var errorCodes []string + for _, result := range failed { + id := result.Flatten().RuleID + if _, exists := visited[id]; !exists { + visited[id] = true + errorCodes = append(errorCodes, id) + } + } + + sort.Strings(errorCodes) + + assert.Equal(t, []string{ + "AVD-KSV-0001", "AVD-KSV-0003", + "AVD-KSV-0011", "AVD-KSV-0012", "AVD-KSV-0014", + "AVD-KSV-0015", "AVD-KSV-0016", "AVD-KSV-0018", + "AVD-KSV-0020", "AVD-KSV-0021", "AVD-KSV-0030", + "AVD-KSV-0104", "AVD-KSV-0106", + "AVD-KSV-0117", + }, errorCodes) + } +} + +func Test_helm_scanner_with_custom_policies(t *testing.T) { + regoRule := ` +package user.kubernetes.ID001 + + +__rego_metadata__ := { + "id": "ID001", + "avd_id": "AVD-USR-ID001", + "title": "Services not allowed", + 
"severity": "LOW", + "description": "Services are not allowed because of some reasons.", +} + +__rego_input__ := { + "selector": [ + {"type": "kubernetes"}, + ], +} + +deny[res] { + input.kind == "Service" + msg := sprintf("Found service '%s' but services are not allowed", [input.metadata.name]) + res := result.new(msg, input) +} +` + tests := []struct { + testName string + chartName string + path string + archiveName string + }{ + { + testName: "Parsing tarball 'mysql-8.8.26.tar'", + chartName: "mysql", + path: filepath.Join("testdata", "mysql-8.8.26.tar"), + archiveName: "mysql-8.8.26.tar", + }, + } + + for _, test := range tests { + t.Run(test.testName, func(t *testing.T) { + t.Logf("Running test: %s", test.testName) + + helmScanner := helm.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true), + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithPolicyNamespaces("user")) + + testTemp := t.TempDir() + testFileName := filepath.Join(testTemp, test.archiveName) + require.NoError(t, copyArchive(test.path, testFileName)) + + policyDirName := filepath.Join(testTemp, "rules") + require.NoError(t, os.Mkdir(policyDirName, 0o700)) + require.NoError(t, os.WriteFile(filepath.Join(policyDirName, "rule.rego"), []byte(regoRule), 0o600)) + + testFs := os.DirFS(testTemp) + + results, err := helmScanner.ScanFS(context.TODO(), testFs, ".") + require.NoError(t, err) + require.NotNil(t, results) + + failed := results.GetFailed() + assert.Equal(t, 15, len(failed)) + + visited := make(map[string]bool) + var errorCodes []string + for _, result := range failed { + id := result.Flatten().RuleID + if _, exists := visited[id]; !exists { + visited[id] = true + errorCodes = append(errorCodes, id) + } + } + assert.Len(t, errorCodes, 14) + + sort.Strings(errorCodes) + + assert.Equal(t, []string{ + "AVD-KSV-0001", "AVD-KSV-0003", + "AVD-KSV-0011", "AVD-KSV-0012", "AVD-KSV-0014", + "AVD-KSV-0015", "AVD-KSV-0016", "AVD-KSV-0018", + "AVD-KSV-0020", 
"AVD-KSV-0021", "AVD-KSV-0030", + "AVD-KSV-0104", "AVD-KSV-0106", "AVD-USR-ID001", + }, errorCodes) + }) + } +} + +func copyArchive(src, dst string) error { + in, err := os.Open(src) + if err != nil { + return err + } + defer func() { _ = in.Close() }() + + out, err := os.Create(dst) + if err != nil { + return err + } + defer func() { _ = out.Close() }() + + if _, err := io.Copy(out, in); err != nil { + return err + } + return nil +} + +func Test_helm_chart_with_templated_name(t *testing.T) { + helmScanner := helm.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + testFs := os.DirFS(filepath.Join("testdata", "templated-name")) + _, err := helmScanner.ScanFS(context.TODO(), testFs, ".") + require.NoError(t, err) +} + +func TestCodeShouldNotBeMissing(t *testing.T) { + policy := `# METADATA +# title: "Test rego" +# description: "Test rego" +# scope: package +# schemas: +# - input: schema["kubernetes"] +# custom: +# id: ID001 +# avd_id: AVD-USR-ID001 +# severity: LOW +# input: +# selector: +# - type: kubernetes +package user.kubernetes.ID001 + +deny[res] { + input.spec.replicas == 3 + res := result.new("Replicas are not allowed", input) +} +` + helmScanner := helm.New( + options.ScannerWithEmbeddedPolicies(false), + options.ScannerWithEmbeddedLibraries(false), + options.ScannerWithPolicyNamespaces("user"), + options.ScannerWithPolicyReader(strings.NewReader(policy)), + ) + + results, err := helmScanner.ScanFS(context.TODO(), os.DirFS("testdata/simmilar-templates"), ".") + require.NoError(t, err) + + failedResults := results.GetFailed() + require.Len(t, failedResults, 1) + + failed := failedResults[0] + code, err := failed.GetCode() + require.NoError(t, err) + assert.NotNil(t, code) +} diff --git a/pkg/scanners/helm/test/testdata/aws-cluster-autoscaler-bad.tar.gz b/pkg/scanners/helm/test/testdata/aws-cluster-autoscaler-bad.tar.gz new file mode 100644 index 
0000000000000000000000000000000000000000..a66f228c9851f1f7d92cc62cedc935c99558808e GIT binary patch literal 4054 zcmV;{4=L~;iwFP!000001MM4YbKAHvpZP0LIoFF_lag%9&Zy(g_-yy;X&xFom&}cu zi69D+P?H3Y0OjbC^tX2x07*~}%enZHxmNffi6C|XEcT5Ry_J3# zE}6EyAxkqBQpJW{qOzPphYT~GDm~!rymxtidh+W0j9ej~NOCK(8RtCW@5^R@C2(^ zBp6G^Jb@*Sg-RpuTOaS*wnF}=G|HKLAO)~S{@a~)yO96ZVdn_tfB*1kBmZBcIl}s* zmenq~ehUJ#1tMc*O^(~TgOHfMzv90_U)jNvqQe7Dx@2Q5e!27+$dC`cJf>rY5}2`6 zNUlUS>yn$NG8l(hz{L_FgRzQ^K^wJ#!vH5r^C;?xh=%}qF?u0XFJqD=%JbNJmC=)I z42%Bo$@8|h$O}+JB%%WrNiq@{NyQL0Ao)0n5dubBvQedoDh+^Z8H!Lga|x>%=OExs z)M~zD5erfMH38bgt}ERmGBYX6aFQfKQ5<6y0$YXE(o>NqN|OfGYgG}xM$$z*=1OZO zBx9K7Fs{{#3$n6U(`moedd_uU#(n{Lyf1{ODS0MtlQS048IFvhlVwvL>cgOOD+Yw- z94>lY(r9>yb&EXXQV}sC7d=9UL)e4_B~KN(o$zo%)Pxa>l5pssQVxWVn1THaGFv3F zxdrD5<7njbRBANvQS@E(3~NGHBuCqs zY?cD+o)$X?3f=G&IY^Tkt=TgHguH^YMkXdB8cAl~T+IL|?untadjDGs@^5S_^grw^ zjVM~i2bkkC{QpA#@3js#{{Po#YtP>b&X>3X>+C=NuAToLgarE=`~NlCod18r6L4g& z%$a*JQ*=lb?Rp;d1CB}fA5+gZXp=QjcOy9~ zY8H>#wMz{`=5$&;GOSd@8XGwWR;sC|1%{fc!sedCz0TqugaZ|238N2BB#QC`&ZnBi zUq;*|6A@x^05$!jyMihw007;E*Kqh+cgyxCV6(C2DrRM87TD^h8Lr-`Zx<`%>Ur8-1s(_5a>YSd@a|4^;Yl#s96= z!9x7MxAFhJO8fAkxkIKr20mwsjCjN_WH^qIEa3#$f2XOVqvnox{yqi!sq?$ ztsT3-v*Ua+WFt^fg)87!y)=(h&G{=u6&_wrS(bru1yKb(kc8xLU;pIsRX!T=_oU%h z$WUq`7273Ru=Z z%WSpTC*X*{3ek+2v^`){0=CX2c`}%38l1hj!m)VciEh*O5^_{-XR(XN8ao)Nd()*_y__UH5eM~9pG zkFV3V9=LhR4Llt4r z2fA6NG&IqatG9J8xT9nYb{In=;zy(brt5e`fW2!Y_ytUN=KJQ`3i-c0KRJ7T9>l|k z;IX^qzrELK9nHyq=iq2>BmZBcZLO-M@^*F}P>MfWfOmF0@A_3Wk~kw_HVCobKDoQ3 z%e7{oS=F^oAe!maVx)c%f9uo+mV!bm8l!4Dd#F{Jg zhd&}t?G>W&-b@&{o_(jgUt|KLnaNwM;_<2gb_RjJPvuxEACtB$>v&q`xhms9Y`Bk$ zL}g-_>&h3; z)kjlItleO&9`)!ebhhlNlgWdlS;+dxa_78PeCr4>B-zmnX2Ap()itslzj11Pp{e z58z;2Nq>Q5`Viq{bf!g^D+!}OYAiE$NgtV~zf1a-SNeKf-*QqPr)D7ADa~LUP(tDw ztrAG7lqP7>f>51s-}fQv>{n2@WF1&39=12F+=znPJreM*yNn;px*E$53PHHIRy{fLtge2NOEQ zTu0wtAfVKaa}8Ran$AuD2QItX=<94qo}gaqpBk0{>HRyXIQE``m&wQzB(wkwnPoIh 
z0TFYB3qxf%$k>#t*|rt3)7tIOa&nkV>1q8A=8n6UJMLVHtCM!O-RdsNFj;mD($qQ#CQESG|p1RU*<{gPB61RG)(rMdV>&Z_?>=qEkY^L-zl%1>|6+FifZskJ@~ zSn%YC&7i#iQU&B^us}N~dOL*;a_wrc9Cu+C0So-(aUL(3M&d2smrYX`s|pYdw2QVmNgfZ-LXS-R55wDfw7t2*p@^pFD2u|$pKU#PIYa%b z7Xi_s!o}QDjy{>X#bB=B6Esa_xj`RvWuH80w~(cBPy*Y4Z?SG~^?r+@a3o~HD&Vrh zO`m01Q?4O3b?dow{CeErq|u9u?SH9d|b13Q#F0p9?nL|{5+ zdtpthO3QG9sH&Ymy*Rtn+F0D8VGlz)>$ zv@T3hUG(2`f+B*!0MZ)dJ}DDxI1yTAFp5_x3(-k40$3Uia2dWI!U5fADv088rHCnh zK6(Dkgc4MiE*PBK07e#bU%Rd<_`u5o$kG)Ci{+u(MZXs)AoLXxu8Sr!6cOLVUDX2T z10-_8X2;t4UGJ7Z-#VTjY@_eS6m^Gg_aUQ?e|gq1VmbwCOvN!+hFhlp@KfdwPM(t< zo}$zRW!h1&*_7fiojj16-7)DSRudJxK?m(!l{jekfQGh+O3(U1udHE<$QoVJ*lV@- z{C3N4A2oK(%PY`UIt+%_*X7;)huSTE<9sxNgF^%-D6IEi42{j(7 zSwcXbiwuPn(8MNRa*fHeGUtSUJ;IWkfX4J!kW5S&I@2}vm6y_gPBQ;Z0yYjNDvq{( zLYaI6>dGO`JHrZcJNV;Xp{~gP2dE^#CIUIXe2%sM|892{{{MNfssH#YZHx4%LLW-& zS##p#b^^}fAm`Cg8(oucg~-cZ>{*G0AHXFM8AoCOQH}~HFnSlmMG(Cy)8$Z3KWJ<~ zkg%~4%qMBaM*Mvde*S4YAXo;bbCs!C5&^dq>@d&HMGH&8W@hGSGZ_WbI0QLuA5}JJJ?F8~Rh0g)Kfln2~@BhHi z*EHiIm*nE?941UN@oz8@UH~eBHqAKb`r~SWP203h+q6yFv`yQzP203h`+sQv1;JB} I$^cLR0CfPoKmY&$ literal 0 HcmV?d00001 diff --git a/pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/configmap.yaml b/pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/configmap.yaml new file mode 100644 index 000000000000..9ee00d2c2c0c --- /dev/null +++ b/pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/configmap.yaml @@ -0,0 +1,42 @@ +# Source: mysql/templates/primary/configmap.yaml +apiVersion: v1 +kind: ConfigMap +metadata: + name: mysql + namespace: + labels: + app.kubernetes.io/name: mysql + helm.sh/chart: mysql-8.8.26 + app.kubernetes.io/instance: mysql + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/component: primary +data: + my.cnf: |- + + [mysqld] + default_authentication_plugin=mysql_native_password + skip-name-resolve + explicit_defaults_for_timestamp + basedir=/opt/bitnami/mysql + plugin_dir=/opt/bitnami/mysql/lib/plugin + port=3306 + 
socket=/opt/bitnami/mysql/tmp/mysql.sock + datadir=/bitnami/mysql/data + tmpdir=/opt/bitnami/mysql/tmp + max_allowed_packet=16M + bind-address=0.0.0.0 + pid-file=/opt/bitnami/mysql/tmp/mysqld.pid + log-error=/opt/bitnami/mysql/logs/mysqld.log + character-set-server=UTF8 + collation-server=utf8_general_ci + + [client] + port=3306 + socket=/opt/bitnami/mysql/tmp/mysql.sock + default-character-set=UTF8 + plugin_dir=/opt/bitnami/mysql/lib/plugin + + [manager] + port=3306 + socket=/opt/bitnami/mysql/tmp/mysql.sock + pid-file=/opt/bitnami/mysql/tmp/mysqld.pid \ No newline at end of file diff --git a/pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/statefulset.yaml b/pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/statefulset.yaml new file mode 100644 index 000000000000..a7f5f59d831b --- /dev/null +++ b/pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/statefulset.yaml @@ -0,0 +1,147 @@ +# Source: mysql/templates/primary/statefulset.yaml +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: mysql + namespace: + labels: + app.kubernetes.io/name: mysql + helm.sh/chart: mysql-8.8.26 + app.kubernetes.io/instance: mysql + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/component: primary +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/name: mysql + app.kubernetes.io/instance: mysql + app.kubernetes.io/component: primary + serviceName: mysql + updateStrategy: + type: RollingUpdate + template: + metadata: + annotations: + checksum/configuration: 6adfba795651cd736dfa943a87e0853ce417b9fb842b57535e3b1b4e762a33fd + labels: + app.kubernetes.io/name: mysql + helm.sh/chart: mysql-8.8.26 + app.kubernetes.io/instance: mysql + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/component: primary + spec: + + serviceAccountName: mysql + affinity: + podAffinity: + + podAntiAffinity: + preferredDuringSchedulingIgnoredDuringExecution: + - podAffinityTerm: + labelSelector: + matchLabels: + 
app.kubernetes.io/name: mysql + app.kubernetes.io/instance: mysql + app.kubernetes.io/component: primary + namespaces: + - "" + topologyKey: kubernetes.io/hostname + weight: 1 + nodeAffinity: + + securityContext: + fsGroup: 1001 + containers: + - name: mysql + image: docker.io/bitnami/mysql:8.0.28-debian-10-r23 + imagePullPolicy: "IfNotPresent" + securityContext: + runAsUser: 1001 + env: + - name: BITNAMI_DEBUG + value: "false" + - name: MYSQL_ROOT_PASSWORD + valueFrom: + secretKeyRef: + name: mysql + key: mysql-root-password + - name: MYSQL_DATABASE + value: "my_database" + ports: + - name: mysql + containerPort: 3306 + livenessProbe: + failureThreshold: 3 + initialDelaySeconds: 5 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + readinessProbe: + failureThreshold: 3 + initialDelaySeconds: 5 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + startupProbe: + failureThreshold: 10 + initialDelaySeconds: 15 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + resources: + limits: {} + requests: {} + volumeMounts: + - name: data + mountPath: /bitnami/mysql + - name: config + mountPath: /opt/bitnami/mysql/conf/my.cnf + subPath: my.cnf + volumes: + - name: config + configMap: + name: mysql + volumeClaimTemplates: + - 
metadata: + name: data + labels: + app.kubernetes.io/name: mysql + app.kubernetes.io/instance: mysql + app.kubernetes.io/component: primary + spec: + accessModes: + - "ReadWriteOnce" + resources: + requests: + storage: "8Gi" \ No newline at end of file diff --git a/pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/svc-headless.yaml b/pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/svc-headless.yaml new file mode 100644 index 000000000000..9fe0f11c87ae --- /dev/null +++ b/pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/svc-headless.yaml @@ -0,0 +1,25 @@ +# Source: mysql/templates/primary/svc-headless.yaml +apiVersion: v1 +kind: Service +metadata: + name: mysql-headless + namespace: + labels: + app.kubernetes.io/name: mysql + helm.sh/chart: mysql-8.8.26 + app.kubernetes.io/instance: mysql + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/component: primary + annotations: +spec: + type: ClusterIP + clusterIP: None + publishNotReadyAddresses: true + ports: + - name: mysql + port: 3306 + targetPort: mysql + selector: + app.kubernetes.io/name: mysql + app.kubernetes.io/instance: mysql + app.kubernetes.io/component: primary \ No newline at end of file diff --git a/pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/svc.yaml b/pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/svc.yaml new file mode 100644 index 000000000000..2bbdab8fe468 --- /dev/null +++ b/pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/svc.yaml @@ -0,0 +1,25 @@ +# Source: mysql/templates/primary/svc.yaml +apiVersion: v1 +kind: Service +metadata: + name: mysql + namespace: + labels: + app.kubernetes.io/name: mysql + helm.sh/chart: mysql-8.8.26 + app.kubernetes.io/instance: mysql + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/component: primary + annotations: +spec: + type: ClusterIP + ports: + - name: mysql + port: 3306 + protocol: TCP + targetPort: mysql + nodePort: null + selector: + 
app.kubernetes.io/name: mysql + app.kubernetes.io/instance: mysql + app.kubernetes.io/component: primary \ No newline at end of file diff --git a/pkg/scanners/helm/test/testdata/expected/mysql/templates/secrets.yaml b/pkg/scanners/helm/test/testdata/expected/mysql/templates/secrets.yaml new file mode 100644 index 000000000000..ffa6909e2f04 --- /dev/null +++ b/pkg/scanners/helm/test/testdata/expected/mysql/templates/secrets.yaml @@ -0,0 +1,15 @@ +# Source: mysql/templates/secrets.yaml +apiVersion: v1 +kind: Secret +metadata: + name: mysql + namespace: + labels: + app.kubernetes.io/name: mysql + helm.sh/chart: mysql-8.8.26 + app.kubernetes.io/instance: mysql + app.kubernetes.io/managed-by: Helm +type: Opaque +data: + mysql-root-password: "aGZYYW1vN3V5NA==" + mysql-password: "eHR6YU9MR1VhbA==" \ No newline at end of file diff --git a/pkg/scanners/helm/test/testdata/expected/mysql/templates/serviceaccount.yaml b/pkg/scanners/helm/test/testdata/expected/mysql/templates/serviceaccount.yaml new file mode 100644 index 000000000000..760b8bf731a5 --- /dev/null +++ b/pkg/scanners/helm/test/testdata/expected/mysql/templates/serviceaccount.yaml @@ -0,0 +1,14 @@ +# Source: mysql/templates/serviceaccount.yaml +apiVersion: v1 +kind: ServiceAccount +metadata: + name: mysql + namespace: + labels: + app.kubernetes.io/name: mysql + helm.sh/chart: mysql-8.8.26 + app.kubernetes.io/instance: mysql + app.kubernetes.io/managed-by: Helm + annotations: +secrets: + - name: mysql \ No newline at end of file diff --git a/pkg/scanners/helm/test/testdata/expected/options/testchart/templates/deployment.yaml b/pkg/scanners/helm/test/testdata/expected/options/testchart/templates/deployment.yaml new file mode 100644 index 000000000000..c41133c72716 --- /dev/null +++ b/pkg/scanners/helm/test/testdata/expected/options/testchart/templates/deployment.yaml @@ -0,0 +1,46 @@ +# Source: testchart/templates/deployment.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: testchart + labels: + 
helm.sh/chart: testchart-0.1.0 + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + app.kubernetes.io/version: "1.16.0" + app.kubernetes.io/managed-by: Helm +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + template: + metadata: + labels: + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + spec: + serviceAccountName: testchart + securityContext: + {} + containers: + - name: testchart + securityContext: + runAsUser: 0 + image: "nginx:1.16.0" + imagePullPolicy: IfNotPresent + ports: + - name: http + containerPort: 80 + protocol: TCP + livenessProbe: + httpGet: + path: / + port: http + readinessProbe: + httpGet: + path: / + port: http + resources: + {} \ No newline at end of file diff --git a/pkg/scanners/helm/test/testdata/expected/options/testchart/templates/service.yaml b/pkg/scanners/helm/test/testdata/expected/options/testchart/templates/service.yaml new file mode 100644 index 000000000000..6c6699f3d5dd --- /dev/null +++ b/pkg/scanners/helm/test/testdata/expected/options/testchart/templates/service.yaml @@ -0,0 +1,21 @@ +# Source: testchart/templates/service.yaml +apiVersion: v1 +kind: Service +metadata: + name: testchart + labels: + helm.sh/chart: testchart-0.1.0 + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + app.kubernetes.io/version: "1.16.0" + app.kubernetes.io/managed-by: Helm +spec: + type: ClusterIP + ports: + - port: 80 + targetPort: http + protocol: TCP + name: http + selector: + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart \ No newline at end of file diff --git a/pkg/scanners/helm/test/testdata/expected/options/testchart/templates/serviceaccount.yaml b/pkg/scanners/helm/test/testdata/expected/options/testchart/templates/serviceaccount.yaml new file mode 100644 index 000000000000..6fe44a89bb3b --- /dev/null +++ 
b/pkg/scanners/helm/test/testdata/expected/options/testchart/templates/serviceaccount.yaml @@ -0,0 +1,11 @@ +# Source: testchart/templates/serviceaccount.yaml +apiVersion: v1 +kind: ServiceAccount +metadata: + name: testchart + labels: + helm.sh/chart: testchart-0.1.0 + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + app.kubernetes.io/version: "1.16.0" + app.kubernetes.io/managed-by: Helm \ No newline at end of file diff --git a/pkg/scanners/helm/test/testdata/expected/options/with-api-version/templates/pdb.yaml b/pkg/scanners/helm/test/testdata/expected/options/with-api-version/templates/pdb.yaml new file mode 100644 index 000000000000..7c7ef5fd74d7 --- /dev/null +++ b/pkg/scanners/helm/test/testdata/expected/options/with-api-version/templates/pdb.yaml @@ -0,0 +1,17 @@ +# Source: with-api-version/templates/pdb.yaml +apiVersion: policy/v1 +kind: PodDisruptionBudget +metadata: + name: with-api-version + labels: + helm.sh/chart: with-api-version-0.1.0 + app.kubernetes.io/name: with-api-version + app.kubernetes.io/instance: with-api-version + app.kubernetes.io/version: "1.16.0" + app.kubernetes.io/managed-by: Helm +spec: + selector: + matchLabels: + app.kubernetes.io/name: with-api-version + app.kubernetes.io/instance: with-api-version + maxUnavailable: 0 diff --git a/pkg/scanners/helm/test/testdata/expected/testchart/templates/deployment.yaml b/pkg/scanners/helm/test/testdata/expected/testchart/templates/deployment.yaml new file mode 100644 index 000000000000..8ace433f0c03 --- /dev/null +++ b/pkg/scanners/helm/test/testdata/expected/testchart/templates/deployment.yaml @@ -0,0 +1,46 @@ +# Source: testchart/templates/deployment.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: testchart + labels: + helm.sh/chart: testchart-0.1.0 + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + app.kubernetes.io/version: "1.16.0" + app.kubernetes.io/managed-by: Helm +spec: + replicas: 1 + selector: + matchLabels: + 
app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + template: + metadata: + labels: + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + spec: + serviceAccountName: testchart + securityContext: + {} + containers: + - name: testchart + securityContext: + {} + image: "nginx:1.16.0" + imagePullPolicy: IfNotPresent + ports: + - name: http + containerPort: 80 + protocol: TCP + livenessProbe: + httpGet: + path: / + port: http + readinessProbe: + httpGet: + path: / + port: http + resources: + {} \ No newline at end of file diff --git a/pkg/scanners/helm/test/testdata/expected/testchart/templates/service.yaml b/pkg/scanners/helm/test/testdata/expected/testchart/templates/service.yaml new file mode 100644 index 000000000000..6c6699f3d5dd --- /dev/null +++ b/pkg/scanners/helm/test/testdata/expected/testchart/templates/service.yaml @@ -0,0 +1,21 @@ +# Source: testchart/templates/service.yaml +apiVersion: v1 +kind: Service +metadata: + name: testchart + labels: + helm.sh/chart: testchart-0.1.0 + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + app.kubernetes.io/version: "1.16.0" + app.kubernetes.io/managed-by: Helm +spec: + type: ClusterIP + ports: + - port: 80 + targetPort: http + protocol: TCP + name: http + selector: + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart \ No newline at end of file diff --git a/pkg/scanners/helm/test/testdata/expected/testchart/templates/serviceaccount.yaml b/pkg/scanners/helm/test/testdata/expected/testchart/templates/serviceaccount.yaml new file mode 100644 index 000000000000..6fe44a89bb3b --- /dev/null +++ b/pkg/scanners/helm/test/testdata/expected/testchart/templates/serviceaccount.yaml @@ -0,0 +1,11 @@ +# Source: testchart/templates/serviceaccount.yaml +apiVersion: v1 +kind: ServiceAccount +metadata: + name: testchart + labels: + helm.sh/chart: testchart-0.1.0 + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + 
app.kubernetes.io/version: "1.16.0" + app.kubernetes.io/managed-by: Helm \ No newline at end of file diff --git a/pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/deployment.yaml b/pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/deployment.yaml new file mode 100644 index 000000000000..ed57d12a6e2b --- /dev/null +++ b/pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/deployment.yaml @@ -0,0 +1,78 @@ +# Source: with-tarred-dep/templates/deployment.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: with-tarred-dep + labels: + app.kubernetes.io/name: with-tarred-dep + helm.sh/chart: with-tarred-dep-0.1.1 + app.kubernetes.io/instance: with-tarred-dep + app.kubernetes.io/managed-by: Helm +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/name: with-tarred-dep + app.kubernetes.io/instance: with-tarred-dep + template: + metadata: + labels: + app.kubernetes.io/name: with-tarred-dep + helm.sh/chart: with-tarred-dep-0.1.1 + app.kubernetes.io/instance: with-tarred-dep + app.kubernetes.io/managed-by: Helm + spec: + containers: + - name: metadata-service + env: + - name: METADATASERVICE_UPSTREAM_API_URL + value: '' + - name: METADATASERVICE_OIDC_AUDIENCE + value: "" + - name: METADATASERVICE_OIDC_ISSUER + value: "" + - name: METADATASERVICE_OIDC_JWKSURI + value: "" + - name: METADATASERVICE_OIDC_CLAIMS_ROLES + value: "" + - name: METADATASERVICE_OIDC_CLAIMS_USERNAME + value: "" + - name: METADATASERVICE_DB_URI + valueFrom: + secretKeyRef: + name: with-tarred-dep-dbconn + key: uri + image: "ghcr.io/metal-toolbox/hollow-metadataservice:v0.0.1" + imagePullPolicy: Always + volumeMounts: + - name: dbcerts + mountPath: "/dbcerts" + readOnly: true + ports: + - name: http + containerPort: 8000 + protocol: TCP + livenessProbe: + httpGet: + path: /healthz/liveness + port: http + initialDelaySeconds: 5 + timeoutSeconds: 2 + readinessProbe: + httpGet: + path: /healthz/readiness + port: http + 
initialDelaySeconds: 5 + timeoutSeconds: 2 + resources: + limits: + cpu: 4 + memory: 4Gi + requests: + cpu: 4 + memory: 4Gi + volumes: + - name: dbcerts + secret: + secretName: with-tarred-dep-crdb-ca + defaultMode: 0400 \ No newline at end of file diff --git a/pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/ingress.yaml b/pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/ingress.yaml new file mode 100644 index 000000000000..b48564477997 --- /dev/null +++ b/pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/ingress.yaml @@ -0,0 +1,26 @@ +# Source: with-tarred-dep/templates/ingress.yaml +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: with-tarred-dep + labels: + app.kubernetes.io/name: with-tarred-dep + helm.sh/chart: with-tarred-dep-0.1.1 + app.kubernetes.io/instance: with-tarred-dep + app.kubernetes.io/managed-by: Helm +spec: + rules: + - host: metadata-service.mydomain + http: + paths: + - path: /($|metadata|userdata|2009-04-04) + pathType: Prefix + backend: + service: + name: with-tarred-dep + port: + name: http +# tls: [] +# hosts: +# - hollow-metadataservice.mydomain +# secretName: hollow-metadataservice-example-tls \ No newline at end of file diff --git a/pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/service.yaml b/pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/service.yaml new file mode 100644 index 000000000000..7d86aeb5b02b --- /dev/null +++ b/pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/service.yaml @@ -0,0 +1,24 @@ +# Source: with-tarred-dep/templates/service.yaml +apiVersion: v1 +kind: Service +metadata: + name: with-tarred-dep + labels: + app.kubernetes.io/name: with-tarred-dep + helm.sh/chart: with-tarred-dep-0.1.1 + app.kubernetes.io/instance: with-tarred-dep + app.kubernetes.io/managed-by: Helm +spec: + ports: + - name: http + port: 80 + protocol: TCP + targetPort: 8000 + - name: https + port: 443 + protocol: TCP + 
targetPort: 8000 + selector: + app.kubernetes.io/name: with-tarred-dep + app.kubernetes.io/instance: with-tarred-dep + type: ClusterIP \ No newline at end of file diff --git a/pkg/scanners/helm/test/testdata/mysql-8.8.26.tar b/pkg/scanners/helm/test/testdata/mysql-8.8.26.tar new file mode 100644 index 0000000000000000000000000000000000000000..53cb6802de42a09509727e267c7a68df02ba3bb5 GIT binary patch literal 284672 zcmeFadv_bhu`imx`6*`8KHHM706s*?4$t;pp(WYTiY0N2vUAVz&Ju>ekempwU;s&s z?fcp9ud2HGG1G$?5FkZo6NxEcrXSVS)z#JY>d|HKkKxXfi*z>cUZ$hrAFkoEi_hbO zgFn!}_>}+Md-!Pg4|@muyN^+5e}C@}yLon)Mjvcsf58I2}mv^mSBlOmsUs~7Y6v^d<^=@Y>f-P3%I z!g&{Ecg%NGn5X9qOeZnVI<~F%3dvsgWA`I^F8Z^4O5h(RFD_61>xU#Sk~BF>i}?d| zJj~P6VfG*?`sq-Q8#&- z75Vu%8&J&)G&9VSQC<*UI)GuGql*X0MV7w5>-ZPfFTNe{T zk7D9HpI*16GAJzLja;l3&uV%mFu|1B;+W31TqmzsqJF z1zBtGZ^Gr4C6uqbUw8K(Z+)uub=&)8j2Zqv1X8a9USf?)`2X(1hX)n@zyIit|6j*k zl-t}i@3!l1ue*o;Zw>PEte77r#YMXR=<#6+QaJ4o4h9GP!Qkt?hyBN24IV#yc>3tk z{=?IQ6i*Mn`ugnjkB`pw`f2v~@uR^XPj^q#NBx6Grw6;){?>U0R)bAyaF}fG@9yt+ zcK17bkKgR?AMX9}@X=S@KYq2h`{?U~gGc{&`)Z_UG}yx}JKC_T?g zC60#>s32rPkj#?#MLNeHNMRrH;V_wiAI$OrJ*adO-5SS$@8l ziL^y>8r44+_?TD;v?q(%EE~^NAb~(;O4I;*C=k5P&huhEyF~Xin&ZWAc#`#Jked>c z4hu-Cc>H7tS!#>A`4c0=wB6-8+C&sPGt+)eUBADd)7dP&{9CCRZ6LwK04x@o02_gq zaD{5iKG53Qi8qksY%)uZNqB}N2bBFZ87@ZIz2ZKa+7hgh_#X@o9CjPefPrV-^R9d= zV~i@HK$3Kl(WPSLFWIFa^d;zs3*b+sX7cmj@Yn~OJLIt`MLyv(gimkY&`ElHrSx?& zpAi{BHpgtu^K>}8bi^mehb~npx@VYg`j$YdpF-fzl6y?5D#;YEIDQlJ)c|EyTEKWOeoC9$JW0N^8-vWys%lCdac}DA% zc5J3#i**K&lgmWc@DVL# zaJ~6O!mkrjrx18xZB&lm5F2A;Eyau1{2?t9H*-b*A{#7*+3Y1z@QoHNX zef=QGux68OT3+wAyVSxgJ9B5ocZ*Y00h2Gf`D6#DsbXiI7C17dgHb*vCNb+6>e;c( zxHDP|=lOJ)b!2zrp9Kmi4Uzw;D;@aQ`)d=xO-6m%H(m0y`| z4xvjJLJPudE)Ec_CTfh94tFro_AomD#vO(uW2bh}xM_n1PQ0`jX4!OEpXuzpNRH?a z2+VG>ZF{5I+EtF7c1$OErZQ!`qDvvDpuX?~0xD!q@2YR1J(L~?^hGKSIis^?qolw9 zt(`OF=-$ekL)Y$mMw7#`hCXEQWGB|rs#fwL0T}wYQUkpPMd|mwd^B1>(*;5$pqPct zG8#CkvBkeOb1s9n=y5XT=g}&%A%csU9sgvS_4Bh!@NQOQ=qTH4`7-=L zcDguEw3+-23yK~LCm2L=F6ya&X@JO9)rX} 
zOYzd+)od8_q&u5{l5wUWeQRRBkX%kXQjcEuNF_b%CeN{9(%A!R5uMN~rH!)>tCJb9q%Fq#^A^#Mgd|xf-c#=L z>Qiq?b}_{?d+n;2%1z^s_lf{Hv=1gT>D`}dudrQ_amelPS6ZDRJlaJZgSyg%GqY^3 zk96_V!qRB~|AXCSSag#&U>}A%TtG=c>rA!=W-PGq6iJWOeBn~>J+O}*1GJ2w$Fj1i zr+xMWgD+pbd3Kom6&x_h=U+hFpj`k!RM^}Dd8F0N&lu9_5OfXj7j_{w_OW_u+tC@duj)z zE5Fq-CIZg~nx!7kOu4HbZ6-$0>d_Y7BO@nH1Ek=?DVC^Bs7LlDI@|Qn$PAQ&G1I}8 z3)>BJGDDMR=*`=LgHF7eH>4B_CXq2(0fk^VOm^OrD7ho#ZVa4E){IY_od~{~V)ROj zDI}A+!mfbMR1COKT|wul!RZNGq&vtC&B-ZHL9&QN9=->CAJW@|b~$?dR+{P-7l+BO@&+$I`!^Vs$2&0bU)X0^ z|6-DCKZV!^6E&2$p2!SOqVcN$tNQgkQCy#AGBBp1dKxQ7&*_4m$dMZ<3h_s2&l#^mZ z)1r`F!_Ew|&1TT1Cig&O(BUP#?1TXrIY`bOR=r@FWA%>Da1=5=;I{ya%WViIRts3! z{!en)R)8*`!a0IJMFIY&aES(t`5I0n2^9fn;4-q%)Gi-6(g1D!3@ZR@dGFFG!}=_>@xLQVRm%c?T^oxwaNek>DziA?7TbO$;QV0LOVn^;;$eB7+s>JozF5k znRO;-bf9yPK?)yQ-Fff^i@1Z%ljLXaZ$MgfIt?+wz9mlYP9|@s!vzjd-*CydP!Z)|Evoo2_ zOa3X;g!KG2USemMpYBKrZfiQ3&A)m0aQCsmL^_dqq~V?UXewCk7AV7oNsi}EjBAL& zIz8t1s8Rz1-f)Rg`tdCe>5~uH0J!0<_a49CSKyF?4u}!BDZbf-w-f%yFQ)l`XJ{#q zgD#43ah!HLFoaEF*xI3^us}V4rv98Okd8R*lNJ^mH2#~x&hgEUZ@&9l-cE+%sibcf z^RuttifMfKwx4ftsh|799Hb!hu?~iXv{=b+VRFE{EQ&!v?Ky%3D~4>$}BjjAM0j@fX4R{laZeB!C&I7s%nWw)Sq}s>r>{kQY#Xo+xk&9Hlv;_HBt{GVAeP@4iWsZp0twXzwB^$*q`XA-oVY6vMC!eRmo6+07C0_$t%C8Fcem;z8?L8Chlj?z7) z>(ms|lS4N-66TDv6M3l|CcR>E23;8@y9YKoFh_~jC@|J4+K)3x9z;_+P=8?|vY;`B z0cz(ncr}9g0_cvy)4}-%|IKszL@B#6^yRZY&zlkDck?fZEetQj0o1IRQ(208&;Tm3Qz6#)U!H z<4FYfG89uzN7EAdvEdk&q_4=1BBR->KHB%kro4=uV5;JYC3vgiQ8MH`JscVM4{4x? 
zgV9+G-JrqNt0NFjs*F$}sbPPKoZ>4i@BwXKN0oIB%@MRna1tr6D{G23@{pXMRlUeV z!XE{ub{C?CPvE=Lhu?$SRA1t0V*EwoyXea3BW04YGE$!q#Hmx-N)%Qflv;S_6G%{e z6m_TSH{P=iL2CuLaI(p9f6f!@;IQz!N7!(caXJb|QO$P{qJpi;O{gxz1JU=n;J%PZArpoRk+Mt48v%K22epY-xn=Gb@ z{C)n^O5h_>8AApAo*nOC0B7|qhn6V8RDmU>6%OO#@N$@tj+UR8UJjGJ-CbfdSnlD( zV45W;<91As1xH{lO_F_EH&snzGgG@-P-bmhGs; zbitTHY8VrZ^bbh8#wm4}k8)^>TXA-eH9^ZbU9dZVao3a8o&1%oB;uK1on6>-4_l&I zO;iP{0FnDCdjY~T@;y1^;_ym7sf3WCpHCJBJm`?2&a;G1(2$LaN|Vt5F2C@FAT1(v z95`0;zla566*m-_p0R`+xL}7soYCCq!Yp$^@?i>6qwk!1=B-lNI?VaWpQfU z{+dLo<(nB0$(o|9qisXg3Y8`G0)HKHRNVQ@6_O`Hj-`p!`o~nMypaVjTT~~@3QU(z z5%Wmi+LJM4$mjA5%49sGt%IKuZLHe*G@IqT_i_txJjbuK8YJEGWHDDr-^kCknmmK~ z98N-SEc zwD7_r-N*ziSvq0mHth$qNKqT*#j|>oAqgb0<+FODWeTcUK&v-gCcz}Wgdz7_*|qOH)h&bhQ)8b77`W^z8g|NMR=NqEnUr9>QF;xuH=l$%G!Pc zk2!ce;oQilO+CU@i$CBAH4YR+xpX4wYNVnA0s7`n2S$FBS5cs%G-rI85IBUnz~F6>F6--e-#$5}WM6QkoaxV6+XZ(OWW{Coh&6#SO4H zI~#cr!=+)iII${b4KPn$7&8SXa0xh&{V5u@Vy8FHHwTY2=wLFDX4qIHe3pGoY1|Oa z!hl+YWTyGZ5OUM8(bLn|rso}_Ahc>&`>N038%*UTBzZ;3+p_nnN^Zu-vA|lR(WFW5 z*bYbngn2k{3x#Hd!{DGkfu||rX6Khrb8@C6aQL69lCnCP9xz?&=RgRvb4t##8t_m9 zOoWT;I1D_=%NqY=oa(UX&Jquf1crPpA}DU4zi~du!N?3ij0-~)O|#%fHJ8@7?Hs_JBT0)91fiKz3M!#wMxqPj0Rn~vg5Vg6HEJtdqA&~e@|W^ zffM$m{yTE5wB+dHr{$b^z7I|#7xL%o-1V7!y{bMC-ea>}9@@ z&YJDiEX%I*$_#nHoLgLr;y zUX?{DZJazoOC;zwPOtsO={Hn^>YH0#)!HuEyN{Q*O-0}rU^{@WX1RtC^jGgBZaUui zjK<%gw!Nl#2ny1Y9YCQs$qt?$FF}nf78gjV9fu~tV0SU*(Vdp%jCeT(u>WC$_lLvc{HW7^FP&2$Jyfy5?$J_}U zk6))_1it$m7qTzeTsb`x3A)DNUEFL!_GRdhXvwD*1!9Q@ttj+GFo`2fO$b2YWHBJ~ z7lksRk|?yd+m-+C?(83! 
zR`{RV+HzK2wq!j?&&iI^2hrdl#$5XQonKaj@ezz|6ys&W6n&Vy!cII1h0kHLe$PHe&KPFPPdFm?B~P!LQ(FwQ z&&7z87#zP^S=Vf9b-j2p%S(SxF>h`psy)@ny^5;bO8PvrPV4sB)$&jub+UKULGTbFc6HIOE5XLJ9 z_qp*w+&OZwbWX~G0!sB>Bw~5uy(jzHW+;Y2M3zS3-HvbL0e%*mE)6Oc;%7@p$l|gf zKU=h`p&b88VSa9OJpbh&H*?akr377#p?)LW03T5M(%zz>xfwscch`=0sYJ&n@ z^AWgRSfDK?VDq`KJ}}UhuEK22tm?19hxqkF16_xaQJA1VJpsvi*Vga{WJ;TL(^V9LJ9Uxfk-rHQ1^O7;AJU-e=aB%;h93<#)N91Acjn+r%C7xEp1Z^$e zjaNpY=jB1Yt{Nz4d+&Pjcvcw^4;8d^t_HDXl=$Lwh{)M{wO~O9&9TG)+`e!@3nGa) zCsz&_w9PQyd5cpcoP8DR;S&c9TG*U^2bQ5*5Y-9#6L+{8RND6^HB(mRfwqDj-xG8J z+smpQ*qFYW;K2w=xeNTUFqD86b-Gn=+gMNA71WfaXwOJ5cL9X|!U2SKzM=It@tmuM z5ZWdj9hw8CvEtYQULP7k=Om3c9H_V~jL^1I?g&f3LT^n-cY%bS!5JLF1Po#~^3r^} z=jzwKY9OJV6p)RCIkL}s$y^skq1CX)umSSN*+ej59e^;{T{WDruE`d09l;n|;h|-0 z$LbB2<0EP!qN)u8?BHSDHWQh_yP!f7o?A2ZHl8E8h{Ct+nutuo)dCA0(T8Ak8y8Yo zz8%;tmeN(4ZMm4P(saw^6loe537`I+78Dfbw=%%ck*g13d0X<|)exp%WK%(gHNZ8l zBp;SG;I>#rt2Wtk1+Cg_%hgk97Fh6~t+aAB6Kp6Ei)Hxh9Y7+{fKCXm_)zFZ3oR~U z;G+f|d-}7UD3LClr&mQNm(s7-4>`0{9g)vtoixI9;`0`(Qmf#|5Wp2sjxv+h8tj#uADEQ@o>flbi2Y4 z?RZGaRM5(sxc4eXdg`aw6q1N(vnH*o&dfPH#$ocK!>Wc71^zBT&j$PvyO!|48w${i z&ztWqVYisFlGjq0oxe4L^lIR6A$BduXJdhSabUbqJzKzqxLPE%4c4=51glu1?40yt z&1Y&R{p8_#4szz`8xPpC?UQ-O3r6#Mb0J|B(akG%Jwba8J}YI$p{xnpv-SDp^hAv> zK^Tc2$Uxc(Z|h=t*9Gp`)=Jo-TCVLS_|hmlC)I>;gN?y^br5L7u5V!>C)Ua2WYqvZ z+f`SCxF{yA!W;f#Z`}<-ul&tyc`RSG9}0PE(@!}xj($hO`l?-!+4{I;H&hlAej6hE zssph>cx@2o3U4&nuL_kPc4Wi-Y%yGv8AJpAPYU?6NU*C4JoJBN$X^u|K3%L4^k*Bk z9O*9XPogKy_P~eS%4V)?J}QyI9&nw3e{Raiq2@M({#l5+n+g82?WrS8^{f{Q|FhK? 
z3_munO#q;+&7E#21kg4rAO+ERZGr%8eGjPh!vJloHM<0h{mB&p`aU=+VCSAtRAc}ej^4a+Tcxc+I#ea?~1 ziSAxkfa#&rHN`O#<$6b6yn}NUHkHff-^iv2R}c%$3q#=+sgWt4 zOexzRmMgf|3^$Qe9KN_sFL7EN_;}8jHpWJ2=TC?QK8VCb7+C3+ zFAliC{k3v=(1h0=F2IE_pQjW&jyQazLzg|534$J-Pc#S}S;LSS4t95BE~7V3zcm#2 z?E+ko?y9AmDUsVOGnxOn43QCGf@vy01$V&%8HJ$;0mK4}77*yqq3nI|A#RU4CayqF zWiXYwG7|*AP|j}*e1j@t+yrz$B0i21lW2giM*zS&LzsIQZ@de!LG_-a9DZrZ>RhUE zrko24mFR=;J1}{y3V6Sc&DAL;Z`QMqlqrQW!Ey3^snAJ&;phyi2XcUlHAPOkLpY~A zu(z`Ptj+!HH%1ixEP=0*1whnwlv0stkXCI;W_VuGH@Vk*-?1dw^6Ddf3T~J{jt9d2 zyS2u1ZH;EwrHa&*j-t^@Xw{ox;SA&rBmYC+s5gV-5ESc&D4Qz zDZ483FG}zMzL(BsDW!p80iWlB*a|s7Nu9$4UaM%Nn@`Mh#rk2QxAB^PJHm5aCJROD#o`gAaPth&Qf;-Bj!?5`Y#HhT1RWQxm zw*!&=wW;=(j4tcIL=nq3O<2@v)pBm%DtTB5(b;pFY0y?ehfEUo0gLc~F#IB;Bu*6; z{^GLu2jmkdNU&#LWQ&5f2nbqW&5oNB=frEOABw}~q2-e75rWeWDfBzY^G8<7a=Qxa z60WArDz`BU0vn+~#?LL$bA!6J%T<;Bu}_yO>zgIjiL;dIt;}pffZTd2H%xD0oTOS` z*N(MW2(oxR>;_x}_X+W9H|jes!U$QDi^$O`wq}ho#b3M3 z)0QlA>mx^^nDwL;cQ7MhH}PDYpv$QoNaM1DKe%G)EbCwPhq%EP#Uy$x=EbdaZbl%X z+1`OnN5*D6r_Jz?per@HptV^dT62PpRE4a_Iv^}WHke_ajAU2_&r_E;j_tLm%fq(l zRbT+|$8;teOU%qjP#h+I+}k~1s_~kTI*G?$vE}a+{aMO4%+d*5ZfPwSktVl)`6cQ8 z#FsaB%Ohh9{q|d!TVD&Y$^2+`UUbdn(AE|V9R+S6;=ECE%9|pzSuP9^j`7x_1nN3X zqG}a$Wjwo%3Tp&bhPPT*V3)Y^iqn{>77%3aWjQ^^m3E8ME*ubdrgpp9DFH2c_B_YC z4lXL_BWDT@7hPz(CU5x~J-WsYEe#i=u~|EQxzl_+oSeTkjvpW{|7A8lM{;pA1B>nH zTX<#8_5Qv{srlsHVo9m<;RM+kYa21IaWz0rB69@Ne0(-}OKBNKY1bUk0Z>Ch1G=FV zS%yCBexs%=i|nFAX;r>Iq9mOq%thKoIb3SDyeKA{bX%mwYF&_$7``Rl#_}47!<${c zg)}zBsX1#=LGZA=Q7O|Mr1%15NW)}E>E=nJXm}#FNOL$Po+%Mzh)$aqR4^kNU$Jy| zkmH0re@o}_w|uu?b*RgFlc35K>(Df{?(0KpZ}ap?)(492hkYw}SPlsn%*Q|$w#5ca z zb2&v$(JJQbD@)nS$}kqV0RGrbLbgM*C0R4E-8p=}qT1t*o}I8c$i=L*%VbP!RnpYKGEHJCaXgjgMosO+AYpA^>C%V2YIax!)8z?Ew zQwT`5q;IdZa!755AW2(O$RE3+r;O%Z(p=+;viuW&`TAF+Ex5kxUmF`>(BAHm4KJ zLEBp_H+dY<)6wy2H@qszh1E%WyosRfc`P{PToXrK@MxQfwN2ut6$?<18tf5 zYSu{vVt^e!l|9EQacy>-2#*6T-@rlv4Pw-+!agxtoENw}3sx#Yd9X0NGk&u6s_+4#+@8P^~;Al#_M z=3T%-`#Aq&F|S5J-r<+SWKH3T>Ws-|>7K&YE=~;eI2?}^HYBG!Og6?+N2iElIt8ufu;m`ZOem(SE7YK9x&PhqhGom5SSFBmRmA 
z&kHrCHNb(UP5xd3j1{wgH0noEL?-5?y=?_&HF)J*GH%>LgV_fd`kcB?S1vt z!5{Vx_IDqn(Ek43A9nW+b{{?ZL$bR`AnMw}dP`>jsqTH-=Q1`oxVH5^ak*cxu(v;# ziQmRXN-5M2cXo&bI))r3vvb&bNIC!6_5&(T#v>{_IsH5A5%LV&0%F5F&kDSuMo})E z^)KM#*PkzDR9>EX_35Ig@j2J$^7DK?`Y`E{Zgh|i;bYh%&&HmoitM@WGu#e@>OXJ0;CiS zt;n=6NV5r?YqYqV8Ufc#Z)tkMp~w_bGl}h1d*za)IeD6$@$DeIKPNNH2Rw4&`vZ5j zS=y&9FC9+d+J=1yjvvSqL!^-)+RZq+t9>y^4ECiHW1aA3Vbvi>vXew{%JONvGmGl0 zEake}S@#8-z!#%drScT|hv zZKJmLMjPX2LPg4t)P8J6}cy4aZrc37YQ4y>w5PU^;qbZ=zwo-h!7x60<16%L}Us~gK zscB2Wh)UrC9B6?%BI7_mfJ?A27g+ZYX@R)4`Q_7m<~D0;*;{7xbDO+hN;u``3VcM! zk@uGOSD-neCd>R|J0i7FL-c_N3FQrKu-owepl_Ah^87_ktgFh^=hgZf z<$F5fezq2YN4H;aCGn5)CZ&$Z9fe@s8Lz5ay9L{U-EG2~AV!PM-`6&5!FGvp?l$7} zMKX(4x{f$1I<3X296R2=+lueDV%nlV?N;pQ-J{2~NIcw(@$0v(-$?Z+`d>`~Q0%;X z_2$`0cm8p{LA@lYMwjV-A3c0n(f>kSe5e22;2hL7_2kb-uiqpuk6t{3gbz1;gv`$7 z-IvfPCcpi*C2xOv_WI=ctCxO(pGZc8XFbXs9Umu=5=YaiS7__YFOy?v4j{L~nu?Ql z4m~K@IVma|raDps&dJPqnvvdb0L9Ma5-Qv;zuemT<(CdA0yTnQkf*qg4T1Xo7f4EA zE#H9Tjcw=x0gR_vHg*lc*z77I7#20@dO{O;f;$P3g+OZL1$n8{-JCFIQj56hoW9#O z>4wb@2wq3%G|bcUi^DA}CsT@W1Zc;w;~^0m4*GOUdUrk@8Vid#2_*Lhd4H}diMpX_ z&#W(i_vv=>`TgYA1UV3(hVCe?uhd zS0fZ`NxBe*2>m25q?$AU6U_hV;yeJ2CQBhBcw0g>&y)Y4Rg%2=E_w6kXUXxar~iqn z34G%(wzi%lCCm)F25Gld8EObysp&4EYxhZbLM zCO@=7+t!w`UL%kb1Izjs6OF}TW@6U~??M{n)79KG+esHtcgu6Gq|z10Ni|jnRIe|fj2cJ%>U+#mx%G0eSs$uW=!pt@xpHuC9QZUV#Q?BvK%T& z_Pa@l5mgx*LQ3wD%iRv`Y}tJ~WJY(=xd_I4Ez33OEZCgbnQng`A+>EQ2((_TSgjAs zgSP8OJy@ylf|4;1~4O3hby-m`yA6OBjJi} z{S(XWK;ioVH(6ulwvcMDRqbHzu;j+`vt;{!6&*8(Z6p&ZNsARWLQ_O|*4fnYmv#Vu zzpE*d{5#7RFboY7G9N}^bTi`UK%#dcax71hEmiNjU2qOYTob+Hv2!&1b3&Lpej1w$al z0G-gY*=#c71yp8{hIyb&(Z%;gR&+;;;XI!Xp~fL=E;d71NlMRHEc#VDDoCrssn%E> z;)7W3FJnl$hI!h3q9}Fdz!K2W?((5k08qjLukgOssoZYWa~|oAo;Zq~x7fW&=S(`Q zO##}V|2=rTU(x@5b+CJ<|J@Y+m$mcDFFRi*@AJ_i3ssa$fIK6vr>vo+Nae7vv|^|vw^MmfDE?oN6xztc>Z3&me;g>r{p9gO{+f?Y z7D$BpG1=}2YvlDv>MsjjleBG1>Q=!`=(a&WLPTIU7vBW$K&YKvWoWj&mVfQ zI)k&dk+j*X#~g)3xj-k2EuPToFpQExAa|mZInnXkqH78nF3rD4zJNRh{!jdzA0^w8 
z^2I%t-+a_8VKWM|Ic7*gKlaG2gIdoy2Cy}u4_aG$6oL!Fd2$bjMLywBydr6(#D-m) zAoG6Rc&n-WHZ$^?cGt!MB8!k9GI>5qk%v{27$Q88^@J~hnxbx`Jg#Gx5K)(N9cfb5 zH))r6h=>u~2K{7FKBQ6o70$Unt-;6rRS;6u<6-g#rzfKSfAYc}im(R0H+oqU&uUVc zTU)^sjud#2PI)nOYjIZYK>3hoKBFES5Vl<^sUoD~T5bg|sm3c>S!}h94pl`g30pmU z_RgY5OzAI@w$CX1@7ObdFc1@QI?0184vbc#2SpGi@nu|rSq65E|>y4f}(btKl*nAzy6wBT)eV66zL8DXK=D)q0!rV9Rox(C79ZkP4dVd%`!R z1|s1Hnxx!?VtWZ>z;{IiYD6f9r&}Z`Pq$hjr7OjYOn{lKP^GO!Dwt;}L)j27s|q50 z67s(bYf)TatxNywl9QF_kgLy-XDvawvXz0gBdKK#T$QL?o65|vYCQsNT$$HIRt_%6 z6VOW56INRlj}oBQGEZ5Q1YM!ZTy2sF(aj9|3^O!gX85i4+PW1dl^mJrg79{Qh}PAT zm=^arm*Qqwy$ZjIHPmK@@!4n1LCiZkSo$)!YUWvWcoRZ8VU$;n)qfKhQ91_Y%y&U7emx=_iJgTUhhxCR7u zBl1ayhkrz|s4o|zgh#WC7SSzl(ip9$93iWU)i+kevO29h)lxO5Wgt4BVDSWPY$N=*(Tf^2; zn~LEQu`=57eLHxa*xj1Pz}wIIQxgQi zLp^q+(JA(N(+UB|bSC)yE_8$${oU!!b=TM6BBR=_x~7IeqIxD#bkE7c4&U53rA z)IyC`OrcRZO^#7dkd&6(yWwpFcU&0}ZU1o8!Hpf`bG*WaQa~|?8w-K?J{6=~bP<_D zG%1is0_Ow8-{+>C&}vUJ5WFD)8m>Ry!)+(wX-Ser{*K$9lI{NPDsH{~rHz^uv$OP7?g{{OQpfa+?3|F&Q;%m0x*?wx*(tX&y+RZkCyAHvWz= z>)HP}$uX``DCXVEbhH^0;BxyP!uTIQD#d?1did4;o&9g)lO1W02oC~3wDT%qFaibjY`UQw*SC{qG|ISb69ngr!=Kw91uAk zLbt=!yD3VCc}qHcDdokQ_t4h{dnLeY#%s{myss^rUmuGelr1Ki zfHX5s`85gUluM{W@9OUE3`% zP~QJdG>Kd?x*-XmY5zZXRL=kTcz^G1|8FsS>lZ2U%4jeVu^id-gUoyZEL7hl# z*mFOfrlh1o7$^4ysmHPrL6L({Mza-t zMj`c+4>&BI!oLM79SOfAhi~p$wklJYwxY=1rFMOl1H!$_%#$BHQX(t@Kx5kkMCyp* zzI3$GZ3!z>h&~bQ$P29W!EWugZ%-#6rCy}d_-1ZT(qRvt zN0Bff%ipe90kHIpZI%J=S|y$%vcc`p6huMu&!l`Xza3(Ws)m4vi$zaTLKomrbD{oVcjkpJ&JdbEER|GVl`Ua5SEVW|j4 z)9)RseR&^C*^2Ta3wHxWpHUsVDn`R68pPwt;AvjW7JOOHw~GNR5qHOhnjDeKREOsL zvAWHvV)MME#|712E+J6l3Ocf`@GK4ZPXddWPuG4SIdW1T*7~}v!}Q~iW9NM@T?WtU zI}M6Oy0vz-ijtNTkL4BDQaek^uvX_>4+B?XBX^a4rx>W)|6wheXVA?c!gXyqVEO)k z@bDp=eZu?yzIyoZZvVd#igMdDv#v6q33hg$CS1K>dp|K`b8@$>*QS2n__l5gc+QK< z?sk*G2KAKHR{!;-4-+u}PqRJj%b#|SwpW9bg)m&L4J!4;6w24^gtGl+=a*s!tvDO$ z^oE5+Qn3_9akSg7DWDKdVdWb~rFdN%(P82keq=BXlQB|tSk1bItd*Mz>Kr3gF5kPX z-~?~gu#5DMQYtEi1yw2ydIAtUq>znlhyhB&zH0&tRL*lwdv^G-3^bgQ8aoO#M<0$2 
zDHbDRb~CE)V2_H?r{`fhxGMQ&_x&~Y{8ec1dkDv)vKd@!Yi$utag%&*Ct$R)W8=h*48(ExOcnBILs^$YwH2(0>{yuT@!%-(npV zt-xy({5fvjv-36Dp;j}11UWs9auLyph7{a$LcxKJCV~HYl zg@38DuSFOBhjbi%0YT+tgd`kg>a4cr?qdav#^ujh+FDGs7b>E2P3#$_T**jSB=>w% zu$HMpV#lRGS7U?KexN06WA1)YZN;7x5(scUIF2*dU_31DmYUg+nq{x0J9(+~8aD0W zUAncpWSMaT`lmpwkC2Xu(2|+zAj|J4FX$F8@ro2!S;FGwVI^r1y{hxeMFSatny@08 zTp$sww3;@$l)KT~H&GE+s7(^DfjvhI^stlQilp%lU%*7a_0vf@S)Y%W?#ZvgZ#r+$ z-2k1}B{o1A;wVR#tcCJQMxNQDf^NkDQvJ&kEL~R@zq#og6$se*I34A3kCO*anICws zlEn6TnCyBlN{gY2R|p%E1v0*P?3=J*9}CwhISnss#&MYzj>3saZ2E$-9D45F1cFi_ zxr2ElqE{m-PAA&KI&9IngT+;OU}w+kR=Z?E_WG=)_0=hxYc$evY1))oz&jq_t%P)=5(0v2K5(@f&WHCr`Ab-aTu`XQ z7@to%jFb-XLJ_4^$f>Pxb%!Pl;u}(Y34m%TGmKoZ_4d2xKRl})lIC;U4sIu8p1K2` zgcOB^>}{nyURJDS@FI7(@EtT|FqyFjhjkh967XY?QRl_gj;H=i(- z(v(*CiofflFTE&mQ>3NoyWCN}mZ}I_SEDKgI)=ngN>Dob0I(&r6o^W(^@AKmN8cWu z)GzvgTu5Z2YZe>GrS;_xx@73z(zQfpUE`cdB(A8m#w;P2s`jFzfy)(MM2b9n0^v<4T zC2tA*!ybEU#Y~kS79JO+!)Z6wbdytR8RqY^F@oB0f9Pp&!8`#XpP+~+Q=%%|pxkQb zZd^5uTb#?{sh&u^!3#un>M(j+Rf);dvRaIus*LeAU3~mz`}1ES=Y+$~Zz02>deHox zB|rb1ba1_Cv=;AcR5khA-~Kmc(i~UDAngPYe15NwiRebNY z-gNF=IQ)y>X)rn_-kKAv9ATwp z<0d7NIm|GaXC<-3H+T*Bro0X^JkBnsNI*U3iU`9S z1eKzDcx(h#UI^~5(E;dNy(Hv`#`o6_h6@Csga*6GbRL>G*%=iYVaoPUs#h#d#mpCZ zyL^DY4P8~3$0nh^sJp9IUqn>Sp1*;}oO&P^#Vrnq_I@3CuYdg+tBzRCRK-p(+~otR zP6)iTfAUDZJRwZ0n>dmOowVc}(Q`RN?bOx=A*u*=U-D z1|U3Yl|~gOw#wmOj`xu>)0MS)gANjd;jEm^{$;H!_DIC557|zy(a2Jo);3xKJ#v3S z!}a=x(b7OywvSd!af%gh5z_*{x=)UO0&lN)*o4v{=y_#@Lq*v@MwPcGuYP>}h5)lKF^HwWgeE+pQ5# zmpk4HK0O3VDV#0F|Vk5ENWF z8}Tx!pAm0N!P!(m1`yUH7QFTusU66om`_G^$JP}`74-H*uRNEHV%cHhI#!qtaM>yn z*)yCmCyx?qbrFUwq}3b_L>>XkxI689gs8A=F9I+L5!R6J(HWHsnpAP7R3>b#rjS`8 zS90MffzfDVuM9A9D_m&|H4C7$X_!G&m$5xs2ZnAgz9y$)QA|$MMV&WlJ-0Fd`$Vto z0ABh9xKB3jJ4M!|S+q!ds9?>&-$z!fhkBA#F`NLjic}Wz= z005Jqo#)LM(R{%$+Q68uw&i=Gh(hR713Lnt=a)}&x{87>Z1Fp3i^@P-6otkgj!_%Q z#=@A=U~IFu(L- zW1lrrgQXCH7z(}GzRuSh2+y4F4(W#hV~l+SG8E)l9`I7C6&{SDy}Qc(kcg=Gs1)_> zssQANPIx?t?w8GqvL6HX&l#>Y_Z~eg=YRj|;laUO{@~^=Bb4!@V-O>)$B5i<8x0TpCT%ugWY>Kd!WwQkmuQRvAK=f`aJobC_9 
zm7((eFnNi~>+oc{IK|zv7cVDsa@@E)8Vq0$MmCPagzieA0?sW^r9Ng+ctsyksnNz= z$~Fi>evJuF-lFG3 zF%;0;jO3S4-5f(O0|_-34rio(0a5BBYSdF==%b}XsfC0bbwHYia)q_+8qg-a)Rut? z;uZ^?W%& z)3mP-u_5>ZWJGZpB#`E+u7Zi7my01@gYX(|f-t&J7A&imz}}R+X)vZc#ck3aL_zb9 z^%WgMRNWF$+AdRc_#HGzVcrflLrP0l=XY>JLH_-0vY1-!eka+M05j_>u4V^zoMHE9 zNm}(jo1J>q=r$3r4@>2DU+ORr{QuG3SEc;_j~;z>r~j>+vW-4B zEWWj*ti+1jr4e4V-RD2k zIWix1-+f(l^9lNJbZk^tEs2iIjUM(lx;Ag3u`>UMv(to(ESIHR^6S9T#Qz`eR_uTK z`;YGIf9v49ZC%6v%lAK9GBA?9_z}V@5$lT&+)j%3?1t9cutAX5#cN3I)5-EntvD73 z%z_M}bQ`=A`~4lIz1UBFP$bOP-vVvXPMKJdNkzSKOX@IXx_9?Lw_WPda`pUwb2orC z@c;e2y~ic_|MBkK{a?}P+W14m*2;E`bk@Is^KyT_m}SYf=gR9@G~7bHb+?5&&_cHq zBins)ZxjpP-PS_aEjYi#>S3k*)}nqWrJdU1x=UnQz5LUE5*QAeD(Viz$EoF=MlSyx z1<#gs9269zL(sK#XklXVJcpx4m?j6~$Ih(LQabX0**JT;Yw(&eP{04f+ipWQfG*$v z5BB!H+AHn6=V2P8@%XFOLMHp-Ggx}qj@n8jr;8+3+^wYwUqrLDCpBHSi@=2TKP8i^Txt)6$0 zLxp+$gf6_0{q|FD;^d;hy405yg?pp6CcH6JOm*T6936aET*VDD%j6%jUAR4K4&TrUGfB39<`QU-FRwjd=2zwZtC3l&7%(8wuhFu4ss}IW!+aOwq+xCk2?6g24gpS+JZNK z|ETR+7{0sIbQ5rrH|9~>OI)e23#cWj#VMm4lzx--w~#I^AH9ckX^r)6A`P29sYLG5 zko4)M_>bRai!2C@3`lL!UH_&~%Rs${n;L-EHRlOo(vFuH^gHk5cDCBg!20L6&tSEJ4+N_f+qC=@wTxnz8jzW1kHvqNc z6WT}3t`Y@!1Gv8(xo*8oI1VG6H8(){`oNZ2UkBNh9M-y<-ljWC6(*A-+q*4p$why~ zFnynecMgT^yf`{}^X&E8KfgM8Oe=;YA;5zRPN)7k4+uim_^zqpVy3opaVYY)Hm&3H!-&U5bh`&{q+hX$oQ z%8lQD=;upxhx*$4LMgwZFNzWwghb{ebK}rPk{`AKdd=G!V-pZtK-Jsv2tD9kxJvI6 z-lN+*Keul%k%03jX)*?E;4C)+l>&h*9xO`xK0aEx}h zPR8Mkw;Y8v`q0apAPdWJdA2tMYq65+{#x#xbOQ}i*{cF7obaHC98iLrCu^^u*}jW5*EE%BX8aYa_ySeRNn42^7@AEt1PdQQoq--OyGaA-kp^WXuqku1@b>> zfxtJ{5>Y6F_%yMej?~eaY+ON~>n@Fwnaj%bstI(LN4p3{<^sZs#I#IXR^ipeBCH*sQga?AcXvbXm~Agu4VuWl`mPOY(v39KUR+Tb5^rqP zlirP7k6EEkLYAi{azoMU9Rtl*Ak9~9E~-{uQJ6Qt_+E7JOQVB3r|Qg+p4TBQtLBr! 
zT0&|>mhk8`LX$O*yveYPr)zI<^k##RwYIs(wp_Dcd9J4s0ab8nvq#$FVbtWa)|pAg zTWQ3@tUPFaN2J54R9y9z$m^g@9osVc1KhCsxJU=0XnAI%4K)@n+2)3pUjqI~u2uA$ zBI>ST1Y76(fQ=l!%+kBc0`d-j8@ssBky6n5$_t4X@>$|v>gSsFZ}W->&`LVN-FPrP z)m-bx>_Zmq@8Iq|iv_dX4Wg>r79UAo%%`u3@*ew(h?*=Vm$$uk>zeg;wQIP6ucrUo zaR*-`!)7sp=SNsu5^bsqVp9-++J9UFinFx&ejvxe2^sGYvq8g{1)~ ztr~6-tb!A*6J}PZLr9y6D~+=q4ucE-m|XnP`ftV0s#e{jGhA04;~e9HmJIpkj64>M z)wA=dR*oeDqr9Tm4YNc=bh$zgx}p07tFy=pYx||S{I7JQU|sQWZrq)M)$)|WgiM#7 z{Y4)&AV7;bBrt|e2(P!nRy&n3xs~sl&_=zCb|p)OO&mzp-LfaQNk``81cpCIl@zd< zOsCy9Sy)K$L}ZZ*%8WmfJ8+ra$?u>+Am&X?(maUr=G8R) z$0FNu7rPYy7Rei{is%Hjqu}^zlJ9?uP;y7q;fsSVPTiI1<+h zU!LNcE~nu!MC`XQ2t31ZixGMF^S(AM(`$QQ<^P;*G41y$H}Ph+fhPX{sH*>YxOb=j zzqUE|8#aqWSB5!8lptv&+&^gXGgG@M^-pQYw~71vd_87%2(j1o zitCYq2X|E?#FFM4c7@WRq@v{!B>US8Pq`^~Rd}ndt^%f^gZA!tI0unzOfDFDNT#qL z_^S-*hU_}eCF;j$S-h|QBK(BkYxqCHfb;oeb|d`%;lV-q{*T8G4<6s?|8IgU%p&kT z1Jx5&J+^dj(C3Cryf7r9#Y&rmp&+_Px<8p^6TYotr))AdjWlc>vd-&H4Q2=pVA|a! zy4n0h*H9JRnjx0z@+!8$G4>_PhTYf5w3`-L_oW&$q(5)vuaQAMFr|9Vwzp{ytML9B zq^5&GPIvI8!z-E5qnT0LcI9moesc5xy=BF?(pPoU;jPTui-{1bR|DM0kxcFC0Dd~j zZBok)l8ciD*Wkx!b=xf*w1S&K(xFNilm=Ix2*EF3I$GA%4pu_MAXGDlMSqq~vp4xD zn=CvdYbVoq0o{s zp$73mlg^6Z-4hqgsAY#Xxj0iBtDu=Dst0R_!;xy(rPXS?%7SHq+$s~y&cFi$`%dTm z3n>@sxjkT)YPxlOcq6)rP52Y`A@X1MA{&nKbI@{j4X`wv|92nZu)lY(zx(*{!NLCi z-XC`N_8;B(|6RjGM4J38IZo$fOD*6^o(KsfA1<`eoDV0b2`@dAevlBqnCI^^j)m}^L&QV_GGM1S-v0vDH1# z=R5pYz~Abg{&R*0_^S^t&hh)=Tz?kt$2+c}Q;cshP0sRRR&0IQEj~=~_cVQnzw;5k z|9>d@Q;J;viz0dc^cgyYb@T7I8+WUl53+Pe%Hh#g1i!cak*xpM&yJqHc-9>at_9qs z>woXj<40c|l-56PgnW2+{=1gRC^h;Y|JLbr$o$)(jr%Zpae4AzKO~*be{O;5<%e1S zgXHuw`8J=UT)wp>ufPn_ zY=Xr9giHsgI5Q>1WdUtUH+h;BSgK$RsCR)zhFLPoivnx|SvYWLKnD+!i!6PA*&%-i z@-`SO`pi}^k|EyHcQCB2pI^PFYwfZR$>dCi^S65xt9rPz^WnpXuIMBoMBLxndNWH0 z*(ja8!=-(~<|b$J4=L`y9_GcI7$HU*!9^FPGYp4BZ{~Y*+YsdKF+qZnnnXX`U?^B?~A)7M*DyizgT|< z*F)d~ao^Tm6f|cvO+Wt`D01ZqVcp{5z7huZyD|348A#786&V+XmtsrJXc?FtkNXF? 
z7Opkwou2Rr?>m;YZ!rW81#ZmmuA^N7I}ft z&thw<6* z27=4(4RtaZ>zp%&L8Xw741E(N2xeJ$k@YL|PHZ&kxdqTWy!i$8<|4z&!}_F43<^U- zhMUvW19}gChHMlygY1k`&IA$9!sLF&6`2?wWHS)$XSfN|{J2jpLd<@NRYH0a+DNfsPI+qLWx}Vw zLePaGH2D?nLN17ZV4;t{sC}w6xZNI|7t&#uGdsP`&N0ce%O1h;s}ArfGxLN?0K-xw zgj9b0n)J4}d#DL`gLb{^8he#?ZHr>i5x;ZH5w=t8%d_+nHF`h)t<-LT;|EwfLXV3A zukf+((#b?$F&8`5Yoz9hB<;Fa+?N^(uT1C@Im)!0(ADRY!MxHRr36=b8BG8cwUfW; z7uCLG`ib$9v^T?{tmnW+@sl|-8f;8J^gPE>#WB;ASgwP%-)cZJ%>OC&>U`XDjs2P!cT|X+ z^vb>2{5ODRz~~MqknOQz32U@}O~#(h*nkJnWZ8Hscal0tWTk&FUvfIJ315S+Wk~gI*z)nQ@Ly*-E0?acgxEIWyi4C~v2N=KnX8M(Zd!UJ?(SK_}6NTh;LB){N zfX3+@L>}`Y!!kHx*2V%07fLSE5jL%f$_G}IVMNvAMDVceFF<4V>B=zH3jab`54c|w z+%m$Tz~^4x;za_p1q{=H7k-IvFf;(| zEQMLIXJLzhW~4d-jg|q)512yB&Il|}w%(d(SeAE+u#KtEj5H+4`8O|sC~RS~b}P0X)mTWA(5y-J__E9hHv8#r+XqoFNQ2RohMU_3EmG=p3o ztd{uX<5>y-hGaBKqA$_`Gq;h!&eL;!YMTlNixO~VAB`5!DuQ$4r)>H&T!}k^>STZ& z-&XyEUohCOySv@}uRDY6G*8E!z1_}i7mVG_>=Z)b@dUaMIUq5x;b5r*jQdpN6Aq=h zYYg`J*~`hCZ_ggjds?G3SP3F9=PQ(fgbcN6es&2llofosuucB3$k|O>{)GQxuqF9N zfbj;4DPNQc*$o!1JzRh^Wn^ZXEQV0WW>D^mfbj{(R_PjpEr~7OV0};fUQkuZ*e{XX z>p_n*9;8F)?0N~*fjv)?+q(s;7K8O6v?Jn~n6|H^bRY*S)$3;mB7j9KzdAYE!*1-eZgQ$va)pS2CJ4EJ5VZMR8pkPO%qA$ z?@T<92_>*dnz}uIG1$@N+vu8%-{2Yjt!OylJI*w2TIU4&#Z>klm=TN<1tKY zx0!}p4c22aKVt4=%*vp=mpoFmc#A)nSp|4v2Ex!fGS=6yi|A}Z=CCRgYk85uDjnQP z9@R|7%1uSC=*h&1zCm9pqa}W&qs8uv6ZDWRX;4P6-#;wTe55s}YgPrm%PtGz8;GC7}3 z#%xf14<8>sG2HZ^n+GeqXlcAc#%6<6Dl%hb53XJ`|8$T17z$Wuu|xGJq_;=*Tj)=W z#}=&vp=*&)NW;`477v<+dWzJATsYt}5L|k`Bo}nzaU9|_WsRz!n;8@-aR#R+Y_oEQ z0GZjRIhz>M8MNeQm`o}OJt3L5ZWHAO#M6GTPxKDY9vUFkp1IOfyyHkg zQIi7LP>8&n7nU*D8gbFr#~sgbj>jCfw0Q;SG(T=$MHg4E5y`HiCBwX~QhWnt<2{Oo zLGLEF3gsB8Eyk73X!WamfGC;Sf<{aNo&}uYDy;pi1>l%aTr_Mh4(r8xRNe(&ni4s%eaW5(N0;6H_>8x!ctfq9#`#V_ zDk|#M za6Td0$7$aXsLi5585F@fBmv;c9{~P(GNecn=~=KDV0`AkKPIy~IiW~F?U%G7=0zd{ zbweQNLws)n@)j7!TC19z6=L%*>{~W?4a!$C_`#wqPN%Hk32}}R>q;m9ykv(aPy&n- z;BW;cGZ-sbosQK(ug5&428P{Kf|?0lWAosk*R#CV6jHgan;Z>?lMgsOkl{Ro%WSWJ zyR7K!;E*`znNHhce4Z}_J_E0zJL 
zI_$FR0nO`mV|Z_Y@d)6M_yf4HkA+0cfU{czoet>mz#$*L>T(ISFfJPYTn%ucKM(dL zfJ<2mds~z@z};rcS2FeBth5TQqQvaDpSMzB#exs~xg1t2wXgC37;oY#H0>J6R|hxP|Kz06f?9nuc^lNI)}#;+C_w?LEupDo5m zg{ZPwOEmKl9`beD#u}i8Ra;=(dggV9`6!=R5C)~b_6#dWLa?F zVS=n@aCYy7gU2rgoZo}6ZJld_9T;!o<3mJbjrH#s!CLt_vDBnHa7g*8(g7UV(cV;Ab%4Knd~Y z0_*Q$GBhWWUz6|TInKuB#Wj)%feyeQt^|0o;Onv~fS<_on*_WGyosxX9mm;9l&gdlHwCe2hZZM1*Giv&9ftjPqiiM4 zRl=I3KGyt6fcGkGECW2sR_eIQ^??q+o488Yc{j>d;Bpq_Dq-CUg4TG@0^`v1&E|{g zGEoIw)i~z+h1US9xheo};wnBYah|eVR`KgM1Jt$zVZ6##0vh)_j`I`(JR+<372Cqo zx*P%Cz*YSI<2;1`kI5>2&DIdLEVwF-M|lbX9+y?F5A;;yC} z*2W+Fb5Ny5#9XM-Fci!!TudHVQ;7v+TAMhtHdQm(rHP9O1C_MnT zMZ<$_YPOcbXhn?UjEwxiRp29lOHqx~+bq;TUJJmXkSGv(kBs|u>L6;y2w7g?DqR@& zF3rx6COrrW()L`U-}$<3xS2hNy`Lf+ACNbqz31DXYyjRQWO=Z3)lx4VxP~0Y$;a;_ z#>sgj;&4THx*d{hvf#h$uz9|Kp(^Bz&45cyc$T8->R?uD0Pvr;X*>Tb zY&_X3EIXM*#fm@AEP&^?)PeN*~l(sgS5a z(OP0b7H*jyTkc_!XIvRzt=NK8c+(JW71D>cfqx;Za4HqVSVPBb+^A)@p}aTbmg-3X z#-2rEbZ$7u>iswwn3N!1`_->RDkk9Cdl_Kd1Dt)IW6)m(aA}NrV-mI0 ze_|umd*1lmS9Y}VkxG?x*94i=fCo!t9bm5Q__r2-Q`U(75&?@Et^jT-T_5ai0H;)V zV1*SY6%Q54Rg_Tw@-;b&b3Jh@^U7}KD*!jCMxGz{Y?A?v8vyv4W>>_P<3nE(6@wOK?l* z#y%?m?%#u1hQ17VSeAk?1ByquM9;rKvbXwpRsbCQoWcx;Z)W5()DNP+tvucz=9I_K zq|yY6*>?|F+FNJ(YB>e~ZWgCM>L`u-SoAM0rHafBVQz!RGd4qHgQFxo*hbldkYiOu zmQk*w_y_8J%(n!ZiUIJZol|MNPNbD)poOO3TSzEKwduS*y-ILt&-)i}hr6T@Z5vj# z0scH}P>y z;SZGbKPDAw%Vl$<@H2(eNlG1QH+ca3W0`UeqjYF;BwWh`v?3lb&XX`w zr>a9yr3v(ZwP97Iq14RdDvtqvoz2^JM%z{y{96!QMZ*O6bpcime0!uQ55i~TY) z+_AOXLBvi7Bvy)uqkcE^X@&h-@LL30#fGyX*wIWY>|>+FwJNZGuM=(+8OIHYY!3YO zg040{0${(H@2dylGJj^vd4j^5Ba?Vbt!O`hsK%>#S*S!%KMuzgp91` z^$q~wKU@*~xVLkiv-7os-;90h0=$IvSk4m`-T;9tYXaDBmU%*m;;d&G=LzdC183ED z0Kb{@gx$wkPn7e76<39=MR{7fHGvh5^Mp{ugDc{kC#=~5o)wD__`OP<65|Yk=tfyj z9p?$_R(e|V0l;tOJRu0TC3uTQ-U|DEq(}9fB?z>f$1l7lZkwwE{ASMMLloybgopZ| zbH9EwNUM|}>{nS&iT3^O<9vs}ho2=v(&=brgXR5-t3cJdEP>z1d3-41e22h~$vl3| z)-bJ5aCO>`@*M*IrbD}El&cd@T;xGRHyz-m;P>cD@F~&$ro+7KhUl~rS3LFt`^|^N zjR$*e4E`0sKiO2s*T#Tn+Hb}_J5#p|{Bp2)ZehkO&$rfZF!YPTZ#K;__>JLTM|#U8 zJktTXicP< 
z<$y2;p7rf9iPq~v!U9HK`vc%Ni)=plP0>{jP+Q9d&1^Wn>I&HRd`o*iQmTg0D)^g@ zvTDgVEg&r-t(IYb)3H_zeqFSc1%zZ|8w}YLZ)IyGww5EF4M$ut_;oQ?7LXA9TN8Cf zq>H87a@W27*ei=KGD5bC4+8itw=Dtx`XjL1AnZO|Ka^JBUqc+01F|)yeLkn(TY-N) z(O8=pK|AnQh{p4!|5mrH2>90#xkCkZL2-@acsFo5nHtQ+|u<4qNFHM;_>l$)wnG;JEm2-W9W*JTMT~luB+hJ3Cn_q zn~vTx*l(73C=|-`eW7}lk1EKo62s+Y1Y7VbCv_s8x;QS|Xm?|gT((K^3|lvri;1V% zIK<#@BAzP-{u&WoEjbUSRHCnX6`HXRDlXVfK83dCI$uCMQQDWtuGillJz3@Arz;0< zx%ew$ddn_S0=X}HUH{HiRG$Y9l0WMADIluLVU-%-s0*1BLD+yxN*94t7=ATB2$PmA z&}lRuhI#viyTN6oF69lSYmn{-<>q5ki$2U=7waQnub?UC`R~DM29cP-S9@_}!F|k5 z(|(8{SLN5_uNN7H;NPJ7@0>4)smJ<)(e1#vbj7czeCyXOA-9*={KI7S4uP+9J@ytT z;Uz^dw)@?eVjxH(Sqlt+nQYo{wgr@B-Ooi=@A^Q z{s^RJwlgNZ5L6C9e`e&jUf4J+j95qps1fA*A*FIjr_?4Is z=5r$9Vw%F4)A^EPUHXR--W&8S0j+f@Z4Il3d~|n=!2Ss-W{#frvalN4lk%Tv#M-7|u9r@}zYDk|jj0 z2F4=Q%6(nwH$Wdu`tNXgdp_v}Eijv@2Yosza+nwh_*FpHYC%_5`vK5T^Erfve8>FT zDK4_%uvbSgf^}qI0L0&q(MvlVmL8odsmXPqEPVC25>Pka|+sI;ChpKDBfrXhk=<>ZNl4xdD*<#SxD#bTec3HndUXUPY zf@0fNW#m{Oe(jTm&hQrxGQuG-!KT>+(wthKZb*j&N|p9)9mh`Y#4iv*AU_ ziJ-C@U8S{8wDW;G)op#HL8v;q63y-n&jOxpgN{a8p0F|4C0S_y9ZIQqJs!DSK5mC7tYTPd}RljDtK?DJHHR2%1Y0T%$pm2b+Yx#jeL|hc5tAINNaMgsC$XN;39NyS< zh6YfauJ^0a8T@j|AfRcv6;~U&LDY;P6ad4SxJ{9@*b6WdJsD5s?~<8_2br-Oxg7?i9imXHUzxQ&_(N zsuf=oIkj5YArSHSw`lZRSaC)0+7+jTeU(8(*;70Q?)pbTgXqt~B!* zzoHjXVo4Fei)1;&jAnzFtTyoxp=2*m(kW0x?P(r8UL4Pom_gHDc2pJ;%Z;Y3_0*3S0NWg{Xp0@>q8c&~?NU;yVRf|XgSGZ$^S5Ig4fF@7g^ z;7Q()RO!VcX1Hu>wOv51wLdtc^TpF0a2LR-V};#A*QHgO!=O9FrGPS$%n@`ux2r-jSahMhxA zp(18pcBo9_5Ey=ph%HLZBE6$ot9dZ8)57%!Vf7{vIPPAf!J7n8BJ;6Sj(m4Ne8jCr z@LNfDgw!4a&?mlIxG9cH>wIdRU^Vw^wY|N){Z@WlyPc)Y;NwC7RG0f>Ekpc@gSEz^ zW0crMpcQC>ZEQc**eJ1vU++tUb_?NX!09padEWb%jJ;%8VP80Qy2*aP7en#pfsW~TaaNwfdN568h|X~prexXo~l1QK$1pOJ`N1Z}f06AG?&wwKOs4-o&4?B%Q7XB!zW z4a4Z*nn=z-ybB-ND5pL+@3il;M|@{)93n1x7kI~MXcmV|nK?||1KGu|FsfXP#z~-P z{|H`({C)-{=a^w+P!;YXftY2sokI}MRp|c}d)CZ$)4qXuN$+W=#`$%c#G?E3ZXi2I zZq}GcAjz{kuiqkxB&BOzE0a%#9V7v8y;$sh&JE2fmbO3c_C^g*eyfdZCzvyWMT1gD zgF!Fn$$@a417N$^gJkMUgWmalY{uU 
z{0v+pZyHSrSBs9%mE*z3#%QoIMi$5rjJn@$SaIwI^F$g+OU!Ls@EV&q=pTV_k^~as zH&32;u6 zp5!_VMg-B3mL$h!gL#vVyN6)EKyu}rz^*5naic1xbFEg^?|d{8mL7;6T`-ooaKTb( zcE&TRaN}FbmE3a@+JR)dNBu4?3~FIeoi*qb*m;0U208ESY}`ZcR%^f8!w*=V6oJRg zxR2kHZj)JSEXqa%`$IfofHkRjO$pp51?Y=Vi#!Q4vRsw4V{dPZP`RSqL?^kQTVPBEo(J8y#!wuju|Z zWVD8Xbmgw?<<=O--%ofDSEE#wToo)#Tn$|Q;9c&MB0Ak01n0);)zaUjEk{( zvQIgziE(hD7iN#V)V{9xzR7CQezXfVpd7G4l*!qmz>@X=YXVr*gdAYLAd}&4wSefn z6Y_IOUg7MZ0a*iGNU1JxiygbnQV?bv*h@@@gomhrjK_t|py=$l1*M7H z1nZ~96AiRfQh_1>OJRhNyxd2TkO((Zl@v~MU%I1))Q0&r$O7HOQ{31_aJtBCo7qkm zW&m-Mrjr?QK1b2Ghk~(2kjdd(bfqg1QH0%rE>FFw=A&)UY7i&UhV*M3l^U;D0>WU^ zNAcI!A*FO9+w6U8o#*n5n3%I${_(E)W9SpVhk3omgNQi|1JOlwzJNGnV5 z9UBp)D#SVx)d^0ww^jBMoDl7%v4p)-o@)GGQr&65y$g#N+2zGixJJ4~&bW{@#ri=9 zGIGT#*6^}ByE(m~DdhReUBCHTrUy3I8>sC@#@O+P_}#^ts#EQApj`LBpNly{VL?_0 zKZzLwFh(nHUVc01!8Vs`)fEkUpjKEl?!QpG2ZNrDhtR}2fQIo3T>QoZaBx0Aqz!OI zuUzj)e+owyksB9jvxA@r{tEhN-a*G7(_eB7tD3JJjVT;C=;XT*z_U2zVe99WQm^Ij zJTl-27@?1q0XOZ4QaDPsi$gVhk+4T>CmCCZ zwy?Sq64qto+36$dLUEZDcr{U}70FZC7$Yio-Yt65-KVlX3z=KJc5CpF%V@+I&G3so9IDv3-(O`!TFSOC+ z2x7d4=J6gs;MAl^pU~BYaDLEQKInDeL?QqWMm--P4HFnyWOYEHxersfS%h{Gj*tKG zV;y@6?vnpl-{2RFAXCQIAM44weysocYh9WA4>S~BrYGhMmEgMZgIBUhkXsJ|fr=G? 
z{}W+KNs_Uwg>8aZE4*;AO9MqtD!}fV+(l8;pP54ZSe>$_SIJPKg-IiYppYrI=BPuP zkyEfMXT|%?`2d`KK#?B>-KuE2+KkU;p)A$z?Rbg2U+~_+Gle_z80l zu3XG7oq|vOU4roiyKyFE1`dx8QP!-$hJd{ea%?xKjjr7C**8Fk_&>R-iFVy4ZB9^B z@K8ksfeJBqN-w42w!~rdesl~H;CY|>i@uWnmGng?_DMv)n9IcAL8kw6(2q;5uoz{t za&XrUtXEjFxWm<5s`|Ds7G8+Ey0yT?fvSXxzFg2AHxIMjG*r;*qG;jx1FT}=Avlvf z=Xy#HqfSXp9ZY?MhVZa5TZOFi$mNJaB74nZ4G~ z0S5){eN>aho!p=ixS3(jaH8(?2WvC0kHE!^5i%#0F=>=SVb@8c6ix=>)0rBtc=zVj ztKIjnH+Ob^c=hJ#-&JbxVkpW$+67{VaU;k3`KUXB=`FJar#>blVfPcd5qjR!G55HNJsFZA zDk9}aGS{@F4P;Gy9Iz&60#MZ$V*m}n3T!Z?!ow=*(Cm2F84QnFeK^J7%febL00c!qm=F}80QkMu@ZcCqgm@E^elZv{PzHa*NB0rNxI`8< zuy}gnTRm)-`H_;r;Y-4zL>!B#Hwew&R3eN4Au^q-3ta`3pxQq!q6jx;S=@8Bf8yr zwXU70KZKFg&fXC(;}k9=OpU1^SVGC%uqmrlfSRLR?mp-ZcXKn^p&L&=B5oazTyVdX zJ*yKCD`S*EEd}$?(SB(`NjKI84zUHLy7Tjxw|&Fyf$ezGI{5IhHG~$9`M_}QO1uFv z6OE@FjX{2}4m)iT`OYK3%=U!KV29jnQt!2gO3eIL177B-ddo={lFk^Q*!ALj7rhWf zd)6J`+@JN)l!%xtdOg*pM0|-qO4$%dmOZvIR4Mr|K6T6AW3xIgD4lKv23yrtDjd#d zYY`0zGkPrbogT>;pPa@GZ+Fg`^0NfSfI%x@G$*b83V^ZF>8k^xjaQn-qmv%+oh;=I z&N3>;fBkj~+Ai1!;77(UM5k;_Ldl%k6i|Sz%aR$()z0>es}-t=hMR(Wt?o&}J%pt~ zDIlp6E6iyL(#YXApgC*K6o&`}1d(7*mb*78E`8foHeN_lnnazuj>qqw{nz%+?)J;S zyaxm?DqstJB=S{OYUmm^Xxc_7GR=_I%qxK4(A)*xY;gDLP1RT)cJ><;*zaD!@J49i z1jQn=wFmdJ_0{#&`>K_mt-DW~s@(}pE4bl#7(XIs!i~uH79-_nT}5w8FTv!GtGJx>OI)pVXC*c%mXPTZ_!t5(xveh7qZC0~ka) z7{J*S$vp&F4?(aJ$Az~P9RWbn$=L(ZAG8-3M&?e;L3;CAnxL$T?r(U>rJU5ILoW(GK8lTjhsMX$;uY$vq*=w_%60J?8Sds?yn#Q=B^ zocY*#92%vrwlEup<^eyO8r4aR*JQge7vpcT{jeosm>g!*_r^S^{kLFFL8eV8$-L;E-J|1C_J;#}{4=R;Ju{=QRP@!w>gChz-R)N|@$K7e^X1>N z@3vn)y`Od9jH9di6VMV=fC(B^XTEj-|=7k zDgV3r&4X`#xAt&-^${Aaudn@Xb?uw=hikvfRxdH;!mq-u+8Sa?3%@X>p{^}1Zc4M? 
z+RrfXp~;%cSfi0kZ5smSwJA5>BDo-klYck{6Ez)vY}utI{2t?~yU%uZ<%dk?zme4+ z)bRtHhEKcsaC}N@`N_C_)PZ}3FTxK5ao}UX5u`_n`Oqe}#n{zGwvJzx8h3rEanmY} z9K0~E%mUIXt}MU1VVR{h(&NM{H9L&K2j(2GMb1SxqkmED5RRk?xgyG!WEjqUeW3;< zjT!X6w)$wjME_r}t*zhC|3bjMWTO~mle7i;kB(5CYy+3urD98w~n&8U|pQRjuRuxuRpgyI^4*+&RcBhvPfjy*H`%x6w>HwpiM zvry?#{ZotrXc(%sH*3f6A8%wca_B{UofVa+b-@l|7d90$%s=*7CL0c+!OxLfs%clp)Fmty)iQO>Pq#VgF`gpYk8AB z)U!M7E;5Mf(4Oj9{o7Zs@VCkyJnF#{l+~YXzQK=AHvf11?%k|`naFhh@sEa%ICliT z!r(1(5j*XtV_K*?SXO(?W4_x*I4tWwKa(VODmqZI8WQp437&%vzw-~y{~R2n)EON` zXcn@3*%~X@$gu7vK4chD-{9ZOhv0E|D%8o(x=im{n{!nNj=`bS)W^l1I1Y!vvD%-< z%vmUmli?YZN3dr5`D2GZNN8=?4}Cyblr|D(Uwhg{F0_yA;PdXIj=;IQJX(AkiECa5 zeV2&^3h>51(_YTly(tv9$i{^|BejFVlpa3|CDw&*6+dxuuJd!&l!F*HrlBjBxgghQ zCLMO*+2;|t*sJ2pGQH{>g*dFl8}~g#5wzc7fbvQO!E}U7kzL@ zvF4Z)NTPNoDj_SUcK^}XwnB|8x`ATkCE=v8uBPz!0A7RQ zU2t1?_L;(yt*tt5jriqV9>nDGq^A%e@^s}wCx1>t=<9rOx{2xgT{2DRoJ zJ(k~m=`AOP7BXxG@TQ!9N6{IhpFA3IJjmmDnZ@PU)lp+Xs)g;jR%(o5kKj*mA|b7- zrpDz3Y(tZ|%2bz-(j{r`Pbm!$5(^O=)TB$2+XT+5{JH?CVJ?diT4~5hB3qKg79lVn zle`#WD%J6?jGC@T)u0Hw5=|qKg$0%1Qi_Ijx|p7E9Y$X=#vJ?q08xLh7;s?^z#RMk zn{U=j{=fBA1b}Yr|74p!+s6yo|IA21%p6hG!XYpX6HGPG3z$TCZ}B4~T4U|5)gJW* z`^Ymljid6RC{fl&+OXI~p%u9twIAVnr03+jYq-s=6ytg1$Hr*-SUy0q1~W*a4epw; zm?s#8BVK+4nfE-?gu4SB69+MdUBsO5*#mcSh69`kDKr)v-IiI>RmI{30v1{_kte^|i(-cz;>jo{k4iwO6=Gk=|Kdhg>;Z<4L@=IG%vjVO(yT zG`l3M?699Ld@(55JFn#|-r~Ue=F+f&x^LAW7K4{f6mGEQhd^48ahTl=)87Xy_yy*kW_Ig|@RD0yLFAgw*TcBBI1$*&ZTlKjmDA(g&youIO% zFTkm;97v#HI;9r`khh)!92alaEcn4-c$FR{+%2?PpozQeCMCvoAS{+qG4Q`#no_FR z=>oV=^;(h5nD%f3wL~wN4>eZ~7!cv-f^j=XMJnDiydYIcp6nEoszA8Q%X=LVX_SLu z_rE2@ILh$_5O3D&MFhMID7W4`U6d^y4BH7qiBtGo1dF|jD$`P+oAPFyvLjupucruz zQ!z+#^)iBT#zN1lSl&Mp-5xX_{IOZv%urZ}hr2xA*6Q3AT7N`wCHkPlu}2;)z;Q!A zTg$$1kU$Os6s;R}V^)onzCz7UOFPqY1EEZ5R{saeS7ceJN@v9$bXu5hGB#$@;vt&O zCy7en5h?(*a5ngm(Mw8^f`ygL0p=JI0ky=J^b8RsHVf&MzUs6sP7nM$7KV{{wn&cG zw=xvtz)|2sRu3UWZQz->Mn z?BGNMAYmZ|B9)*|nJ!aoTijhR$N!@>d@k(y(Rs}A|2$YjvVS4}W9^#~dvvVyRIU%9CtfuS=EoItKUN^8L8VLs$r3au?Qo#(dse*L5cdcp 
zP+gp{MQX(}7LHn7WfZIIJ&28ROOj~v{()YnDP0C%vxLDAT0+c*)w`ObLDOfo3a><6 zYchuqj8oyuVYkymIs;zO^Og;?jZTS)8LoY<8Nv`>c!q0Xh7-Ut;`P(ozA6Nb8LdoV zl>#1abWOgQSnsNY7t^+@Yj=wQXR2Cn{D7wqsD%$C`JzFTNPEbU)nW{eC!6(XXcO7v z$o_%Ih^;Anu+wr_TevuLk#Ji4kVmk=qZRe=`QxDf6;Iv|QG`Gspr@uSG>PH1G(e6| zalF^?TCs4hKANuc%>yZ!vbq7w(94M|KjAj?jA6UDEvvG27+cCu%0-n@FFrJ;o$;U_#}X*D@ed@x3q6rkYyhT0C4 zk%~VQL6nw$m|mrtyHjtcgFZI@UExPeLnF`0-!GZQB+LPYMR1r;?uHn4(pn^_VT^O_ zI3N?MXjlQCxvMM9;rn`4Eku+g@*-S{?+pF^xYjO{H32B33>`v|pyj{B=eB=|))}bjoR(l8fT@3p=b@;i}IlIF3)rh`^vpyimSD3!vxkM_9X51b4~=1xkFpj6E~A_anJw|lqtTBsH2&Vgt_H_C2uI{dbMwG!ln~5+Y=z@ zFv7zq&;bQpjCofHb6o)cJ2Z1)GD9`zCr|{{VOT;uZJN(O@(Iega#S~4WkC*JF`xNJ zt`J3Pq*z75ll^mn1iVF&>e4N;Ep_|qt-!Lta}!JmXlvuAh)_)+6FCq#N~QZ{EDF$9 z;V8dCFs4Se&C1TdxWVPv6VZwQyB0FL_T~JBi1j*DvTGekl3{Cdxw?vDlLYn^03OO%U9C zz3m|>!~~=3chYV!oMZV#j1riml|rHF2mh!&wQu%}w@Sok_RScmi=rBQeJwo-)<7-bYlI@&bt7($@}78{XzP!Hv1H-5;UqCV%R)n>(nc(;u@0-(4@G zV}mw%&f1F|;}lOH1;(s~YP>&8O71|-1t2Kq!3x#gi9?tg7Gh1UK46Ht9i3s>)%lbr zSL)~7WxUG-rWdqwg&}iwf|ol^)kn*lL$M>OXbgT+6i*0=Lf#*iA{i_39XVJef;>9h zu~@9`4$gMnjt#SV4BLkEt-F)0s&WK0F0KP4JFiDKlOL!X*jH|3;koRrtvT31)|g5n zH@)zJAQN6-T_ViWq4$Ri0KQe78W1_{z4SWo*`iMv#9$p>g1?PBpL zIE9D{nMb%_X#vLIG1k}C3zou}%|qGA*L>2L*`10cjXy1&6%=vTvQp)$g%|)12t%-0 z69tY>#o?F2Rbi>_mtyXK(Gl)x$#da0mxAZEQf26ENw`kF?u!C^uLQio0r9z@8xk`P zI>Qml`}E=88Y1Lj6-$$1Ns^eb`+P_4UF@`Ld_k^XWCeSsK(pc`f<44|0}S{yOiqb# z=Da8@)*4Dj3wHy_ZD1${Dx4Cvul1U<4sVBu-Gl(@R4K~7U+6*@?n1G*XzL|CVXw{X zDR?fNPsbrO4`CjyRgJ3HKsG>Db==AkpUs-d9ANb%U?(8#8XLXbV2DWPVz0?v2yAXl zjl^_?ITP2Sm|fE#bM${Z8+$4Bf1duo#$s8W!G`{!qPn6{V)+vKe6<-t3-^x2?FSA8w$dNarsLtqa7R|32o`=7`EAAJ32 zBL2IElAt&Bf3JA2tGZi)|D*O16O*XX5-r@DA*^<~!$2{Je@8PY9)-l2=#f;32m7SXpj3EejX#24i+D>piWO^vCY~KNT?#nL-N|y#RwBt|C&g-1vn9E4 zP%f{zW1xD9%IRneb3`uF#Z@HROlGz#w1^ILE1{Gr{D6do!k+1PUAO^}nI&+Sj zW;@M{;H>f{8}d`!YuFBMruf5vE50>T8!UuOJQnOd-4@`$ndM+FY_pk5Y%?(j%rk&P zdnsxeK0@)=r6*X%Lf1=PazAw3h57`A_>7#IQpsMRtm#OoUP|7G^iPS4(@kk^ao&u} zOAyoIc7pPRIw(B0v%EaMj^Cw?B26^5%Z<~(-^{##8FTWjB(WrehFMQw>+DtwVbE-{ 
z3E(MCh-;XbOa2&JT{$lnUpHWygHJq62)KSw>kQs=E+~C_$O91 zJN~z}{_w$rvi*M(|GOq^SVI2`;Xg$RWAZcn^^QEs0F708KuuG!A}37Uj>R@)sZxH9 zl&uON*y+i6_-M3R8Q?2*)ySTG!qEb#F*^Q=<;gd)S8`iKtEblyaW*vFsU>2v(eE6uJ^pv)hxVCOArdRH$LCVh!F zZZJS%^SdM0KXX)_gaH4pIH4H`t+;WIp};KrBXiWFqdB?KD^z`^wl=FS{w-g>j*MqTHRp*q~r(*OrJMEeQR&;=P5$uPJacydL!QP216YNnaS| zCTVXT3=EOS7`f8-xz33>Yo=eS8p`KXj{5SZG)*TOMJ_0}FrF)+s(6<94vvD(%pdSA zMZ1tYPz1+fTu0t#NmQb_1``Uo1I0dp_|I-w3#)_r^%!~dH^dozikOwfSK%2JImI zhFvgjc(djBdaK7P!EvjtI9+gO+5*ae_m{)<&G2dd)O%WZxK3{4#c>M1-;;?DyjB*Vq?namaupz^@sxUIs}eEr^8{;G1LCl`^#HQtxzWkqMpaZ3OTgP#JHOV|u(jJ8z=kAfA6a z9Gno71K$Ap(*s*V_k?JpiZ*Rn;UzN%mKGcuGe}1UEA@fXHOw4cx92IUPDr9$Z!q93 z({+|#t1FX0uRiI5(?g0bGfRwi$la#gcWrBeq`94Rj9-J;vP_Hgae7YyMLjE{2_I6pXCYm81Z>zN84t(m`@EGb+`u4!4FrXX z+>wgH)Le91r#N+`sYZ@W#E|4@fMDOm67Z`_%?Lg(W*}g!`!s-A{xOwnz7kVOWaj0m zJY!XcDk3>EX@NU2s%tV#u1Yl@8OHeqU|6gRfR)Q}rir(8BC(4yx6nh<&E?A;1U#9T zOC>!EGYwSMH0hI220;yBL72%(?!e540J=lrwb?s8Ze?pY)gfPB;@0#bF2X}Sg*!;} znr!up)eb=XC>_C}2gOl(%@4ZWNX^EBr@g=;J#REP1sBBgE^6nX=xY7PAAcZQbDgCA zpA1=HDOh0kb$l?pbLR@b&*f~PK_d2AAh`S(VLhacSCO76wIuav>u_)ah!fMB(IsqA z^eCjA1VG}%Qy_4!)0V0$drBt6yr>o}C|#Yi?qHnz1*+f%#?D+<7IPq9L{rw5LE30P zkP-U9_2Vz5oZ7(9gRVqb@zpw{O#od^MEV9t>8MD=t-MJHq>Fi;{~y)}jI_%Y0GRFn zUtL>!^l;Muf0O@lEfIhv{Qt`BGEv!}aZyO^D0*a)G7g?Np}EtVCH|oLO3w4srp$!$ zEC(?0Jz($VI#_8lX%LT1<3by-;b}PorJ+PRi-utjWR7GfY3GpIGC_UlyoYWZJsur= zH0&wr&Ocru2WFt^ht(E(Ld&(k(E*fyYY z{|FOK=OH5}GIrj(D%8ucGg+NT)1cp`OS=}tz8Pd82+ZOC@uc!4ascP>|J6sUrTo8j z+!y}nCjW1t4ZLKdrT9Nw(?}(2sp}4M0Ovl8a81m3JEcqtO?%pqYGO z%D}Gip}5_!&j2Iz!?Hg~UzPE82haLvZ(BofDp8y|pLnL1r$|BvzyA98_8s00b1>?` z3ovrfG{j(64=zug*t{$JyV36`$5RZnD3071WIyGDKK10s^?!)_x5=(#YT#ff(F}J- zEAKpdm^B*h&H=7JWvQd`^{-FHT^zdY^v}S^aX6jJ*8G*0p2wZR!7E{*4ocsUlkK0rO<%9DO`MgC(+jFF`;XqdspDX>lUw%}<{!BLK14ji zMW*iH+?@kLzNI{^3}u9XDID^XVb9t7IMFc|&iUV#U89S-xPz#~83NhTPRN>#%qWo` zrI69+)&5U>5Q8GJ1}&P(X11GQqzH5fd`25?$9~d1c3T3>0#~v=%r~Ugg zItSy`ton-J9B<(+lf8Q@54%HV^rH zrA_gqu*pEuYUwdS9wzZH3(As2yLG{k)_Vuk6LfH|fh-8#1=U^C{VsnF2t%C3kmO25 
z3|UlNxM}w<27x*HKN3rE{qfP|62Rx!|7&Zj6aL>v->lx~|JQ+POX&YUC@y5J{lR#I z{1&FL4F|m*|3!7sS!#bkBWu6gLe^E4u`gv8S*evwZJNeW*&ErsuxW}Y7}$3xG~_&5 z-BX=0WUOk6K&xcJHcQo8d0r23(QhHEp=J(${IUJV+rQ+;t@W=TZPc|k>8#HuwvN`SsVk_cK#AFu2t|hjJ@7L>SbRG?*59x_ zYGB5BBWW2_3Wv1oB-eqbH)DJr2+ZdH&_OLJBVT!cfDjJwW7t|Y8aRjlf4#a^zW;aq z!TL@7@AGVUsn;d=|64_dP6iWO5dpl)DkHhYuZM`qSmi#BnHj+ea(UiRM6uA}wEPE} z_w)%_0%c6%Rd22PgNnuem^YVZaC#1GEcr)E$2=~YpBon|2AM99!L&}>b6~@)gX1G_ zm@5nB0m=m09Px27GZh)LmOTycv!`nBZD`W+R2jJ_a49y;gefMf z)8H=KGGU>RI^04XY1>i{?fvg@l~;-vZa~W1mV}r4upmS!^hW5bhIgVxo}57Qg(akjW^1TOPQ6x7h4G( zhQ#0?$`DP;NC^-zZXP=r%$fj`f~*P&Degv zP!;1^*0*5!3H1cU`9fBokMVhIz6y{=Ia0P(DhxG$-xQe^?i(+uZzV&^6^i+NSM8j4x$|3cF)RWjPit%=H;F+*XgEXsUHejF0RyvEc?68IX%*DZP4HfIuS0JN*=*B zVJ9|f0%-4t=B(sf6#HPo5dJ*({V8*Bf|>h1Py2LH8xD5Pl&;{oF1MKM)y%5jnn_&V zrKx6dq2YAX*l+|6@xdq%`PEp*g&7c0f`zPs-V{;FUKmZBnHf>mPJpvdgceXa zVCsyf5Y=?-n733z3`oEN59U4b&N$h>ujxxbYrB_BAqaA%HpTLFiQIC2*3rovRch&#cR z6JNwEhk~~;%_nFp)q6cHR6VVi1t1@Ej0y*ick2s^-4*P8ZeZHYb-Qm;Wo`*rr>_N{ znY$Vh^Wn+O$^66%IB#BjK5|P0iE9iOqqC%=B)BrM&1E`GVVJ&mMU>=@InwVEI~&#P z&*+l-e7hQ_9|^9Kb2@zrcq$xm>F*UgaxIp>4zi5@2l*+3tM~sP`)hq7{*Ob!H~C-J zfd&`y{}2(LKKTO@rT%>NP9VcFHwAcVSXF5Ysk^RxT?qg$($%UqP~{2IX$RH(V!ohs zs4LI`tCaFd@J3#?h(pNUH?^oje=E6ATt%sJ2o?JZDpnzjfFsOs;}pAy47%7yDn#c` zB$b_|)yT48R~PK}qBDoy0(Y)Vx=zf1_I`qWUhHFGhY+hs@x)KbDaFcA{CV#9KGD+q zPJDCUH@k61a_&o=n8m8k=JvFSeSQ`*MuOU!L{H1kHFHDB&&jK0({vesNji%vmkDX* z^LfA22ay2cN-{;txH{c2kzlTK*(#5O(%kbKSnTg*`My zC`Aq0t*+0IG*vLEuE5SI(OX=@oUQivNKmHL6&!2n?-g#Ei@2au+hPe4NMDNv@#ol@ zD|Cq#F{`+QIXgqXSaxZK-Ab*w*iT{Ss@_j0kWy{M&R&P-<+m}$68^u_K|bPT)Iax@ zOaYsH{s-m%9+dOH*1x{V|GFOhxQPGnwIagK|9XDq-oFIL-0t||4!}4kJql2D1Sqz- zoCh%Wa}|1Mh5s)O5DvWnz>$kC-~vp#FfU;7Ttvxb=6X_v->ssj%5y;OEqfg}Q0liT zo8*(dQX%kPbAPoo&B-y6oQN6TCGQtp(V)T6%=eWhS{27L8~@IiIZotR95G#_60%dR z=8BD&ZaPN@1=?_oo03knpvhb@3nFsJ>bY&Gmuw`RBJ=UZ-b;5e?q2ZGq-VO)A<9m> z8Ng0nbF`f8wh7ymyy?if&<+e^@=?WZOitC#%PqYl$^62~l*=gmzO+-LIE#XtWyZlU z)YR(Y^qSZE?vUbn)2;k@iD>fSxdvFlsH{kEa;IN^6&(3#1)9Pcb 
zQVEMk+Hen@&$f*_d2zRI-)8NnSW8TN>dDgCD~OVWfpI(NHb*c7a5CxU9JA#czw^z!e-0oCIMB_#X0H@SxcI{^GXW)=hXTRW%plhEox= zJn1U8wlQP^w=z?L54?oco${V0>ne`r?ykF8VL5S?6UN}wF_!JP!dkqDAHYORmMn2O zwqYFvo^f$hag{4VZ|Y8^4y{?6WpTXN(cq-!@y9(#HftSouFHAz`W|E=m zNOG3aSRB-~lAtXXP&9pMvZe6=lOL(qSD9;oA#kN~#8lR#pD)2lQe|~XJg!uCKco9M zg5>jHapig<4qukLB`S2d@+tOSKqKyL%IO@dT_Kv&)fLGa4+W&7otk35B&v`>B;7b^ z{L?eN$~DDJtwfoK#wkdGTcs+XkPI`?u41#V!3DU4{}&Z+x)&-0INSfb`sl&xdinhC z!<+hl*Rc5*LL^Rh);-xk*`pCEzVIhe$&%ymrK7%y@%*fN+J67cq)7Q+M z5iE^DPNh8j4i%NAR%a@70>Oa`Nu_OcrBOWM@){&NA?opF2Sw>&H#T1M+Ut1Z_0AiUBU*%cY zD|TC&uSTFa(;Cngf+0y#_HsE{)qO;vRH}Wtmo+(h7`WYvpc3UWdHFvTYGqS;hhNmblOqvxCH)J_D3MOgiabb zQ!i>Scgn11I`Er!Gtq6FKN6^^g2DmbQu&a=ljVLFksamPR*8+cI8geO;Zt54E@_?4 z^sdKF`d1xg3Hv`4YfIe-Hpl*d2nRsP{$KxQ_2&HNwV>QZ^1oDoWpojUKc2DWlhe_8 zsy%?Y7`}9bt85MWhpVep+>K#MC$Jyk^!JNjm~ zt8)Q%AqwR}BcubE9wKTgu5Zw;kP=|OLkcv{aetxb1_+l39?QB6W`$}RxC9s%!yRGF zSEtN(YV~GR)2<#jxh{RBSXx^K4!V8Oy&?#aF131mZDwn;NyCfPDy%IGQJBORy*U0OB+~ z3ujWpdh5PEziezKwa=%BQZ?temb%wUaJrt$016k(B?aKqkELk%4bkAdoKWp&BAQZ} z`w5l+GUa*w%>eaL(7ycOMq6rAZV1r1rE-8%1-gJ3D4j9?w)AT3<6H@64U zl(xo0+_S3}{&C&mwr_1>E-U98h5HoUwGl>|Oq*1w2|kst#pK&lrV(o=X1lS_`hm_= z<&?9Qx_I*u%^R2pF-jIlyKFyR1UyJgGL8|os=5eQY z(mm?qj*Baq#vJ{B_3Lu|m$k1StUtJ^|8gb3pVP^$>~(8Ihre9rtD8>0N&?7+kj5Ju_W1{tkLU z2mQ{GcwxvAd94Q+zT7_fKkhcOSAAT6HR#KmoKnV1RB^pnr&()0-Fd&m(!1zlORkQ{ z-fr#S;)`KkYaVq+EAqEazt-IU?;(D`-~Pk#5xyTE`PclczY;sz$NI*n68g+*_nP^~ zQ~bT(`hdSjC;0q-(G++7b_e4;+kW~C1LD@LpHQN<)DDoHuUc(0d-qpJgRJoL-|rlcMyL74$_gb#Ge2Hw4-WE`(crX;e|p-(d-28Q zE4SF@Zbt9-(Qq(6B}>uUU$WldsC$6a^wGx-8IDav--9#Z7WmEcKR)u!8K;jFWgGY$cRMo&BD9iuwNASh;N8g1)@mPcdEf>B+CS_by=a}*PCBC& zO-@?rI$_)hST+OsAa5Rydp&-#c(%WFaxOaFMpnme^aj}vgJG{-*GT|O+x=0v2(VTY zCM^p@^4Z`FOp13#8_Y-0IC&c~*9HDcCwlkbI6G)z13I{8P;VV<<38Np;1uloW2T_8 zD+mP}CiefX3kk@9K4irZyXSaiucreD$A9-7E=k8lLV4_s*QWCq*$zb+0KPXk=cR@c zU)ApAD@+N%;zSWuu0w>&KZo4Nn<}Q{se<7c#T@pDW1|jQv>{}m$(_;~dFRvQY=(VY z;+SWz11z&QV_XD%(kV`nmo7HDgB7UnPriHo29oQk%rfIQzdxuo9t|M;Fxc+%o$R19 
z9CZ&_db^X~C*B{C3>Y!Ssb-=kzmi1G2jd~HSMnmgk2f8X$30Xq~mXJ$YvC5+h~9 zkrqm_L%^M4O?ck!9v)&+*chncbo}U4Z)KZ@xEQ)S0*TgYzi?I*<3K_{64t`dHGBE@ z7l1B57TRCqG;%P-lpPp5j_=63%Sw~@~K^GofQ#_T`yo=DMrmY)@&i*$R+q z_Mta4^TFY04`&93(6J4BYbXDTo400Ps_2 zmi-ZyuaF65{O!F2kW*m7nJ7ronFvfi-%LRFP>&D&od_D6VpWZ%fTH3i0w(7oBI)!vd2_nk+R>M6gF4Xkgfi(nJ@XXWBzD zi}@dynkQs9>Tu+xIdYIi=jlDnpIE8IQu7?bw62)s67ziXWOEB1g&bPZ0kq@NWM+I_ zYMuwfcJ(wDhUeCsr_0PPPJF3(o(`ZE;-Z>hZR~kRi|#w56|}YSu!|&b?aX_a^Y2pg zgc|*EF#Pa(a=MGo(|TBLj$Fc0^K?fRHRmF~_jy7;MzMJwT_wX8>qyjWXY}CR51J4D zI0twYkE`b?BLCTNIH1lxPj`;Jes(Dzhq~OSDsAC%J@V~k^o=S#V2Y?W@oDBNPlY)Gldh z!tDSp8P`^CA7*W+`<_RQ7o@Q7nD^%lQhmIw<93i00&!-c{^>p-?4qwNC z;JSeetku3tQJE|~I0>e%e(nBr$gCq#$zkIi=ywtlMlz3wEqr2D7hk?tHOV!243-3$ zZ9dNY>KB>14V*)$Ha?F<6fy6D45Qe|lHab6+blV;>CNY{ts*12K5i7p{Ssrw(J%C= zK#*oiG6)J*u{XIDzhr;u^gBaw9ynjx*0(Zy3km}H5;zgMpxCE>9b9bl^|r@jCi|WB zOJGg781t-q(0M5lk^13aAlYH^+iQfN@om2~tQ#}tknenI&KBt@g?j~^6rK3$>!%xk zAMW^-_0_e1TUq^PW$jNZFs~ZP+j*ngZ>%-e9>L%5M>z8H{;2gSxWHkW7P*`O)$I#} z*pGK{sy0IR7kKo$eT2~Mu#_1XlsIgNV05PkFMEi!!{vVIKLF~4Yw6ORe%=nMFx7!- z8@<&-B?CsxC_*ZM3WTJ$Bs9EI;QosMmGP+;8oGOfoK-~0_H1V>fEw%ozSXdZACxVS znVqm9oiOZ`BjW<7n?}F%?i{#HEyU#Xn!X;$KlyQrT4t1%H1vsoUd$}_Zo0tp);><1 z%~J&~DEzDB_dEf4LGe`eT1t$F5@MZAJMyDqBPg(mL);r=`a&6M{9Z*t6+=lPk%ITa z%3}3UV&+l8%-pvN?wPK*V&`UUTov>emh@FGr|=}N%$v$F75<2&(o&-MkO|RSA!Z@Zov(^BB$nNWC_L-!hF!XYXQz z$+Y)$Wm|MU=%P-M9I%WsQaza9ikWP)sr*v-LaJCY!mWt*PxSXJ0%$ajB^1TFpK zJbX+oXmO!D`vkWJlr$wP_))w=ooS+VM`ubp*$aKH0v{45_O!Fa=Fv%Hcs3hk=`1b< zK26`i-vnTNPu)yk1V-tSN}p5k1#fUuRY7w3fPDr&Z<9fs09A)w555&4g!6-%cPu~ZU2JD*xzLWq<+xk!P)s$@} zgv0wfTMlzci^}3JBXofbbA-se=l=Sl?Y#Zf{98a~-IC@3t#UE~#pEpY%G*rIXL29$JZf3lD3ud!!T&~w5J2j#-T;=2aEa5YHhQzY+=k< zxZ%~{)-t`Mc!>H0XPRlHp$0`ohC-MMEsubbID1HD*g{Ao#0(-rN~>yA6r?GH3-4Q} zaM@Ae(h>6G!5D5lodQjpIL!T!-E~NJ3LK~~g6w4x9^v^O5)>gdX=fSMen0BY_c1KN z_AbEnuME*=qor*BXGFOx>pI1~ii44RvKvo$q6)t9vo3TD$zjLgz^>$5Q?to5kls?2 zVR3Df^3BwBQ8q-~jtAN~_M(3#m!UdDIATdU5+|#K5gFTL;&;e-cb!Dgf0-4?1j90> 
zC@#J_eXOTm8~$Ue)l4n}d747A-wp>S-%-t{iVF9;^MXVC4ib|#kXwU&p6{{^>7pG} zP8@MS<+%m^gk`9n>MEyxOQ$)nugYa$_FTksh{`?gysL_bI0`pPmcSXyIg6GiM1%b( zQvxs1wssabq?RP3#vV}&`~DTn()bP*_RpK>M&=c@oS6KF#gBP1 z90!r$NQX?&j66p7TF9yjita+G&td;OeCUoEPsQuVYPYmdRK<_QVn}<7foeWpeSsG9 z_K#X&dp~Am$`<~jry5gqnwFO-c?vJs%hM-29m3vSkgJeST zn{gp6sL7rhIlL37D61>xb4q|y^3Q#K1tG?ku+ zmkOrO#E<6eUve>k`x(U-BIj}!-oMv+*BfuyB(u5n7EbP8oaYzb!Z?xZ*~K~gr$scA ztrHfpEd9Mni{xKw2Pbz_Nz&0B%?gNA80 zsG9Mpxdw07ez%4E-qptN(SurzCo=Td;=E1;*oUz86bw9JVdouD(&Df+?!jva`|hjF z-pAHCD#yXY7Un^ieZ?Xy`RFT@A!VeQW#-s?`|#ypglZo-A_8COe0DUuBTlF_9JbD} z7SsaK%CECG(95DCz>pI|IO#t4B06EJm&__XAaci#R1^Xh`CU{Z0kp;mxc3KxUZ>T^ zR0@F(0K1XP7FiPS!3Y2!a+V&~~5i3uRZOg8&s zCbtkB%s!f>uz|u{j+QCL@mHvnR`I8ZFSkf-8(HP<-^!Bh$BDJL6BtOBu11O1-9cTJ z74FO3`JdPCYeolv+c02#Bin_!hWb)u)zi?~QAdnmU|qT$*b2b)7LGjz zz-C1Npgkdwane9$&N>UjN?9w)f?_UFL|Zc2d8;H!cz&oPoM7wCO;%Ve^1Xy>)esm! z1uPc*V%G&WVZVp^Ulb`FFYAN%G;gyiGnEf5eY|24D$F#5Gn047g0nEyTa5CA$=-tM z;%evy8oh>5fJ5!=?>gs!{0U2ucLWK15w#jf8cqtH`$Mfl0HzNv#9D|#i{du}3_?XK ztO2eyb28=wfR@FkkoE%}*FxbCpc5U5{1?3=bWIR}v*leW2_x)VRS7&m{0HEJNRL~> zicDTQ4UErE#jWU&QsKJ>t)>X{~ z23p(=bQ2RqbX%Ze?LT^m+0Y!Y4h2jN(qAip#O=Qgs}n>#x{ zyn6H0+VBzltm6(NZ?QFhR!{;d-gFL&PiB;Q)0Z?~6JF5Ssg}N{x*uuMf!yZBNyI&bcpnHv=TKE-4Chqj~K6w^v1yHvWR@Um_ zbP&uBbm7}(#S|IukoW=gUvO4UchO4A9xNBcT3vlyuir2Jw1%IAD!(fJzWUW<45O-) zbLp+ex1H4Auc8YuTC!L|9C8PB1gDj#<`q5q2NDYh5d+6SH;-@MVF9Uw5lZ7i6V|=Y z8j`)5qnIwFff@+*h}e)d2H8*fpwH<&uK%M+o)>&rz{6kbWN-*EJ$i_q+8vge#l=OO zLJ1IS7Xrju3J7#@6@WmWR}P4^YCxE7lYK9hAs_&SdvN>Knyby#TJ7Dde)e*3);U3u z$87Dv{cL@8ef7SUOgvlnipG$-Ir01i*m;gHMZb#*$L)A-6orP5$NN&95^nsH6~Wy6 z7K=WO^Tyz?0pw{wD`xV@T}J<4=bf~ELM$Vx7efSteFp>46)C4z`f&OvHlg(&5pJ_x z_-iP0X%r{s0v>`z42D2I9;jiru_WUmQmIk016{S-=m?62baM7U10t3PH^~pZv)d#Rid*`UihN`E-i^?7pjoCoPlGoKTYWDe}#*Z$;VrL=4|FTE(@3~IHc<9 z+UvUUi2ML{9}0}qFT^;XZ4@C$oj;jyd(g=_ytOc@Cfc79tKu}`D!5nS^nmlBWNAt> zv^T_IH4RkB8vq7;e9U)133u>QV^i-2IP4=1NLd{`fN(b;yOz5adFXKk?VPQ@KYQ>V zR}SbFuiOeB8(;tpL@7Eb<)Kt!kHC#&^9+!j!x{W{_wIpKaH5c9<3Hqz3tp=l6Uv+B 
z4H6zcLWy3)$b0z;FniED>ohpIb@}^2gF(qgoQUr83LxzLEwYk|=2C7z`|UKi1{tq$ zw%!;G?BjzK%CGD>iWc-~Rc;MV&r!?%c$EDC<#*5jOzE}mg}ZAtYuC=kOmG(B+=QNi zeYYp~v$yn9!NG>&z}X#QMBN_M@BX<)%sv4r67|S%1>=OGJD(0Zr*c%A1yp}*8XN44EC8z4V#v;bm?ZTvKCZ$>MH~vDu;jk_z{KdILqc>c!Z+iveta%`S#Yc zmpjiIm?gww1>gu{oZj#@E?$OwdQ|HE=1b09shj#ym>jei8@j1?8O1jT?1h`U12^$n z?$rO$ixVhuz^-=S=T!at#@TjuzG;oTsTYpCaOiu`NQZ^)#5-WaXflaHXfyVvQR1i6 z>NriO$w9X}M1o74fuBNQ-+{7S@i$aR-Ae+d!^FFeYU@gN-E%%VAED=D82i(dUcI_G z`#(Q0KVdGraFcWkIu3ky5O`ZG8M*=g%MC}&bCVmEMr@XNECSG363ybFhvf+qZU8-v z#Q0fqlce6nVK2PIzYM(*)*M4KfyX9(g3xpThSY~Hw%FKnuWmX)ru_si#i8mT~yB#2Qo{NTkM z39_s~zp1OMj7Gn4cJ^CedY#?XXQJ`{0Fb(lK5F>?_U`Upf&cG3x#$14@vNG6YYe&v z|7(r>OD{iq3#Z~MtNdwWm*Z~K1DH+cB^zjA|_m#j+wtX%)S-R)klmtX%+p6=YQ|J!&TJYfHm zcw9{RE{Fzhz@}~tOaM0gqy=Ba%NMlaGJoP;dbtuO0SHteT7e+xv2^UF@E?%EuKgfj z5%7c9AK`niPzd7|T{UnWVw)oGJSrD9g!x-cyk`5E#cJTTr09mSq-TV)=Ali0}Cal!z(AP`n(&2yQM{vMjqpUu;krlu9ZusZd05}zpP3tw;yIpceR;`fd>1WW>j}Gl%f#TmTIC7Xzt&3Ie~!Hi%TE zBuk_QM*$>i>Vt5kcSdk#aL5KNu;8c0a9}t2wsKtwN&H2Gz_KI$i;d_fRyV#M$l@s2 z!I?&DhTu5Nj(+T2H24*8{)@!8b}YnkxFcGKfK_qIiajB(vyEgn=x~t$h|@zZfXg=l z;9>c0I_xE`SKP6Qf{oD@;5cBve=flJgFEnoB8UNhy>94u?>WG{k7?|_@TNg@qt@^t z{sUYu9rMo&cTTv<@#K;3UWQQu2V^#hU=fHF#<4O1W`r$dgmnMPdO{@BJ@qY@?mmAJjol3m#?FtmHmp- zyVSQJUoX%Xp4<5uoz4e21x6P&JY9{fsS!u*ds@r3-BgyAN0VH6`MOX8dNqpT?+t%4 znWaQY2oy1i*u#ONI5OHSG%^JaeG&!!(Cijpvnjo|R>|-9XH#$JU)(UE$e3r#=Z%iM z!R%5s<6kiTB8XrR$vBz?BP47=#kj<%+5w&Ep(ZY8+Llz zop@)Llc)s3%{FhPLay;zboy;FDSZ8jSd=gk=-o<^gNTIgLI z1Rqyt8v9q!k)o%r5a2$#u)9dUI$cD5n-)YjoOwP?X*~0AMsK1SyC%{L{0|;}Njy*N zhQMc{ctYDe!`&O`l(%sVP0V7DGoBxw{S!HK7ev+9hp&z~MPH&QCD6usjR1(gO7@(s z4mTc-{S??3;7P1dQn2K8F zd8p;WM{@_mH%=S@b%B(N`c|g?K)D=;*2m9%JnF;_I|f#dXCFYR_YtN9Q6LxtEEU+? 
zu7J8n7Sd^`F?|5b3mfe~&mHWzGZpZ3qOfz}$4T0WXQ9Nj4taJc(8s2fW9-5tP5MXJP4Umm`A_0pM)9#FG59JeETvyMZ4M3wVPuh^x2M=L$%6HU;cx?Mg9K z5vvKvfqD_el~5}@*&$cMpBHVGF6;=l4IxQtxl6*BUm}SCW-Q0riqY{rb)M%LDcka6sm#j}pFFLstbhw`J1e zRyop9?D9EY9VLMq2(cp#Ff#O1E8pTTnl3_v+_Pivx0#PKCIL~~zfs*L>PvtS)No^GyZI=SU zIgkm5IuawX2o|O2gTr%;v(&c^qmSe&1g$=9;eEX1gwp`(%;2nw)W)fY?@dGyh+Lo? zfDsS2RJfn5us$w|zP^7No#;EQO)#;HQ3)sr!vX7Fp(xoEa#lAZYgueYYu;KcV?az} z3T17YTmz427T`>+0G^{pgEP93)+XnLPiHDO=0YPCV*K8k^rz}5V3ZRD5N{wz0^esY zFaX>H*Z{|?BM%jGdf)Vh;Vy@--}9!NWPHFr@h|W^_%GnIVHft8v3lnX#}RA4fRz)0 zI5P(Fb=q`vFdDBI^btv@uiPoi{!)4mbL4CTq;1%|q^1kV%xH>>V)oXHibBq`PhyY| z0e$*`b;{qF0-1QJj_&eQrc`(%erCx|(@JGf=u8#aNH*C95ZT2n;LDCP0I@FlxkITJ zrcTU~_?o^$l@b_IF_4M- z@jV>Uqib&jxS_6l`>*&I{czL)Vg%eI&$b=BX(YTFXr7@5pUIA$BbsuoBIsy(qQ?;4!9j-1L#0g&O5zo&4 zn&rlf!!@U=Fw_-|aw=g^!?OchI(x9CS?}KtMpt*9WOVpj!qahaFMzF8 zz0omb483~RQ@{ytO(BIGsz+}#rw*#2} zH%XyweE_`d9Po3jsN+fLP((R7nuf@?4M)s_{E7D{d^Ek{s~vr8astbaf09PBjX}P# zZv%`Iw7TVOzB&xyq_{3>7Bu6h_yP;uvafS)fNxn>>MCByL;SQV#VB<`6?IB?A#C`} z3jkw)C55}!YU;~&(Yev5-7*sANT^f9pw2raD5dbGQQ*Z=1`uR>uDx6DWTTECrHRep zQ1EM@Wn^l?aspdb;tkB)fbww&5mfZi`sv>tjd4s0D?L$RxjIc&qR$Q zULkKjk_W$1%BpYuBKad!hd;CI4;1g6CBjYB+yW|m9&ys72wlU_K<9tO~qM(kJ z-iTffSZ{l~XPcNzKXfOiM^!D;$ndXaQ&l&%BYkL18;@tvqT>PS^wdgtsKzGC>);kZqiH zYa(@xnK7L(ZVEj2%HuY^WVwd+bVEOy3GjyF$RB1$Pkz;SPNnhEzz^}36SauLu^SHg zzi6I^D(=X*EJ^smJMhyq)3*s=wpcO)IRpTB1v>&_0d{e2fiQ~LHEdMo4Unof3;B5Y z(flGpfeIu;-aN>NSjHXwiUu4|V1hk_9@I=+v=~Md#NH&j;ugt30D!_l>=Jq|@JRni z^>y&bu^($?0f*0Owojdt<*7us_*!I0nTc0T`-+5eJ$^0o!@9J<;Ax0Dov@wDV;DmCw~$^#lmxC!AZxMlWUwhhbC9r^SZH&pCC&??g#>%_|XCs-uEkr^KwR7W|=cY_yR z;G(rp6oOcB%-_M%hnL7+>8*~IrmauC*r)A`S~#I!OErkP=V+G77SH+5rJB5O{Qxk2 zJ_f=bN5P1F%U{6CmTwkm7Wfjfi>%o*{-U|O&XYQ>CE4L?c$lg$p*`3{ze-vp!pka zxP&YX(>A5Q{hAgLdZMMXDZzag*O3li;T)xXtFJuY3YxF&+SM~(a}8F`d<`~SHS;yv zaK#iFZ878BT&tzGisd4Tm#&xmagOfcf6%wdOt>wWNtnW1BO%w)L8;lUQ4(W!d2ZVFu zNI$|Ai*FIso?$_jc`9Zg9I$q$GjTs6PlTgf%5UP6XK)(dX7uH~_=24HS?|e{wj8q2 zi2X{3io6W3estqZh4+$pyUp%T8g!w`QB-^ZFq<7F&3+nTR~W_?Koaj)2T$JMRBUlm 
z={`#~rr1xxp_Uy5u0NSyPotVnJ3>L2UB70%mfbyRMe^IS4s(IrgpFe&Emc`R8xS84 z4X00<6*5cRB42E8GEHw@_;E|S&nWI0HE>Eh2kb%3DH_YlX)OW9GMjQ7vfDB{4A@&z z>_taPW-6%LAR8moA+5NN(nn{wW9PKV8O`vimM+kYdz_2Lk1vFI?P$yJk>(zj|E<6J zhgL^e+O|7wTeUX4y&n6+vGj;c*yslBW`^8s9V2&eU6^cd778fX26rW-&0`ZU97R{* zEC|r|ztA0VZsw~IF-G@4!*LW5xoPcXy`!#>ZCp-wkXjFaI@=QNH!&JsW-Wd9{mU)< z%>KgOOug_7;~H)1lU8smub(s5W{y!ecS9myt*Q=$TQHlbg@L3X^zf`F8(x-CVWZcq z4bCudr$7nfr`S)uH_%NJ0`~u%SA<YZ3jhu;V6oA3N49TYC<)U}Ai;Oc!OFxgsnBm1w}c{IJ;(TOj0hOTolb!%^mrtQ_S>BOMGZrF$*gAu+LJ zwp>?@~*}Ad3%@5=C4I%=`(K8tec_Bn*2-IMlL?xUl>$@rYij( zv?9_VhP=%j9MXl*M2kwl_A2g&+2VE{D0fsoxL&w zG-`7;Uo`_(+3|!A4#XHk@1d9F!uZ6!As;W{rdTaPKCl_#yzrBFMq597J{w)~({N_! zGmjcMo`w3<>1yTCAUlUfj=KY9b5-pS zm{bnP;u5$d5#)5m1>IUD+?iWo4(uAozzUdc+MDHF11m>ibq=%+sM0-f&Oxo(L9p0; zrHi1smHdZ037X{5)J#Y-u+oRj!A2m@Mvjn(p@ALGQ~u=-KD3!bPuY{+4jk zD}kfnrdQSd${h7dh&glBt0dxv&Uz)CdV=H6{z(%?OFDdx!m?L zP*>r&SB9+`*FEjXv8?l683bkSdzq2R+zxyi)I}G*436*BuFH~zeEEv~Xx@C4{WNy# zD|V4{?5phN4te+$2a@;ks~pLioc)SWS>63I(EfuR{))Kzk9GO0CYvRk{xU+lcl)b# z`zy}EmvHz^hwuEhDTU`SIu>)G7@Ofcqk05m&YkqclBq`92{Z9pyW2549# zSYM|$pdb1@47DwTSZX&@an;6K)s!Z~2AT<_58%OFK*9sX94 zy+K!}5n%Z_dWvCT*aMo7f;)os*F{gZDE9x&*;YJohcwsthdIOeM$YR`V-L`wrE_L~ zq3Bjbu9-;-j3kWG{B&~mW03i4E(9^Yk!w``+SA_WxHB{=geS588{itB@|rZA7+>oJ zGvL+PFV8j0x*!4Jr4UTW%11Og0XaJ$bu#P^H$odsPEkV3B*KHC9LblpO8YsC;uuiW z5>hkCXxz@4ta-<13nrkDsuFSR*3LG5LWf~5RF+mz=x_iy>Z^A~rImZBVM!*+m>p zXglVJ;Vo?BL1ZHjwHG3%J9NIOauFMdv8?9YObMSzT+hiAZM%xb#)tZ$gIK4oQIRd6 zN`PL|y5w!~(QpX@&=C{HxDW{=!ToBGiF(i$p++%|3fKV26F(tFk*N=V|14%zOx=mv zhbUfJC~}Hx!+(@y!18X%?vHO>s3?kP-3?L875nZMwE#yb(d^h2wsDt{+uW*)2XEz{Ai*YKHc( zW8@0>miJkhD=|UP7L?N)1HR$$F;(J7N*QF`IPmnj2kB8ui2Wd+FS&d!Rodj7a!sh# z;s~VzzyTc6!jVz-zW+Mfe=`z85Fw3o8#&i z2G(z|Srh>#mwG4mE}=h=2+b)H;>H0A25c78=a9i@ZGQjf5p3d zf8$C?g%V&b7q6p~+G?sfIpRtD4uT7s(?NZYQ@=NhjaY?cWfzu8u z^gDPJFfF#*fUC)7bG+=Ogsg&P<*O=2Rj6#`M`m^8EV+7fGaIu-jm6{|F5@J{`d^rT z@Td-xnj=@ECp6uUqku`7;85yd$X4U1Ac~4fg%$1ECcZ4y_)8-9zkcD!< z6DR(s6ZjY2@MaizWO2$7XtOw#bZ%VNm0ZsmX{FVOUg`{{tXU)0W}#~Sa<06cGBtDQ 
zjF`p_#j8u(H6ycFCC>Ps43%5#D6`_urZGBgxH<6+@pt zx2?mcJL|*vGI3-uG&8_H#e?FfJd9MtkGq^>yjLi}up(ud9E%-g+ReqfIxA{NO@hAk z)A4LTp`S8bb#pBVFT2d}uERPVjQax<@3^xxdQV<(XvzmI!A>ov*(f*g!yvkRFAP6& zAHDDr)&?{aMx()d41Y4SG$Bf*(T7<@smmZ5xIx`#WpY?7;fEK|d(^H^Tt^(xp{<&N z0(3Ejjmpw4$`7KA$`2_5U9m!ToCUtD3n%pjo)ToeDfYQxhbqw zuR(>h!94IX4o1=%WWnvKOV>{t8X1-p15xur7W1x<1}G>G`Ekxy+8Oz9LQdb~dHg-O z1kPE1kvfR=#`UgUziv?I>iwRVZcxw<(G1T@*LqnWB*u?^)S!Es(B>rr5aepg3DZuB zHAm_SSh{%c^}d82lEA(4N|_4(O2H9}AIBaX3buII}}@z zx+YdThb}pl%0c&D+J96pZ7EYMk><5XE_svu(!Lf;`C8jyYYDII-u4>3wrd+NdTlp3 zoy_4Jx60$lf^OON>Z@|fwzs~7OSZkir5v*DO)l+@ZEyC=IAiyAMDWo$T;S}UZe?Dj zbvxDq-q@>L91Htfmo%_4Z|jowm+`eOX>vJF>yk#7_p>f(_)B?N@7}^f(N>5wad{Pm zsJyH`^ADrgixTbV-;uYeIX#VNEpKX<)1Qb^YB3yfOO`X^D?63KCb{LJ+6M3VMN`sw zOhNP}14tURFbTOPY1hTal|)he1%K5H2;ePz|)qAZ5Qz-D@7TOl(Ww8Ao9h3rj@_ zhW;*#zAOQ$hC-`57fom8ym_<9w?Zd~+Ce5hDl-}KCm!&r);I^9+Jk^!9l)6;maYSu zSxJt&5T?mi3J*=0cswt+=?;Qu(4DwspZy{l&hV@xqB~A~XEJ(_Rl?(yj=Ve)NVNXb z{qr?GJiaBPxt=R+uPaRheA6$y+3XD_O_oq z`4ih-B@kuL3?mW%D9g5!AKX{+{0=L+{g6+58}E=^v-Qm%x|biA}WKcsTY`C;}3cdaKpvDK)4*fA6OOec);dqq-^tBFv=C zB|!CVSq=~@@77vmw5i*gW!9*>HwsUrJX%U(Fcmfy9r;_WTg)(W@zZuNOfjBl2X zZR67LVvg(3$hcve0#}bvx`+^bQgeyWq27(6Bg5Y^TSxmBRnIalyuBz&Ph@fZOZoKJ zrU;v1s#~LEZcbi^>>#v1vltK@P0$KRrIlcAFde%v1s=*4yl?b&6Nc_>)%NGCn+y|+ z4?(1Bku9+-)O5Zxh3cg{Q!QEhnJzysq94_gwI7YJ(GYMMGRRu08Jkve738P&YZ+@x z|5q+rp|+oE-OBr8MI>{q+_Du|4BT6Wh#XipMCiHTl3eIsvtkK2^i3S{3Li9B22m7v zZfLYg1;jN%TbHIS1i;b6El#WY!Nb)h&}2nF7!9}ZdF+oyUTA3fDL17(AJ*D0ZFzE(vKTBMDINMP=WK(|c` zI!7#me|#aFc+k2CU8Y)&alZs1q<(2 z-(pnW#ME7Ye$U0){QWB*3ARYf{OjA$M_jhQ^NaOY%J(udT8hA}+poGMpKAlotg7zp z7HoQD-+ktL$2Vbx&aZ15w#4B13TWZZZ^Rij+;7GATk#)%E7rK*S~lYr{{Lsx|B3vX zA_zdz#dMcvPWmxjp^mXm|NCTjx1j%p;`jRBTX>HCd3buxULU@C2?YB)bB@qT&!IpR zpFg+w*B@V=o*ln=trz$a$ubJD%;Cw2t;FGUsugNIe#}k+GzjDt^eOq0UypqmSp)0f zHRr%XW;voH+un#he%xyP{(A=%fzpFuJdda$~_z!XPp3JOgN@3ku-09 zmaj(O97?Mak0REfaj3Km9eIPMJ8m-5Pw70*-kR9DhM*2 zgP~W zmm3Y@EVK144kL@Mw1$C{M1T}SLHlk*`%6Vzn(Z?Z=cQP=6gS7G?8*^_;_z^F`);63 
z)LWs*KcHJ4c>C&KaKd?i`sU5~`;)`7v!C9azIgUX|anO8{d0gbZf zu+v!xQZ6zLGSxuNAp?GJn|E?X600)f8)(C>&%5BaH0s=>ix7YSS3H6Lq;L%E`1s2s ze(>zkYY(=QNAQL_=GyQ=UnEKzajTBpIuwUuY{y|HMx@GE`V4NM$z2zBHg8i6nIK@D z@%oo5>&)mnu`_Kyw2)f6HTPnLA?)S{U5s}~!|q^+n?|Ev`RcA6x4y6uo(mF9He`F& zAW@ra120bPWsG1(o7gVT&wA$Lan+0^MV4pOWFfc{58`CdYyCoaM$V$jtk0EUd9oQI zMT(OfwIKoK;{edk7OCJ-Dgdx%X(cWCc-Ws=iXj8|9w!X0JS)c(%P0|qA=f{$T#Qh8 zjbYCltw^AOE_{R~6Q&oe{pX}3AOe}JmW^x*ep%x~dr)75{e{MX{wz#bdh~XlI@|Dn zy$030(%I6O9YFnTdKtSTAUU%83Ez{Ak-^zqSQ*|zjZ z#?>L(PsA?v2p5hcn|!2M0lH+od5y_DU@FzRUg74|g!wPGfpL%tn-afGfy>LtW;g|C zhC&C<(y^l(f;@X@;D>V?ll#HR!G}?JoKE;CEyadoZ}{OfA}h~x_cANkQS3`+PXvjK z9na$rY+6cis|E?}x0nlelXX6God&l3Fq0tI!7~fz*2D{=o;0tKZ6{crPB$ICkL1do zYFNXf+Og_;@21U7gAClZNvQ+tK9_Cna2kmTz`YjnN3>nhnz4 znV6PF92t5`!6+a8bgrQnN4@OGjZw?Suu>ii8;vB23S68Pty(Im!eRo`j5xB2YrvLm z43&s`NKnbLKXS$ZwO>+#BV+vu7I~%0B#ux_OlCpqPXo^(B2)?rW$tDJ*-%aBAm;ny zsg@kUJQY1Axz4U~pttLWRscZT4YfbNv}&)>lh^;ghy4b1&ZsqAaTr3$(8RpR65C7r82T-3$xwbW*e`606n8{A zuwpqGfghS8ZI%qAg3-Hu3M`$Aw4bH!F*6LbWaNJIO_8loXzFV}DW7b!t=DSb(GE7* zrGYK6fq8OJJvZ5;N*WHujhs3&=v0yGMwz5EZmg6RG`7R70w+M3D!9AU1uFY!E&P57 zY9&po)?ViYocz6#R9+mZ{>v8B1_;&-s_Q3<^0l?H?c6B&SS5tS2ZvU*JL{Rn@?vL}k76 zxC3ORA-0CFnyPq;Sgaty3RJI>2!&#rC{Va}Ws30%)F@?V$*y40x)!uaHc_CeUJ2JP zBd_uu#-@5|Kwkn^^~6<%H(u~eXyfu!v?`@_DRI<{l#4rA1}TzHXi!5hraP7o-VF*b zdM;LhlMNUI$SU+Y+1zR=2?O8CL_taBoJnn4q>oE4rSW|mM-z!BT~8f&Au9higC(TCZTwXb?@viGNUwTlhE z_kfd6Y_PvKLUW%wO2y$HqA%*}*@Ta#iWcoU0pnh(umH=n?)rVfW`01WB?|%-{pQs}L)ozGCQ;A!m@hP2H#Um>3R5r`Y$d)&&r+@l~oBCyD{e^T1YBKXy)HRxQ zO;A{1X%}x-YF4=AI_3{E+t!<`Mb^9K|J`WW*C5qO2i?c)2(Pmd-MD3XJOcy2+~z46 zgtmaIn^>7-QMeN?Q%PUDG3Gf-oLH64YgiYlX==I_X`xI@Xq8xwqRAA^yWJpQvQ^yv zBg)tjUc#pd##_Dgc)Zmi7Q}dKr5caC@qxtOL(z_TYhU<+0yM_h%3ijMZD7G)BbdDVR%@ttv{onR>Z~sZ|{_n}|)1CYKzuS0(#H*>ki2`1bSvW52PN!qjy4U&^ zn78Dn#QdcPegCK@zy8H0(LtltRWX2?^}o}5`ZT}(pX}YQ|66%-Iw6Z`tvmIFBi8}D z>a{-j=n*3PIxtU{i$`*dM#-H8ma=kf8Bsk;-Ab6qN^N(0;@=z|#S3M0r|2|QP=@ik 
zB5tEzq@M3Ft5cQHpvDWC#f8fdKk;YL0}4a@MU64Y1o>c4`Eh(RW)Dj09U}Cb!rRaUCI0&+M#SP$=VN$uZbwpYv3zDiTtYEByL?g$dOd4p}S$Y$BpxKt8Z;`jWdl-916B$WvDil5^OA21d z2wt`k+Bmt6xP12XVF`qL2ZyKU^mf8;L_ELjGFGpx1pQs(s))B4_v8 z`fb_^mF-#4XoV0KTm9opAHrimd$dXo{(9cUG@GK?{27K-^e%5W^aW!hS=%g} zqcHTNZJ&DlTA)Z8W?kjcA$$qm($@nP0(=EemV8r2of9`sDdn)eMQOeyOIgQN-Jn6K zayu0Btno3Lth}L|j-UDNQDklM4M}E`uA%xi=6HjYW$dAIE`W2M14BG<(=lt8+}fc( zGtgTyc6XlaL+g2mn%wnnITNwi&dtG1>X=E%3e^|rH7bYzSJEmS9%>pl$ z`3`s~m`T zqmkDbKrCDbq23?|k6S^vKcGKpgVGofffG;Cs5~!PHp05asyAFT-_qZ%hWw3wmghCH z*)$5gIKSa*FINM+-pfj0k0>Er;3kR5D+{0U0%ERWJpH!>p+AR_`K><~KKz@|=8Gl`oJX&pMP6eUofz^)TmqmcM;#le%zt6phO^w0sbt z8t`q9e{1;t$cKaby?1l!T@=4s9nkC<>MV8PyeW1X6A#7FqKh*Xx9r4lJ`&t9IE z&Z4!!_0iX=Nox+OnsG^O0jj#Q1T`-$SzenXeQ|hx`26s!d}a(R%j}6Y6CUl$snB=4_}{c7N?oC{mi1=TZSQ7!Lw*q9|- zhIHo^k_|UYEUFT@o+2rI8@PEfCy%n1|9gIV`2O1;4!=7qA#}@W(`dZCER-+9s~_FC z0Ou-1F{Ay4B3ZTcYf)n9ogBTY{463C=cTbXAzyq4asMl5l9zJ~n7}@hP

ul_p1{67&WBl^1$Ra)POB)3U9&P}eX4cHO)L?tl(#nNzc}Y}7d9P)r zb1m`i?|P@7^FI5fIs@82e=z2_j!c%;B3Sxdp+-Sj%Xsd{uUQ0UnTl0l7Q9~uf5>HU zYEE)3rL1Nl=>ovjMMV`C2_d{SMu(Z<#LxOMaMKJ)q~;nPXeJKXi!bB+U&MD^7yKMFIPRG-;B_&I4rX9Lzqd)*wM}iIUxUc z6`HsLqsAwvi&IhavObpA#mO(E0<;QqrkKkfm z2v}_TE6Ye;6}bMyM*TS@nW~#|=&7Dbu|s^7xdmF%mzM+JWCN6%Q(F@6hMo&K*vUlh zg^#XO`A(Xm(wb7dbU1>YTxFP&&e$p!N8VYS*+35#4;o;)+s);*T;{W+jno#;YX!XL;W06$s|id-24st&5LWW z4aHKORRvn&|J!OyXCVvGq%%YxHN}jy#w@;_*+WQF+eVtUYO@<=QJ>8ta> z%pDHE)>|XbQ#oawjN#nJv#Nhe<3C*uJ7YL5f)JG~8vv><{_m6BeEj#PyHNZ-{?jcy z^JjN!6m&+>PU}9D(>yVf>XNYC#aw1)`m&>7hDO=Pm_KYN|2|-^VIJVkbT$b5Wc)fx z(QxDDa5Tc~F{rLVN10YGv02d^aL#lwMyfN>jJ{#bEq={MY+rhDH+U-e|82$p-F<3_ z|NG=V{`;*wYvTX+Z2u3>_AAZ|%clsfjxkk}BXrIkt713F5^icLnhi}mX06axdmu(& zD=De=M&7=*RaU^0tJGA9B&!FJJ9_R0E}X8gjrxK7pjBv#)1ui(CAhi%sJbU|vz^Bn z3210B+{m|Ltzn*<%Mda52%C)LjPwAfsCh_=DAB)KFQFPYU7Z7PwStl@^Q%pG(iS}h zib03wdL+M&Mjo~U2%dLF%BLh0jcQ5^`Kof@vQSR_Bov2ptZiGrDzs&8w(hLZnV0|K zDDYM$_=%_P{J&c`|L^VW-sgY2mB+Gcn1TL8=u)OW#{+jLUw)o1H3U^Hi5z_jgjXYQ zRwseIPaNM+YP=f}mg8B#b?F^oYd(cZQfl-LYjZoVg3_I;lX*11wY~DhZ15`x z#$qEdv`>QmXnf_S;w3SOk&OwnbGf@5Fivf=bR5MZuchnsFnY>3-^CJJ5TQU|7|?-FkiH4<7BYL4A5C0cT;z);Jat|t7G`} zS)eu2yUzo?_NScxuPy^<4gcTi_4af8e}DTv|JUt2CCc)0tKUt_LcMd@LhrYh|G=%K zBx9&argP*Bt*cmts9fFo@`lz|w%;w#^xalgvW$ek=Ht(Q{8PUFuPy^<_5Q!t>pkt| z&wu;7JNNtlZ9MZj{Cwejq4yj6x^jkUo6btJhT7Ww| zD^^~k^rBh)MG}mb)m|vYs5TF)N;2BCNrSLgmu9q9T_@`=n`pF5J-l41QG-XUA=zkY z`*$_nsJV?a+)8d|JDD@#XvMa2t6R$#NIIHl0{d&8;FgY3UeNF3zqkKk02E zSO?`kM`_leB}b`oH$GpM(#jdV&r^Dzr?h<98c^MWnM&(hzRy)!G7;;_R$A=iKRsWm zcHmn>#?oBtfB2lG3nltX0{6HB}aIXf>MUsK2Rnm$h6oSwgb{r1)K z2H6CyPM3`kBRZq_!{vz!DYEcaoc~ZCdyNaxn^R!$#g8Hnj z6>n$vxgqa!L*D0xyw44JOSvI64Z@1ELt0v1SANJ9otajZBQg)pyjddea8|{+M7=MP zR&llY4Ch%u>quN!()yyQ3w5%Y+t{~ZT0g4!)#TpGyYQ9U=dUvVUVXFc&A?aE|J}&J zX9sRQS@^7dEs}??1n9M8;;ZRzxm?Zp4w+b+ca z+TY&4kNqwImT z)ohv^yBBza41#3?nlp!BZJ*RYeHOuDJ1fc~*l?W9#SUTm8O8K)&Feg^MCC< zeR41V-^!E8|M{4bJdz}Lx1unT!Y{o15P&7`lXopA)i)F3??$j&E(X7uy1&glEwM}C 
zR@HV4!~%|HqzD}i_V>JSXbNP!wW@~d+nD!=%PQ_~twYJmHH794-G&tTKZzu+NdC8y zZJ>_-KPl>ec6;~w|66$qr*T{E#~Qtl%+%LR{Zksv^PcU46`vwBTQY4b~JdG4Yz zN}f9#n>-2GnCGf&Zlr$4IKKIm@P9z74~NQI;s3jPdqw`gw|~$7Z{x9O{qlY~`S*fE zUI`K@N&F=!2=hn}qu7gRwRZDchiGJ}Q$vXra1PAS+(vSn&0KX&i?n9jmcbtX(0f?Z z32{dwN{!$K3&Cv#yJ=S?R!m?2kxbrDz|U#qhnY^-W^CVVo}d+Y;RcyfQod=jbc(eL zc74sjNQSXH_0Ii?7tJ&stMR%CKqc)g2-P?Wqd1?|KzrE)B5NB9!V`PMKQIG{))+5* z%D>^KN)L#tCQI`kC|DIcJS%3jA)YAhG&@!7vN9b`Dbrj!AC6ofcTY}R*TdyMXY2(N z{}SlXTSFf;^56E8UOxZ#&XarlzuS2puoE}Md(i~g0pldVuE$=;1~Wex!J=odDnGcF zuvS_R*!kE`STdW^eIZMLYyyTh%zTML=`Du*BK5C4@`cdegFFKbgx)20q1l+m-i7}W z1jlP1%Kg`-!`_6!4TF`4H4!8>^4|4k?G_x}e) ze{^F%nkDS`#Y^aL8b`kZduloU$aA~A9K8A(c?Dnpr!Nm*yn5+OMr-M#a{c$7>_2(B zms|h6r@h_#^?w`BfB#pf)4@%#BOLp0&i>C2tkd~N3#gv|5bJ-$1~=?E_Zn=q_$Ocn zF1v7(l$O1Z2f-~6$=rbdg{Ml`-EWY!%;3BfO@Xg)7Y@NDP$nn;oB;>{EV)TiZ{n~Q zUII%MdW3poXe98M2=(3>#<0wxgDp1p+^d@o+B=}N!Du!lwgMxO%6MQ{t#@y(P@3_s zS#-gN^UDVMsvdN^*VosMxao=}o2}M)?2f#N8-D;9gR{BlBE5Ej9R}RB4@Yakg#)E= zI4F7(-%C|PAWwt{36%Fk^f3o0E<8LFGi*1y4tYzd*#HV8Zs>an#~@+u0t-#pDDo0= zw(=)aV3uxpLy3d{^)&h@MzJM|6AycK43C)H(|Q2h{D;54IBm81{XY5SKV&EdVD7?* z!I6mx7g2KVO#oxVu?t+^5wGbyFaDNQ{Sa0YYzdQ_PRs>6W`?MNu3FghahR$(1JonB zsdYApqBH>vPLZTB^|HydD;a4U#YXThxF?45*P7c=LP^7Yt!xN4$MsjWlsM{nA&C)v=pE=dk!-U?~6q2uPzT zHt|k^nPG#SWXtJv1_V6uOb~Ioq2Rp9HlBZfa=M8N7NKPL3)k$W2hh&a2sVG{?EHr_ zHuU1uzrZO$st%&dOIidtBBdvy@!kM;$S-1`my_5-KQBM=p@CMbql4RXdVl*XyrS{$ zIy=rb{yO;)*}|2NTmxoM$SiUl&hP{t+|a5HFJjUKzVR)` zEKMa}zntEYjk12W0Qv{FzrcVYC7z7Kkq8IvM zq_JN%9!%AbO*9cAdO=p1P&3(A`01vS{$;B`7o%Cs@lQkI-LzW&V2>Z4`_spd57-aT z7{k$up(bJ-_6Zf(6=x=IL%(doX7W>a1Z|`qeb#43TVTbB5_PiT3cV7!0NNzF0>~s) z=y;XqO8A;2(a@*Wpa_Q354OR(Npvz2RIhM|KOeB~$XifDZL9T(QGO`v^9eUn*f6Iw zHBFvRjC29)6SUhYdsxM{>KfE;pZh%W@JpxOB}`*{)91a50lpw+&Vx7sMtBiBy#00? 
z>K5A7(lz!A?Wz{}RSj2Jz#PG3z`h*2XcXFi_e-u_;rvNCx%$k2_e}dlTG2rDBDo!8`Cv^V#T>O!gpuMS}iTO!l@u%1qpC+6|l=fmcl=Y2~oTbEQU}K`> zy|7f_m?=s!>)k?jskd?MBfE8d3Tso~^qYCC@%c5e)|buZ!P7y*GR?tSWJ0Z=3FiMI z!Xftc;zv?$qE8F-I1gw742L2g^#y2oJ$PS%#+QwLKb`U&75{ISfjtTpZXGY_PnHz`i50VC%K?50{GN-orrp-53zTAd$ZLznz$*%4xZr9>{l^}Pk+xF_7GN?IC@~ED%E>|iX z38hu7Sb3KoTxKM;!il@Dzt;yljS@UWYs1YCR+hMJ@aKDANXButFW+Z_P2J0Wv6?*ec6!^Lc)PC;HVaPM z*Q)7*#S(1FUD@vP14BB#cpasb+d549>b*Hwb!Jo5&ZgebzqnyS!B>BFy8};A{t@r9 z!BP&re!iLAt4P?cae_i&CrxVfLN|OS$tRAaY?k+Rv)VSEL>wB@`Cyqdqux*fCv9j>t_x z1QL`4j+YLo#~j`Mv=?P%@m>v9-rV%fY9wCaKwZXdX|pRHkO_rR`5k*cp<3{LaX{H% zajHV|N(U?H;7-nO{me>8xm?kg)llD%9|Tgg94+j=a8eBte(fO&;3XQa>L^nM ze(-J*uH_nGMq(isSOsDIHSQCsb37mDyn6Cx?;po_ArCIB;%TzyF|Ih(|oW; zG7@y3aW%5|nQ3KnBi?|+6nftZ1zVCw-t7y^vQiC7tQ!0bH4@wA?Tk<`kB2lEaYMqaL&Z}uV7ttAWn)m zX_h0j0k~2(FA6xNI$0^y%>c*D9*2RC77eBFe-bt|v@=bk5yMKrf2mf@pci`@0B)Fs zlz=XI7ar$L#1bb{ha{fmi?*xf0k`lS1Mot0ehct|2}PQCo8!U-I??T8)&ux#I>I0v zDfHr9-W1A;X5LaMmWW~$pG=+5gb}BJ8j}PfMdRMQ@B;vzMr3Zod+rwjkM-{-cs#zWv3=U;|PU66v|06dKXFRo(Z)XaHS>b3x{BNgcM%p1;Ppo1gs_M4fbWX%Y(lth$d z!b5mHyAKDPbOrMPevy0^M>9>@`o!o3JAR?$6G}{l42`FtdBimsifHwHz;LA#lyCXlcOfxJm}NOgmEqT>?(uy^%JAb-~^MaBHr_e!dyORm^ONYkV5e zxCuCJkG>nc@B;VdjKY%Q1bkx0{5_h|yEh_WDaZGE#sUC7=@;LY13vX)pZKEySvGS* z?{5L(4DjA|0rCpKQywWH2RYEVh@fy=kR#v)xT^raaQ$Ewd*@?Vf8!_+#QuqW%ijYl z6d%?MIsmvk7vRZ^(q$C@KjZIj6Yx5W+cS{%bIk~@lFKljQo>c% z3HiJO&m0RA$(`q7O`L5H1_y#H*bPox_kcx1VwF2B-^Yu$EG~%!zTs$gmo4UUWTBHv`;~o4Z&+3R!v%;{sg>^DY^k!Dao-{r$%*U3cGM z#}|wY-L@#Ejm-0%1H4wvtOG9Z^{E3J#?i)4BE1AI3$E3W8b5PlyckSCHj`7{XMqH` zeq1KDW<1!y^3R-ryCrwy^?(l%*~U%z(ty`qiQ99K&j~oM$$0wm6+Fle+qj+o6#J?7 z2GfQEcPu9r|KFBP6!ZBvIF3S2b8f{pN$1H$1+BrGkWczG`ei6WV zW4TKstBu@(faCf2NbPBMoRguA4jWd|cS1XO4Y9i*;257_SRr6p0T-3l2YVC1wRCf3 zz<CXjuE^T0h}U#tpo7g-R=DxKvuwI8b2HGRe+Q2 z%F3Z{u;mvLhFT!60z6AJ3jg~OfSXA}`?+RJz;g*k{R@-g{e<=P1dSauSP*b6_2XWVb%`D}!78p%l&e=ln}^vUYQQ~-S9hlf|LPZ45m(8J<{O@1<) zuy8gRcySj*${`Rto_S;pR}(@aDB&X?dlalnh7W2q3k`IK1i1V0ZRnPR4uHEK=LcMn 
z&BPsHR7%x;L%snAl#OZ4(!6J4;O5LXKFWLMskoX7=|xmAf@gH4_VO= zz3+zQV<@9>-B0LDCY@czQwY2I65)JS<1h530`%B>6*0V2@?xPQIl^;7zZ4IzX)R<6 zK-FJB$I=}8r3*R(|86?r2=L3ct_=Ludu?S4ZaELAM&Pg5cWYI!H^=@Wj!+qt>vDyv z+!HeJSLqIApnge*sD|LL%qc2kMk{fPBJj%`qcq03GS{fH6~o^M`zv&i%J9DuBlQZ= z*#y6-o0I`xwWE~A+ScGImBW5RXQ>?cweC{Ojp0+d!&KJrJT6li_zj(=mV&<#_UCb& z%7I_wI#mUGlV?%zOWmie;8!|OEi;5>Lc^j?R5?DgYB#EeoM#zVs`ZYb5%|kERIMHS zdg57?Th)s9ELM5vT?zgjaIO;Im$~~G0&`;*t3;P5*X(4~5c|tGTG@uPQt<1szY=#V z%Q$XHWLuTf)#^vk2>iS0dL_WGr~SL>fF-~$bC)Os|E{=U3GmB|ZOg#FE6!LF`~vjV z;NKOOECGI*14SA5cf~PFfM1XOyXl@Kz^~^#chgBrfM3sf?xw4j0KcB|+)alq34Q_k z8u0Ik+m-;op7Y#I=Pdz#J@)UW3zq=Dp7Y#IM=k+=nKM@j_;<#gOM+j^dG4lDmju56 zeI@vJ#+}Ueo@XrDM%3Quycw%n^{ydIf z8Eor!{c3!#+r<6rP-aRI+3CCrcQAFszT`|aQqkUu6WBrnnH&4-aRpm+5}MsCF6|Iz zvF)jG32Op;Qw8}P-c_0of9?KN3t+#_$EphaReM=Ay;YyX)2a&mRr^|T&Qs!TRYE*9 z{#KR5vto}c0>8}Xss#Ky<8>v#Z@BBO-}lP6xohTQ(|p^yEe|Zq7EnHvM&Pf(4{HHT zFAV;Ays=g>f@a{);ggjCrDd64n{(Eua=)x1_)R>sD#2felT(>@RuTLr{#ljauiR0q zkqJCSNA??zhTXQnbyXCzl zz;7Thui1xdaWkb)<$hcy_$%?`%E7s;FIPh);mZ8EY~U~9(be>@*vP9ZXM$a4Q@E0+ zjP|)_*Xi@aqj}u97Ixms{IBYKuGpd;R#`!)r_5ROA?}i=tE8gwx^y{%codtn9mNy3-tAQLw8^9{CethvdO3Cla z@R+X!o!=0W{BpgIMXdRyl~04X<<%YNN9> z_CCbH2GK0eViC}F##;n1|JK{yvw~$EMD4{u zVOx;kuhM#^Fo+UyBsT~$4LFQ16GueyvQ(0R+$j_ekJ~ ztb=H_CbB^&Rdb=*yn{GfkxcYH49>m@Cqt#V+a}DAy zdDHUK^-j?{qizs)$){GR2<&618pK`ltS!65tR2K%@~@>qSp3{-2XU9YZ0DGXyX$K^ z2Y0%=9=DvF8ZGR*>vuZ`ce=aYw~McdJLH2q2Y0%=p15<&ggDx~ib7PulyRpce%*d% z6lSMF5ab?y!`Pjoy|`#ucz9&TsW(%M{}jz}YHh4M%MC)^VfKy2q@n=trBo5-hvJSC zUz-gnNtXvP9%R-Nb3Asto%%ueVTx9O=H;_F2w8-=sWes-xEF>}n6L|{WjV7oCRl8l z33$aIc<+K4^;Ki~AOr(r?ggG1w5gz~Xf8jg#2#5lVQ5ZWij{-l_XMyQ zvQpB4PIwuJp};u!rRS1^m`m>@2QiMqD9+o4WrNT@XBQr77%Lvb!V5by)YL{o;k5|Y zs2^Zr5XvZZh93u<)->IYA10|Apz$CufE!rl($pCn61EW1dP#Io(@J(j6y>)1c-m^= zq1hcoSDto*#3t@kQiX8i6aJJV?*jG%g65r}r~YMQoTk&{pxeEKUS|Ug@Z8N`(+b@u zRoHacx4^q5QH&e{So4J2(@X=;O;EzT;DbaMmhDO?j^W)5{X=BZROrUQ$XhWm4iG2WRwOUF(eMd=BU2~6YY8P^a`#m_A&Fmh8Dn)IHz)9HKpgIZzC 
zKo<6`PqK9Ig{~6KU!;#c_R)Qf4B%vEG&Hk+7+hwU8Qd^mnD3(Ynkh(d;~KHP4qYEG z{tx#0&H2j%hHh*;?*4F$_gy|x7Dg)fMqAh-{6^cjX_O>BT~tKEii1E#8zz59v-l{H zE_VRL2qpvM`eciNkTYiFzKNDm6K-uMN04lZDgGx+hQrSFwiBauy-~sWM=GHD7M>q~ zV8}MRU_g?vjgudfO=Dg2u_E?kufrRH!EPaJXdhS{+F<4fX$RJtJDR{*h9W4#c@fYN z0YLr*Q^vV~+rxm&z_37Yvjvm!uftbA@Sxn>Dw(L=7ltq*5~}nX($@aa4M~g1Ezi+R z-Y3HvqFK2zXejfOdFMiO?A*AMz(%S6V2>Z4`_spd57@t=837}4Bc1KvX-)jHq5asz z8*vr2oX;R)TL8jejJy8iPwOY?P z0=S4Nr_*JjcP+@8T0m5y=2+9IB9&6HD_kPnV3HzxqHVk?2TRt!41qjh!j;7qt&Cyp z6Wr`-7C=FF;0N%+OStE!!{_lW0h2+^nVRSk*k6POkW!S>7q|q9mMH*~@uV%~=BPKE z8wB51v<*YMmCZI3po=byL~lpCO)k{dFQ!!^4oGhLj>wU2>}a%@3;Ssl3cjW-aa zly*18Tpm#f4s=B3bi?P8)-dcEd@p+^$&o%TED(@cD6Jle3NB*tv3?@x_XHYsOv%BA<2% z4Nf(F&Kk+Cr?Nfi%6BaQsyYUwucK7#C|Tm1NCrfy9;FaOgfT7-Fyxj~C;|7KWZd7v ze}040B*IO9I+y?M^NNX=jSAH*0t;$U0cQgi2n-d`&B@-6vtAtb6YXTwkI|C%(HqXN zDcmmvI?*9Au|s3jjl$r@Xn=N8aZ^c+2v!aM8+bz(iOhr`nN8`w88u+i-zCK>E+RY< zKjaw{uiAn0kh3PdxZ$=NvoPc%-V%E?Nk6;^EP(rf=!7#lF(`~3FNZ{7l>yBQG8x2= z3xtk~O2`9zQLcbjNOsTxX3&wVPH~Gf;XX|ukcnRA3l3ZE1ugEed&Rkl$JJU!z#Fww z!KeY3w5t3t+aW3UK*BlA7Bz=L$q0^LQMPU}FK!OKzF^LLz{ia^ItdPw<$(c-J0^-1 zNY_j(vQo%Q91^fl&S}Zs4)D}Kvi1SBD>y2?ppyXo)IVi!Pn6Bg0q~w{_a>olNF>xO zKC)?isoI1`!G$iz2duZVyT^q$7%7OOF)}cTrz}8E-sL{W7IhbXony!`n`OSx$jgTg zqZ_utZ}IhJz!#85p|}Uor5~!9Ms)Zu~Ed15ky0gVg~^bAN@paYn(7fH#mf$ z{7%OaWaId4a3c=`4n~8r!~oHP^l&l^60*&Z`U`&UfljBh7^E$fa0Rb`>7$tQuye#Q zYE#Hf81*UG18up*!`@b=ufXN~4rKfp>PyCOs>W)UGu#}p(_-CbQq_g~TQ}Eg{&zATZt`oetYN2LO7IZFuNr7=;Gl37(H$cNuG6?E>Xz7 zD(NV7lMhK(o^oLU2rX9^_#7}4b-~pUo2j;6Y0O2&;++sqhQR>Wvnhd%` z1cczRRRi{_=R9G5!GHES>;tMal(KPm%6&9D7 z4m5em!@ZLX%@oq#1FGM1qad;SsHP5x3&L7UqQHaGH6n1BDlc&Tnn0eB+b}X`07qTN z#I}jYXTSg47Cuuyw-4|K^gtlP&!5}cxPESb{@muA?k8LVCO!m`x0K_Ryoew}A}(zH ziZ~@f2g6#3NgzQ26E6CRM8=kF77ahQXIv>OI20WC9whCQs^K^(BEkkZ-{iwIJ_y9 z@eC+HMxzc&qumFxw+<>nj8sj?feyJ{Yj(!T1O%zw5m}LPTN<%UZ7^HKR8V~Bn<Dh)Ev{aLve(_{fpian4>U1CFKZ8=yUGw=yb7%)^42|4+y$tz~-+R8o@^I(z2 zgT5D29clBQa6&sb`9CbF3X%}#{gLMee0lJ($XyO^kHkt9Qp=-+6r_BB#FX9<1GDaJ 
z6`L4ef`QbP>D4=^C+tuM9Y0BCfKNWMx6{iQZ=NqvYX*S8N`;k%M=G&!2*Cwaxvq5K zITK|&=52P@jFy_+n7~iiMA1R^C#;S%DtAFmTP>BQ7k&&p4Q~cv)}slXw7kT^Jh(1Z z=}tAJvf6EM8IT&0iaa{Z6uzVbQ#Br;tpwjxPPh^dI|bUi7DrF1xhFOAEEGZ?SNa2M z6Ivg_i#&|rpbiS^TUka^-x_jd>3zk!(>HI<-=7?wo&EIY^u;fH*Z3s%KpZfg!*C*J z1CZO&St>X{06Q3(9sm;F=NfsRv2;@SvS>hJtYXZDj+f>^91vk)e&cLA+pSiIoq1PC zin=Q7+9US|eqRBo#!w~GsprPL=?nKNqU^7ziHbrVQItqw;{FO>gz}0XM^K!FMkfh- z%=yM+nuW(n0O!xg{1iIyfUYpSB#c#E42ViXZ6Iq&OX0^PD90r7Z0WEA%e?SEN<~ng z_bJZx*kxZF|F82Ul}$DkRkqD2#iFRjl~4|i=}041U4uwii{eW+groULBVAgR#t{yi z>5GV(M7c^P%T_DD00Ke4O>>CvyYX-gLIi0D^eX)7bV$1s17Sf+jfa|eH-PC9nj5Wi z5dUCMw2VbS5uyNh`RlD?nDE}#+^ zSC`l5K9Egx_=`=1h2#UNRwkL#4^z_l1F`kZji-;-I`0C15K{m>?5YHeG@AND9eN|$@+FVN!v%4fes*l)+H1)=#qpaT z;Kc<4(&og!MER6oaN)V0bnnEMVL4p+(JYZ|ICLXMgWl0)DcZhB_&AVD7F%@L0LGH} zxXXLkEpY=$Y6ZM(6-rATS|NhcHKV*seaF2SW|OJ5*?U(Gy{0(_3hDx*fvu(s$mn`u zS5#JW~sts1R#1t_0mDW;ZGX9^YZOKaeXd82oS;#Q?Ed?&>!10v0UYQ6h{iVR7#8h?_Is%0x#>_Sjn0aS1;ed0k`$;2M}yRenQX~yu2hPw!n`8 zlCe>*BbgC%i3>p8f&tEtqJ265>?&nGO#LpO`o?S+B#!_Ec@m_vJ6^H3OWIYN!=psI zCI=DmtzaG~{kt*kX$2vfIItCV)egeQO z^Z=z?oT3yQ`&*>57k5zC*Vm3qP&y#9f>=bQlI{=3M=xKWz3l8D&AsK6q}DeG=fYHg z9J1VN6qnu0STI+#$VJ&)V$y z;o0%oR_mwZ^MAg1d(M73JUu;peSZA%jJ-K!M{i!gI6gmq^BR7A%MM@vi+z9m`o$LW zd?|S=knAlE~bYtq7c>*Ec{Enz9q%F9?b&SL5uWvLKRaO zj`OEG@Q5ozzssfnPG&CCLDK0^s5ZPnTB46_c=q@9=wEp9|876sefp=~-p=+u6x!M8 z{b{@RbZ4*kC$_z^QC2)N%sm32EZa_ga9_y-n+8a-%A5`P2ok!``;x$q`eDu;$monN z&c@#G199fo1Hqe-y2*SAx11PGqQF7)qvrwBPLVB?*}1q`B=`?ld$--ftH@CnBbA3pdOP>)|27^i)&Y?f5Y=S_OM)F6QfSMa`a&!@0C^aGPSwwz(}VxD zMp+D`19k|T9M`gF;yf`HjQoatLa5J!=n}TlftOyRVoylKLS=-<-E+gp8HFA$f09fw zxDisj(;7#(W|mQ;wEQ7LE}PJWbJPxFnCzqr;6{nEHUJj&!I_4atq{#Jh)R#I(XaCe@h<4)zbGo z{^)L6Ur29U*sdv)pqTHZn{qOI?nm5+}4xRw-*Z*xizyIES%&z>&0htBh`44yh zXLzTM>YfAkxcm8YtA)i{ryiMexumXPx`hR|3nZIh24zuI8_Y%?YjcWnG&15g-Pl5# zZH)Y3%Gw}3wORYyH*es-eD8rzloqS~{P47m|NX!1%}v$;NC=$YfA2_h;F=z=%9Lyi zfswtK;R*GO=aYcHjxWPVy?)7Wxa3_luY`98(x#PCroH5fA}KCQYgZV-0fTWn@CSn~ zGZ)cFxWl7Bt-Tg^t=b#La8xmAk{xjzH(J7eunYEy{We3Jh1^%H4ySPW`r1PsBuH&7 
z`Jq#s$T8l>mP}-$dBg_RJCD2vtRnZL=F^BR$T;ZdXd@uc@n6wgR?OZK6Q|}3S3ArM zUoC5yK|XnWO#TJ)eq$6QQT8cDPG6gG4M%Ix3s5brx60{do>qAcqg8?L3fvP5)U4CG zIfJz3^MMAJCO<^=eH40Os^)@pQLm6_x!*g0`y&_9^Ix!bn8MpOd)RoMG!W#lA1WZ& z!L!UP7CO#8sImnhv??^FO;!YSan_t+9T&A+c!8evAX|--tjhzM+>QKP@VhQ&VxU)G zaT8W(=%Y16%6i-8G8F`kI*ARCm|)H`q;2Ml;;nR;Ua+=1o#t$cRiy*=XXskPe4#sy zzvubPLs+|8>>;=A0{j;zB@fX<&ftjIiF#IcROmQM9-;G1>-gEvL~m z3Zl!K?~&b_2pLC7<~7oMA6wmOGQ3LG4Z7C1SWz-l&w{Rl8_yV`=e}=gViPX z(aRtjxPh^_@*nj!^nw+oI$AMW&)RRfK{`h|oZn})9odTPptNOuQHx$y(?g@9iHOh< zN3a8u8LPUulg>aRbw@p*J8GxdoK1x*nQG6Nxx||Q1stKVSqu&QchA}Bb++5Zoxhw8 zoK4jr?cS`-yX;=|&=arMHqlyEw!Jc32_HH!@Mc)b`Q0@|*3+eNl|_SShOf*e)%oxZ zNd>D!-7t>AU!y@oq{-5^Nm~OH*P5Y97F`{BG8@W<7}6>u^rY%^sTLGrQSekPgQ%&xQhSEqHzuH73Ryp!A8+Dc!$+%8VJJ)-4&>IbR- zwB>AbM*#Eefz1dEZHbV~!t9KP8_1$OwSWKoxy_g|2Xtgu1_TR(b7g&#b7bsYdLKz4 zMt)tMJ3Nn!vL&Kh4`YDrQbP@8&zm}SwCd|4FeOtMIzQ?n?hw@pfU_GLOzyl z?x7D<`1qV0fnskGT~S<4PdK)?sT)L>j}nG1Hj`Osn~leh8;T#?sVdQ6HE3DGO#AnZ zt9RR-zx}fL_}%~e`IpC^KR*PTHqgU9!R()&?`+TKPbG{t0Af1nZ_eq8uoFrq5FXa~w>8%F`c-nt;{%>iLgL0AAqEMDoYt)`_2 zi9Rf)hquf~5e1v(6&LPwr_ydvxvi4hu!uRA?+soF28W)_yr#o`Kmei(tu$^vGUz^W zaokC_kfri?*PQg=@9QXhgbssobS+sw_A8)7|8DhH*58Rsj->K(rIgp#p)}L&=TSx1 zaKA>dPvNy)5exdw1zK1Z>ldzmj+3{-#ZuQlJ|q|gPNC2C&!KcIK4ULWPv4yK(9pCV zMU-k8NGT>PWa~G#$Tcrar(NFZGa)G%F98gef3i@h-W7DjBZSAE9*bm)psas{&(MO+=sWgKY2}PJSH< zQ`%AUBFc-INfsF5pL3PO?Ws|$NRXcxX7ssR_gVMPd{2%1#{*igSN_}Eds6WK+}+u| zm;Y|#nM3~5j{H2%mx!+_EC~wYhoBw$2V@Gyr49Od3al=(nvwDkw9HGhHov`-B1PG5 zZ1$k!G#PIlWhUGK9o47Gysj6P(c-PeRhW5I0VDpKrSOe*P}4Q0_KQq$H_2O&5Q#FZ zID}^#XL9SXJ@*L4B!MK!edt2q-b~ZDr zHt(pLkhlSrCS+>wkthi~#A%+^bP5!9$vR2tIFeLl>ERjb`f%Qiw~mz%&0j5}Ymp|Q zQv_;yOMn1oHgr2Ot(i@lk33oqa*!GwQ!mmxob0-Da2Q~$j{ZG7L~<{EaneR{Snl2x zL|#}KD<}2bp|FO$E%4M6x%es%cF2?E-^N7C66MAvVoI&$R83-Pp9IGf`y6U&MM+uq}y(n%XLZJvvsQ4;=iEo%y zBKszO<@J*%nl@5vQz;@H+FdC|xt1G2l$2xFYhypEcJ~;wi_t3zRJMVJWHSuGw<*Ku zh-wb{S<7I@DR8Ppg^^xce+9!N-?!U$`O0%6;z!s5OKPm^IW8`z|Zn( zO)hsOPGEA!(hWO5IqF=bUH!sVTzxldvq8zs|FyFVfVkoPW;AZIR@f|-DmN|Q9cVMp 
zw*j3LvhO@kiT0glHh&Hh-wsrK4oibPMM}0c;q@;pwc`Y+m!`5^=tKGeSOG9BSp}+O zgFbqz=oL1zZx=!2ru%#O@3K20)nbRAki;xIW%a4kJOi-2XUKuD{O zXyYWG)&^dEo~!6p-coW_HFlHly&GmU-OeLIq_N>wAu+P~M%7etjbE+MX5x?Z)X4wB zPj9XI|6Z>s|L<l zjq)n=UDPS|C$k`OF-P~Lw3p|sWz-qmm^EbPc-RoZ_qHyV^cQ(v7r<%Jv(^mX$*w_Efo=;WVRyK{%f1H-?tYZ zl&cVTF~fWfzs=l$r^p7rO@nn>KY6@$DxYE>2ziz}neuFDJ9&0$#J z@%jwQk|qqcr%ea*eHsz@mFc*Kdp}cf5#Gr-9Dx_mR^uEC77mh17Ltzonm|itF6*_n;=GLiTHQvq*V4ez%)_A}35L@*Lxt0jCj7VQ4EtVgpIW=kt zpEAOfF=DVAp^DDR3A;RAl9%~WM7ogFlP}!(; z_4vQ3$qK!8axG z7_3rWsGSM_2AkT^EQ2hS7zwUHzSe-9PHxnnRTq4@raJ7U@uI7I58h0QKGUSp!WLGzp2$n4=B=gLcP$7qntqnwaKu(utT*Rrp(7kU z^K_r>u4GA5xOvR(R(SnT-(w}rj(txnZvLf3bwD!=$l==s&phAx;N3K0I}~AT39iE< zl)-;Vu)Ak1x5HY{(bwt{CXF(rH?ZUwRJDGA(`+g##Rkw|kb2V>u4Ubrfd>mK$E;zR zwG~^$9$&$6T^fpOmUjpX`V)@4k5#oEDxk}a=W3UmY@&X(DJMQk01kr&#*MT^erf@x z1iulCFL)o+VRqntdZUxV&PAnWFPUFe54{vpmSa9nMud-;vUn=j-0BskZakp z7QbeEnjMi89Y=ZhaRGB$uJ&Dcyh0^;pfB8st>ZLY*@^kRvuQIBW79^7BnhzzIrPTl1~SISh!@dj?|Ff_kQ}qQY<_q|(&rL|dNucmZ~bliR06 zp9~d1XlzR`+8RU_BGtVv@}Q8@??Mp|ilgcj=jKO?yF3p=U)FJM{UN+y8?))OSn89A9q_(gwr;6> zGW7id4=$^Kp=eI|zJD|t z$r#SGfoENMcBWGpI3fy=SOBl0NxK27Atq0Cf-N`6ycVK`n`O7o%rT$mZM1%LM%lu|Q^ctirPlJ-aSWE6_9;K(4Fos3SfQjHL9@8h4D3`pv2E z63uIKgi~mtnv4xo?D1K3rAiek@vUMl>9gSdMrvTA$YxHhB=9jnvkl^e@De>=?7-+A;hy!z3N zWf~{%Bl_ftyk5M4&!3+?+&~|SVHyx}`7QgS<+W*hKD98Khw$!D;3<|?l;)P*VkHP^~2 zG@R*ia5Um#N1H*Xo!!2R6(}xY+FE7WhSE2KUn$RCPT{VKiP=;=6>jadcJw)x=bI@v z9Y)=qcg+nm!qkt&rD$})9{o9aBs6z;7b^4BJ*>W?$O6I6&?IIs5>v?;X_u z89C$(Mx5epQl(B?EIM{K4%0&)v(4@UIwDwTjPMThn!>=<>IrG!t!UbO>E$ z!AUnoOu2mY)=&Ii^jp}gLClPQ{(1D*ho6$MyYpoKpv}Y0{d9Qx`uO#C2kbN(J;0cP z$Pc^c28!fb;MmWf5AY3^`TTiviwR*PRI$%_Kqr2E9Na+nI80Ptkmr+o75SqgwnfI1 zDWzSZ2;?Bo`_Ke8npM9DTid>KV{e?jd;zoLNjv+)4E)g<0cT4- z`;`_~Hp^5;8K!1ENv9b%$?9Sl!6uJALL%-z{gEtOe0z!L7n* zPng3sYKWK3wJ;jIO}zLd_R!&_Ni*mg!Yk!klGby&U0rr6%IueOeVQg-BvxSTDpD!r z?fxkOVcv&??pQbFM}ZI?l+5%LC;|K&1LIh^C3&^gJQjk5;g*^QT8Ot@2Uw;7D}J-v z`ehd|^*mq#)a_3T`I2P27IQ+~KF?~NSMJn)IQ!@az6)a7#Vq77h``Ac*cg_-_s>#L;t!m=M 
z%FTu{-`q>?W}o!tlOfw_FblepuzW`3z$$QWXd999m6$!-bluD1U0R3p`JM~s`c@z% z2qi0su#k6t_34ACne+TAvrVV@7pV189#&H3-?{?3!!`LFl% z>HYcdR-Q%9f3J`bUOZp<=}!{aojLo-ftH^9X!R_2>XUt}=gen8>0Bp1Szk1V3BwmY z^yS*JANkyPIQCJKI*YB>X?cLO;`x>U*7Boct{vQHu|mnQ5jD~!N5@$E)HZsxw1eBP zG&9lKn2Fc5_3MwX)%DTa2V+zNiIfeeSfbeK9X%4~XQ}j9Y!oa%8Veff;1sX;bZkIi zj)s8L31tzd)B?fTPROQVq@H2NfI+t?Uy~9-V!DXrm8v1~_V)Vlc>)QY`?R&S(5^s2 z$)YN!K1=xqM3~iD1Db3wRb%WwMUm5F2V9vhXNAH*6Sx;p=E!C(?Jc6nY1qt))i`nx zbIkjKDx9)bmQ>)#X79NAMrWtr2j!ec=7x% zv3L3R)hmGnmE9dBkUrXSO%MlrNnMcM)tVGSMfA?65EAuQq7X9MDp3ekt8vUGO4K;D zN~PSO%9ToYO(BHwnb6}WSAQg4-4ce0`bGiq?*zixM>yy?$9v zX7lJvbSP@hqHEB8aUFyX!ktu$30C(Xpcdm5^hLHzud1DTWgJpgZpb-U)~}Px?PzyO zCkMhx1C?CHmgUXZRcwe#fO{_A$0Mf86s zoDk0b&krlt{%JIJXB2=LOy%motjuy6K&`L!=>O~po=XL&HL;`{kfkN74p5z1ReAl8 zTi+@Q1oCZ?>E>$s&}x$AvF+o@tPD`v~p2gTy$M`z30SdoiS5eidh zBrv2+wF`VDgTPaBDoKh%5Qf(~}oJm(`n6v)H^jh^h=zN-v`u4Pqjw6b& z){GmL5@^1Y`U?~LK~ILT+Q`qT28&%!wP_Bg;xJg+$Gi=%SRJMfaZZX^Oet2;(2{yF zeX_m|)mT9!oO23F#L6{lXI6VWXgs2|aF49$TXoxA&p}95E-uXqx@Gzrn-)aIY*z@3 zTeYf^VJT@Bb|HI@Yv3fhu(}v4F~!}ln2N3ASsi!N5j`iT%ePw7)Qb6?-rIC>mS;WN ze#PWcI;I+uDItkzZkg-!P*v{BsG;Z3|H6U7Unme@jsNfd?)FaJ|99^`{@?99twj{T zr$`BZNin_J>J30SncW%fFM(68_@&Rw>3n$)>rwX>kl9Qz%d0ih$(Gt?8jWBeU1v&V za}80@K{xjQ?TvoK#5{H-oza+9#kt~j+B^lexgW6Z-tD+$_V;GNy_Kw##*>YU~KV>{>{ErqWKcbIVa%g=wB%vRu1uvf$E= z=eEbwd;j`X*L8|0&S0zdYWt=zpZdw(#*$|<-sqiFI`JKk=Mj)cF+Tdnm?oW!*f`gC z%37HeQoik%PcyHh&sjC$_&FyPr4w-9{?}hCW*$X@1t6f+Pc?QtV+^) zZ>zPQXO8o~zOBur1*kdy@9pny=k33qZr}TV-paGc@n0N9Vl)=l!C&-9?i-XcSt9KaJtZ zqLctd8=s~|#t}~G{BhyTvnb8Alh;@-nxZRPab>=nT%eW%(==4=wyt%;ymzwHrVO~I zy(!*yUls1T^=y^69K=>;6oKI!&EnV#GnHc7EFx4^l5-gO6bn7gl@?R0p9dp5!Lak} zusq~NIB6gJ9R3_iYs5b%KZllcPDwgX$5c#Z7xm`(pEplQ{R(VNCFpTSBlf6+j-Ct; zThAV1alr);3c#I%XRwSkvf0<(Ndy)!aW_siHu>Kfd%?uN41uStq>mc;fBQ*3{!8!4 z?#}ML{C^wI19sx3$f+oqFQ0VWc0xA5EV0PNr|$3r?DL7!dH~xYrmC7vG3_n<0fZ8e z%?q2j>2M6Cw^;1K+0nle+h+D2)(bQcdY9ZvdV?z_gf%MruT6)&34Y%*B(29zay^Cr4%`p$-*f_h|9?;vn42HX5_bIJC3HBAqhGyY+H(An=XQBH 
zc-3;Q63n^V{fcxFw)KDd^6$cHZ>bLxv*&;q~Bn@c{VE4}X7g+G_RteUz+QBMo`@B%39nU1J8ez8)J5qTLFNEkj!cvu+_{(q0k?&u`)pZja>GR< z)`r;#BKA`h!&!&}0cbi7QMJ<=rr8ROMaM+dj( z^#1l&ctzvgb#|QXzw$bG$n_8*ND_G42b6Qo^G6N>p3s?JkVsN+uUtQ%Z{p-9ma?D` zvVIvxQ;$xDBmd&UBdHz4@C?)k?C=7PXs{V0pq~&b+{S?jL&O05sXw0pb;&UH>6pkG zKcCpkk5is*^EjN&()tRmPo3(g?)$2$0VJ$1mb|Ml<%A$NI7xJo_F-^5Q#Y(xe8KRO z!08P?|N1s;L9@Df^|x={w1u7}%d`Fb@U)Hp{lD$aKC6NRR?WzO1Yg`XBvV*s93=&u zz$Xbv7yky@vH_+bfySpGS%#^CC({u1O*$!qgrlT@ll4JDi|NRnx&uFGxST8tWuf)e z{IRXX`o*^VGFn9k`eY9^JUhtb#eUw;4_C}Saz_vxk<)9eSY9k|@Xc}<_{bln?-Aa5 z3qu2~2h?++0wVEJ*hv5)r_c8^CNsg8!a0ni@YiVIWa3BP1}AJMaWsv6Qs=6owB5)S z7MS-r;LJAhJg80K)LK|0oNyFKJ&?d)Dsq4}g;Q@~eQ*M8p@Ys6^DbMM51I4A8N<=q z0^}TUo<2W30{J}wsc7iU1MZybXk-VoF`P%IFBU-Nyl_q<5DRZ?pf-gQb|{e6W-;te zqOoIZ4#5L}j??{xkpw(Q&EHY`km4YSm!PmH`<5wS-w8V-*v-Yl6eQ0?Ef5fJVe(57-1Z~Bj1Nzam83l1 zw98eSc=44#^rQt?6t&g6lZY(625#II%ow|TCqa{J5&H%i11gwGe6F!UHU1!oF1tJ1 zy}xz0pLToSbm1t{(P}3hKkW26z5TAn)^Ebp{isyna5QB$q{{Qf32>70o2lBXaDXRh zi;_qO4spPkih|PqO5L@?l3n_-9Xam!Q-xG9(MJIYqqwv8%aH2l3a%8ne-NbN{nRUd zb%(Z(UV=(@Ub=`>bq>H<9dY9a!WJwu;bDX81h2gk|1uQi3il zHliysB;x2@SXiu3x=?|td0R?gv&>x8#4ROL)J+|?GvFLk!GC(JXQ4N z>cbLmU{j9)L;c#xGmRRs{88+2Q9|B|;Kv*G+YHX*fbJ1$1qqCz)~{J-1hybZUX}(m z4Q9za!bkZj8S(BHm)W{ubkPN$beV9?HW8mwF@RYGb25no*Kov&$#x;Eo4x?}{vt zmD7(ztwbNM@330*+3iifrbq6>X%qv!tmCq?`#Q68=2dx*@?^~3DWz_?AEm5?I$;4*gs<(8@3L0K}6W&wuQ!H#)U z71_;}I#gVsIt54$4nd;w&^UDTflUJe<7q@t?OSwyYjEAL#yf_U5qbj!{~AedWlig} zXK{?|6Uv^WM-}tJD<9;93HNu5eX_SE0i9o@yIf?L)98hhtj$HbKi_QRhLLlzR7zI5 zMvS?Ye4=dLrA)$IzUQF>V;CiZiyWPJOPzG&OY>qbKa8AnFBehMj2B$Md!^%m^)^8N zE-@iKl%sIr=+8FKFg_UZCit&;gfHtb0D2t64}PNCMJGTsF$lfc5D;SWvv7sa^hgX9 zcnL2n&VwW@lNcVbFbX@ev_6cYPz^78=HN(_whd2ktsgU2?F9^(zr2yt9p2k-`&q!i z`^!rXU%LN0^yrMGCW1mgn{>H?t73wzljsb&tWhi7Ch z=Z`~%IjNDK-efj$TbgqfH(|TXZqmomwcIwft<+&>c$oQ7j|2qA85tde#<|IA=FbdE z(^vS@=1&$WkL85Mf3etGU|)wp1LNZsd6dVL4E0pplX>QEHNlYj%}{kGpSt9tk;?yKBvN)R7#E)v`&z57cKll7;SvFTPg~j>P(!zb& zDbzAN+{!H0%wH~OdSBvizWFnPBIoN~~fF*W~*_(If 
zj$|t<;+@6$Q*eIaIaF}m);!kjd?Lm7k~^QCUVe?+9|!hJZ+{$$+ui=?K>w2)hj;^7 zdzx=>a$o9jU3(mDaP(WwyF?r=n z(4XJ800Tap0S0{_C!a8w49DKYO*AhSY$}Uc5(^V43@?H)Uk|WB00(}KvgtaIc2%;t z7L$Z0oa^|(6w=BZ6bi+$O{Mc#% z^6Hx9?La}jnV!?zcY2+XH}KuC)7$RE`@5|cdSr07#kxib*%tsPZ~jD8RDse-E&Av% zxOQ)nN2p;_a3Ibek%L0|Xp4tDM~g@-ethvdN>4x*f`#;m!z0X%GJxDTc5mR9pL#rq zWi_NS24ir;5(#w38$lhQ0$)0aY-EXI4POeU)dFnIG;|MmJoy4@_*kUpB(W>vZh`;7 z!!JnPz!qWK@k5~I3AfVb0f7>Il6a(fr0v6yj>#@w{#^@JdC+>G$Wo`c2S&snlU^Pm z{!3>60n?_R=`#VJ_|OGJBJR!urrkPbD@$Y%sKt{Ez0b=t?atECk$`w?wZzRTU7FU> z=67Qds3~4&24Kp5jDp$3Gp}E_&@oTYUc1xYVtO~PDO6FrEyBNmxQB2iY;=PN9rEBp zXioC4}j!r#y^ivG$;0;Yc;{Ut0vok0& zM-Sh8=ff&=({Ww9Cn8AfAe+dM(UOWURYlH{boit{Fs6g!^#Rimn?nGUM+^|q59q!^ z>sHaMX-kxmEk&lP8KoY?Rhl44i!yz%lN-_c2k+*UJH?=12l=+ddlmnSw<)9myuP4I zF`H#7e)PjKAUOQ|3a1j;JQSA#^#Rj?eE?ilN|jhh=4JWbyHTh_T+)CMuo-fypM)1I z1>^;oLf}Fe3z60$Ntk*>D?e0MX0Y-i7$63lO0a=zH>5?0ibVVuT95hdFmSfSCl~e` z-Ytm`W6y{0Ic#7=CONwgGEUwVxT5zUzSzT;39Kw0yNn5=Z@xnTUYl3tQ}|};{syZ+ z_MXiKG?U%R3&H;r8|?1^?^GGTF_5l4=+QLDM(j9_h*Il?R|hRANy>O~Cx>TeKfO79 z(aMC(D|vl*puJ=Q(Uh&|!u(Q!F5O;J#ThQM6)a8a^-u&J)*qnY${s&xfsLDXD6cV9 zRxIch@dwz|qFxhPb1TYR{B^5|8LC{dz+S&Oe|hGlA32|bu_v&VE|Xtid;4}X3~cun z$T83`$v|yZsim_G(AZfQMU$syW`&b$g{{xA7ndXWp3cbXe6&qn+T>z z%otDFApZg_h@jyVX*oAl&)V&+95&$<(fp%a(T1U%^Bp{tOZ`1Ikqhr!9-_LX)*b9v z5rD5X^X%aUIv5VqfGW$7?_|Z0OkF^tbbZLEfpsGGYZ8SB-m~`a4)Q!lqr=Z_WN7P+m`kzZ z=(vF4n!{lf1K`rkKMTot;kl?8NN~n^xJEwIT`o^iE4V6!08EA=sw*)9G<@*_boaY- z?!AHL3=D5scIm2?X*;_G|lRid+Ck8@j7~H_|3ZSP-J2bpqiF>UfoLL^Zm4haYVARvt ztB!grqunA2K;vRi+&pQM&cGM}6=(`WeiD|llP^~aA$I$Euq18=2B>BdELv8tm}gBqnacKILa&DS1C8#M0jXn1W|#QC4B8a(l;M zItCFi^@p^eNZcJqlL)WDaptex8$<0XW3g&+U6I6&kWfc>aZY4Hk!G?GGqgy2MMj*s zj)mGmNYOH4Os*_9`_hc-$s>Uif(%nD*3A~R#|=8eM}QM#Uv7vtn+e(vH|~tYoN zI!%Uf?u-3Mg#uK5$*6$QYmXu?!R{r)lB*FQ!FkNU$VR~}ug$fl4~?%G1UspM+eC)g zPh#%^)9-OM^)9-&@JRtl#efQDc*>z!t6n=-JMXV{-%tEYKE>S!>dy`^fDW2gB%5ND zh))s;-0(n$Qd+oj#?2OsDHPnAc}^oHk?{L2vj z{5$SmJ94$OTB6*D&Ct%6M_!m>_Ac4N7L&2ncARatfd$&)Q+xC8Eu@zTEJY-}8CkLT z3189P$Duc+6^~&{1D_wExvYTr0CZemUpp>A 
z=|u5mH{hj`?hnUDFJGU%?CjvS{Wc6xGo&_1V9X*mG6gVpukoV9y^KB1)uHd}*iR|l z0EUUZc40}3P(SDoW~qT72^EY~FM!NAWbMNsxmC`d{q(r4cx zkew?{KB;B>A&e-zoS|~=5@Z#AVKDXL3D6#KK1`ty_!D3H$rpy>d_|cD?w|YT{`u0+ N{|~VGlH>qb1OQG3a54Y@ literal 0 HcmV?d00001 diff --git a/pkg/scanners/helm/test/testdata/mysql-8.8.26.tgz b/pkg/scanners/helm/test/testdata/mysql-8.8.26.tgz new file mode 100644 index 0000000000000000000000000000000000000000..ff8bd1ab402e355babfe8592158b633c4761061d GIT binary patch literal 40449 zcmV)EK)}BriwFn`iz{LP18sS8acnI(E;ueSHZF8waschUYkS*9k}x`-`73&9&sg$^ zpeV_5Mzfx?vg~B^B)0WRagww1>gu{oZj#@E?$OwdQ|HE=1b09shj#ym>jei8@j1?8O1jT?1h`U12^$n z?$rO$ixVhuz^-=S=T!at#@TjuzG;oTsTYpCaOiu`NQZ^)#5-WaXflaHXfyVvQR1i6 z>NriO$w9X}M1o74fuBNQ-+{7S@i$aR-Ae+d!^FFeYU@gN-E%%VAED=D82i(dUcI_G z`#(Q0KVdGraFcWkIu3ky5O`ZG8M*=g%MC}&bCVmEMr@XNECSG363ybFhvf+qZU8-v z#Q0fqlce6nVK2PIzYM(*)*M4KfyX9(g3xpThSY~Hw%FKnuWmX)ru_si#i8mT~yB#2Qo{NTkM z39_s~zp1OMj7Gn4cJ^CedY#?XXQJ`{0Fb(lK5F>?_U`Upf&cG3x#$14@vNG6YYe&v z|7(r>OD{iq3#Z~MtNdwWm*Z~K1DH+cB^zjA|_m#j+wtX%)S-R)klmtX%+p6=YQ|J!&TJYfHm zcw9{RE{Fzhz@}~tOaM0gqy=Ba%NMlaGJoP;dbtuO0SHteT7e+xv2^UF@E?%EuKgfj z5%7c9AK`niPzd7|T{UnWVw)oGJSrD9g!x-cyk`5E#cJTTr09mSq-TV)=Ali0}Cal!z(AP`n(&2yQM{vMjqpUu;krlu9ZusZd05}zpP3tw;yIpceR;`fd>1WW>j}Gl%f#TmTIC7Xzt&3Ie~!Hi%TE zBuk_QM*$>i>Vt5kcSdk#aL5KNu;8c0a9}t2wsKtwN&H2Gz_KI$i;d_fRyV#M$l@s2 z!I?&DhTu5Nj(+T2H24*8{)@!8b}YnkxFcGKfK_qIiajB(vyEgn=x~t$h|@zZfXg=l z;9>c0I_xE`SKP6Qf{oD@;5cBve=flJgFEnoB8UNhy>94u?>WG{k7?|_@TNg@qt@^t z{sUYu9rMo&cTTv<@#K;3UWQQu2V^#hU=fHF#<4O1W`r$dgmnMPdO{@BJ@qY@?mmAJjol3m#?FtmHmp- zyVSQJUoX%Xp4<5uoz4e21x6P&JY9{fsS!u*ds@r3-BgyAN0VH6`MOX8dNqpT?+t%4 znWaQY2oy1i*u#ONI5OHSG%^JaeG&!!(Cijpvnjo|R>|-9XH#$JU)(UE$e3r#=Z%iM z!R%5s<6kiTB8XrR$vBz?BP47=#kj<%+5w&Ep(ZY8+Llz zop@)Llc)s3%{FhPLay;zboy;FDSZ8jSd=gk=-o<^gNTIgLI z1Rqyt8v9q!k)o%r5a2$#u)9dUI$cD5n-)YjoOwP?X*~0AMsK1SyC%{L{0|;}Njy*N zhQMc{ctYDe!`&O`l(%sVP0V7DGoBxw{S!HK7ev+9hp&z~MPH&QCD6usjR1(gO7@(s z4mTc-{S??3;7P1dQn2K8F zd8p;WM{@_mH%=S@b%B(N`c|g?K)D=;*2m9%JnF;_I|f#dXCFYR_YtN9Q6LxtEEU+? 
zu7J8n7Sd^`F?|5b3mfe~&mHWzGZpZ3qOfz}$4T0WXQ9Nj4taJc(8s2fW9-5tP5MXJP4Umm`A_0pM)9#FG59JeETvyMZ4M3wVPuh^x2M=L$%6HU;cx?Mg9K z5vvKvfqD_el~5}@*&$cMpBHVGF6;=l4IxQtxl6*BUm}SCW-Q0riqY{rb)M%LDcka6sm#j}pFFLstbhw`J1e zRyop9?D9EY9VLMq2(cp#Ff#O1E8pTTnl3_v+_Pivx0#PKCIL~~zfs*L>PvtS)No^GyZI=SU zIgkm5IuawX2o|O2gTr%;v(&c^qmSe&1g$=9;eEX1gwp`(%;2nw)W)fY?@dGyh+Lo? zfDsS2RJfn5us$w|zP^7No#;EQO)#;HQ3)sr!vX7Fp(xoEa#lAZYgueYYu;KcV?az} z3T17YTmz427T`>+0G^{pgEP93)+XnLPiHDO=0YPCV*K8k^rz}5V3ZRD5N{wz0^esY zFaX>H*Z{|?BM%jGdf)Vh;Vy@--}9!NWPHFr@h|W^_%GnIVHft8v3lnX#}RA4fRz)0 zI5P(Fb=q`vFdDBI^btv@uiPoi{!)4mbL4CTq;1%|q^1kV%xH>>V)oXHibBq`PhyY| z0e$*`b;{qF0-1QJj_&eQrc`(%erCx|(@JGf=u8#aNH*C95ZT2n;LDCP0I@FlxkITJ zrcTU~_?o^$l@b_IF_4M- z@jV>Uqib&jxS_6l`>*&I{czL)Vg%eI&$b=BX(YTFXr7@5pUIA$BbsuoBIsy(qQ?;4!9j-1L#0g&O5zo&4 zn&rlf!!@U=Fw_-|aw=g^!?OchI(x9CS?}KtMpt*9WOVpj!qahaFMzF8 zz0omb483~RQ@{ytO(BIGsz+}#rw*#2} zH%XyweE_`d9Po3jsN+fLP((R7nuf@?4M)s_{E7D{d^Ek{s~vr8astbaf09PBjX}P# zZv%`Iw7TVOzB&xyq_{3>7Bu6h_yP;uvafS)fNxn>>MCByL;SQV#VB<`6?IB?A#C`} z3jkw)C55}!YU;~&(Yev5-7*sANT^f9pw2raD5dbGQQ*Z=1`uR>uDx6DWTTECrHRep zQ1EM@Wn^l?aspdb;tkB)fbww&5mfZi`sv>tjd4s0D?L$RxjIc&qR$Q zULkKjk_W$1%BpYuBKad!hd;CI4;1g6CBjYB+yW|m9&ys72wlU_K<9tO~qM(kJ z-iTffSZ{l~XPcNzKXfOiM^!D;$ndXaQ&l&%BYkL18;@tvqT>PS^wdgtsKzGC>);kZqiH zYa(@xnK7L(ZVEj2%HuY^WVwd+bVEOy3GjyF$RB1$Pkz;SPNnhEzz^}36SauLu^SHg zzi6I^D(=X*EJ^smJMhyq)3*s=wpcO)IRpTB1v>&_0d{e2fiQ~LHEdMo4Unof3;B5Y z(flGpfeIu;-aN>NSjHXwiUu4|V1hk_9@I=+v=~Md#NH&j;ugt30D!_l>=Jq|@JRni z^>y&bu^($?0f*0Owojdt<*7us_*!I0nTc0T`-+5eJ$^0o!@9J<;Ax0Dov@wDV;DmCw~$^#lmxC!AZxMlWUwhhbC9r^SZH&pCC&??g#>%_|XCs-uEkr^KwR7W|=cY_yR z;G(rp6oOcB%-_M%hnL7+>8*~IrmauC*r)A`S~#I!OErkP=V+G77SH+5rJB5O{Qxk2 zJ_f=bN5P1F%U{6CmTwkm7Wfjfi>%o*{-U|O&XYQ>CE4L?c$lg$p*`3{ze-vp!pka zxP&YX(>A5Q{hAgLdZMMXDZzag*O3li;T)xXtFJuY3YxF&+SM~(a}8F`d<`~SHS;yv zaK#iFZ878BT&tzGisd4Tm#&xmagOfcf6%wdOt>wWNtnW1BO%w)L8;lUQ4(W!d2ZVFu zNI$|Ai*FIso?$_jc`9Zg9I$q$GjTs6PlTgf%5UP6XK)(dX7uH~_=24HS?|e{wj8q2 zi2X{3io6W3estqZh4+$pyUp%T8g!w`QB-^ZFq<7F&3+nTR~W_?Koaj)2T$JMRBUlm 
z={`#~rr1xxp_Uy5u0NSyPotVnJ3>L2UB70%mfbyRMe^IS4s(IrgpFe&Emc`R8xS84 z4X00<6*5cRB42E8GEHw@_;E|S&nWI0HE>Eh2kb%3DH_YlX)OW9GMjQ7vfDB{4A@&z z>_taPW-6%LAR8moA+5NN(nn{wW9PKV8O`vimM+kYdz_2Lk1vFI?P$yJk>(zj|E<6J zhgL^e+O|7wTeUX4y&n6+vGj;c*yslBW`^8s9V2&eU6^cd778fX26rW-&0`ZU97R{* zEC|r|ztA0VZsw~IF-G@4!*LW5xoPcXy`!#>ZCp-wkXjFaI@=QNH!&JsW-Wd9{mU)< z%>KgOOug_7;~H)1lU8smub(s5W{y!ecS9myt*Q=$TQHlbg@L3X^zf`F8(x-CVWZcq z4bCudr$7nfr`S)uH_%NJ0`~u%SA<YZ3jhu;V6oA3N49TYC<)U}Ai;Oc!OFxgsnBm1w}c{IJ;(TOj0hOTolb!%^mrtQ_S>BOMGZrF$*gAu+LJ zwp>?@~*}Ad3%@5=C4I%=`(K8tec_Bn*2-IMlL?xUl>$@rYij( zv?9_VhP=%j9MXl*M2kwl_A2g&+2VE{D0fsoxL&w zG-`7;Uo`_(+3|!A4#XHk@1d9F!uZ6!As;W{rdTaPKCl_#yzrBFMq597J{w)~({N_! zGmjcMo`w3<>1yTCAUlUfj=KY9b5-pS zm{bnP;u5$d5#)5m1>IUD+?iWo4(uAozzUdc+MDHF11m>ibq=%+sM0-f&Oxo(L9p0; zrHi1smHdZ037X{5)J#Y-u+oRj!A2m@Mvjn(p@ALGQ~u=-KD3!bPuY{+4jk zD}kfnrdQSd${h7dh&glBt0dxv&Uz)CdV=H6{z(%?OFDdx!m?L zP*>r&SB9+`*FEjXv8?l683bkSdzq2R+zxyi)I}G*436*BuFH~zeEEv~Xx@C4{WNy# zD|V4{?5phN4te+$2a@;ks~pLioc)SWS>63I(EfuR{))Kzk9GO0CYvRk{xU+lcl)b# z`zy}EmvHz^hwuEhDTU`SIu>)G7@Ofcqk05m&YkqclBq`92{Z9pyW2549# zSYM|$pdb1@47DwTSZX&@an;6K)s!Z~2AT<_58%OFK*9sX94 zy+K!}5n%Z_dWvCT*aMo7f;)os*F{gZDE9x&*;YJohcwsthdIOeM$YR`V-L`wrE_L~ zq3Bjbu9-;-j3kWG{B&~mW03i4E(9^Yk!w``+SA_WxHB{=geS588{itB@|rZA7+>oJ zGvL+PFV8j0x*!4Jr4UTW%11Og0XaJ$bu#P^H$odsPEkV3B*KHC9LblpO8YsC;uuiW z5>hkCXxz@4ta-<13nrkDsuFSR*3LG5LWf~5RF+mz=x_iy>Z^A~rImZBVM!*+m>p zXglVJ;Vo?BL1ZHjwHG3%J9NIOauFMdv8?9YObMSzT+hiAZM%xb#)tZ$gIK4oQIRd6 zN`PL|y5w!~(QpX@&=C{HxDW{=!ToBGiF(i$p++%|3fKV26F(tFk*N=V|14%zOx=mv zhbUfJC~}Hx!+(@y!18X%?vHO>s3?kP-3?L875nZMwE#yb(d^h2wsDt{+uW*)2XEz{Ai*YKHc( zW8@0>miJkhD=|UP7L?N)1HR$$F;(J7N*QF`IPmnj2kB8ui2Wd+FS&d!Rodj7a!sh# z;s~VzzyTc6!jVz-zW+Mfe=`z85Fw3o8#&i z2G(z|Srh>#mwG4mE}=h=2+b)H;>H0A25c78=a9i@ZGQjf5p3d zf8$C?g%V&b7q6p~+G?sfIpRtD4uT7s(?NZYQ@=NhjaY?cWfzu8u z^gDPJFfF#*fUC)7bG+=Ogsg&P<*O=2Rj6#`M`m^8EV+7fGaIu-jm6{|F5@J{`d^rT z@Td-xnj=@ECp6uUqku`7;85yd$X4U1Ac~4fg%$1ECcZ4y_)8-9zkcD!< z6DR(s6ZjY2@MaizWO2$7XtOw#bZ%VNm0ZsmX{FVOUg`{{tXU)0W}#~Sa<06cGBtDQ 
zjF`p_#j8u(H6ycFCC>Ps43%5#D6`_urZGBgxH<6+@pt zx2?mcJL|*vGI3-uG&8_H#e?FfJd9MtkGq^>yjLi}up(ud9E%-g+ReqfIxA{NO@hAk z)A4LTp`S8bb#pBVFT2d}uERPVjQax<@3^xxdQV<(XvzmI!A>ov*(f*g!yvkRFAP6& zAHDDr)&?{aMx()d41Y4SG$Bf*(T7<@smmZ5xIx`#WpY?7;fEK|d(^H^Tt^(xp{<&N z0(3Ejjmpw4$`7KA$`2_5U9m!ToCUtD3n%pjo)ToeDfYQxhbqw zuR(>h!94IX4o1=%WWnvKOV>{t8X1-p15xur7W1x<1}G>G`Ekxy+8Oz9LQdb~dHg-O z1kPE1kvfR=#`UgUziv?I>iwRVZcxw<(G1T@*LqnWB*u?^)S!Es(B>rr5aepg3DZuB zHAm_SSh{%c^}d82lEA(4N|_4(O2H9}AIBaX3buII}}@z zx+YdThb}pl%0c&D+J96pZ7EYMk><5XE_svu(!Lf;`C8jyYYDII-u4>3wrd+NdTlp3 zoy_4Jx60$lf^OON>Z@|fwzs~7OSZkir5v*DO)l+@ZEyC=IAiyAMDWo$T;S}UZe?Dj zbvxDq-q@>L91Htfmo%_4Z|jowm+`eOX>vJF>yk#7_p>f(_)B?N@7}^f(N>5wad{Pm zsJyH`^ADrgixTbV-;uYeIX#VNEpKX<)1Qb^YB3yfOO`X^D?63KCb{LJ+6M3VMN`sw zOhNP}14tURFbTOPY1hTal|)he1%K5H2;ePz|)qAZ5Qz-D@7TOl(Ww8Ao9h3rj@_ zhW;*#zAOQ$hC-`57fom8ym_<9w?Zd~+Ce5hDl-}KCm!&r);I^9+Jk^!9l)6;maYSu zSxJt&5T?mi3J*=0cswt+=?;Qu(4DwspZy{l&hV@xqB~A~XEJ(_Rl?(yj=Ve)NVNXb z{qr?GJiaBPxt=R+uPaRheA6$y+3XD_O_oq z`4ih-B@kuL3?mW%D9g5!AKX{+{0=L+{g6+58}E=^v-Qm%x|biA}WKcsTY`C;}3cdaKpvDK)4*fA6OOec);dqq-^tBFv=C zB|!CVSq=~@@77vmw5i*gW!9*>HwsUrJX%U(Fcmfy9r;_WTg)(W@zZuNOfjBl2X zZR67LVvg(3$hcve0#}bvx`+^bQgeyWq27(6Bg5Y^TSxmBRnIalyuBz&Ph@fZOZoKJ zrU;v1s#~LEZcbi^>>#v1vltK@P0$KRrIlcAFde%v1s=*4yl?b&6Nc_>)%NGCn+y|+ z4?(1Bku9+-)O5Zxh3cg{Q!QEhnJzysq94_gwI7YJ(GYMMGRRu08Jkve738P&YZ+@x z|5q+rp|+oE-OBr8MI>{q+_Du|4BT6Wh#XipMCiHTl3eIsvtkK2^i3S{3Li9B22m7v zZfLYg1;jN%TbHIS1i;b6El#WY!Nb)h&}2nF7!9}ZdF+oyUTA3fDL17(AJ*D0ZFzE(vKTBMDINMP=WK(|c` zI!7#me|#aFc+k2CU8Y)&alZs1q<(2 z-(pnW#ME7Ye$U0){QWB*3ARYf{OjA$M_jhQ^NaOY%J(udT8hA}+poGMpKAlotg7zp z7HoQD-+ktL$2Vbx&aZ15w#4B13TWZZZ^Rij+;7GATk#)%E7rK*S~lYr{{Lsx|B3vX zA_zdz#dMcvPWmxjp^mXm|NCTjx1j%p;`jRBTX>HCd3buxULU@C2?YB)bB@qT&!IpR zpFg+w*B@V=o*ln=trz$a$ubJD%;Cw2t;FGUsugNIe#}k+GzjDt^eOq0UypqmSp)0f zHRr%XW;voH+un#he%xyP{(A=%fzpFuJdda$~_z!XPp3JOgN@3ku-09 zmaj(O97?Mak0REfaj3Km9eIPMJ8m-5Pw70*-kR9DhM*2 zgP~W zmm3Y@EVK144kL@Mw1$C{M1T}SLHlk*`%6Vzn(Z?Z=cQP=6gS7G?8*^_;_z^F`);63 
z)LWs*KcHJ4c>C&KaKd?i`sU5~`;)`7v!C9azIgUX|anO8{d0gbZf zu+v!xQZ6zLGSxuNAp?GJn|E?X600)f8)(C>&%5BaH0s=>ix7YSS3H6Lq;L%E`1s2s ze(>zkYY(=QNAQL_=GyQ=UnEKzajTBpIuwUuY{y|HMx@GE`V4NM$z2zBHg8i6nIK@D z@%oo5>&)mnu`_Kyw2)f6HTPnLA?)S{U5s}~!|q^+n?|Ev`RcA6x4y6uo(mF9He`F& zAW@ra120bPWsG1(o7gVT&wA$Lan+0^MV4pOWFfc{58`CdYyCoaM$V$jtk0EUd9oQI zMT(OfwIKoK;{edk7OCJ-Dgdx%X(cWCc-Ws=iXj8|9w!X0JS)c(%P0|qA=f{$T#Qh8 zjbYCltw^AOE_{R~6Q&oe{pX}3AOe}JmW^x*ep%x~dr)75{e{MX{wz#bdh~XlI@|Dn zy$030(%I6O9YFnTdKtSTAUU%83Ez{Ak-^zqSQ*|zjZ z#?>L(PsA?v2p5hcn|!2M0lH+od5y_DU@FzRUg74|g!wPGfpL%tn-afGfy>LtW;g|C zhC&C<(y^l(f;@X@;D>V?ll#HR!G}?JoKE;CEyadoZ}{OfA}h~x_cANkQS3`+PXvjK z9na$rY+6cis|E?}x0nlelXX6God&l3Fq0tI!7~fz*2D{=o;0tKZ6{crPB$ICkL1do zYFNXf+Og_;@21U7gAClZNvQ+tK9_Cna2kmTz`YjnN3>nhnz4 znV6PF92t5`!6+a8bgrQnN4@OGjZw?Suu>ii8;vB23S68Pty(Im!eRo`j5xB2YrvLm z43&s`NKnbLKXS$ZwO>+#BV+vu7I~%0B#ux_OlCpqPXo^(B2)?rW$tDJ*-%aBAm;ny zsg@kUJQY1Axz4U~pttLWRscZT4YfbNv}&)>lh^;ghy4b1&ZsqAaTr3$(8RpR65C7r82T-3$xwbW*e`606n8{A zuwpqGfghS8ZI%qAg3-Hu3M`$Aw4bH!F*6LbWaNJIO_8loXzFV}DW7b!t=DSb(GE7* zrGYK6fq8OJJvZ5;N*WHujhs3&=v0yGMwz5EZmg6RG`7R70w+M3D!9AU1uFY!E&P57 zY9&po)?ViYocz6#R9+mZ{>v8B1_;&-s_Q3<^0l?H?c6B&SS5tS2ZvU*JL{Rn@?vL}k76 zxC3ORA-0CFnyPq;Sgaty3RJI>2!&#rC{Va}Ws30%)F@?V$*y40x)!uaHc_CeUJ2JP zBd_uu#-@5|Kwkn^^~6<%H(u~eXyfu!v?`@_DRI<{l#4rA1}TzHXi!5hraP7o-VF*b zdM;LhlMNUI$SU+Y+1zR=2?O8CL_taBoJnn4q>oE4rSW|mM-z!BT~8f&Au9higC(TCZTwXb?@viGNUwTlhE z_kfd6Y_PvKLUW%wO2y$HqA%*}*@Ta#iWcoU0pnh(umH=n?)rVfW`01WB?|%-{pQs}L)ozGCQ;A!m@hP2H#Um>3R5r`Y$d)&&r+@l~oBCyD{e^T1YBKXy)HRxQ zO;A{1X%}x-YF4=AI_3{E+t!<`Mb^9K|J`WW*C5qO2i?c)2(Pmd-MD3XJOcy2+~z46 zgtmaIn^>7-QMeN?Q%PUDG3Gf-oLH64YgiYlX==I_X`xI@Xq8xwqRAA^yWJpQvQ^yv zBg)tjUc#pd##_Dgc)Zmi7Q}dKr5caC@qxtOL(z_TYhU<+0yM_h%3ijMZD7G)BbdDVR%@ttv{onR>Z~sZ|{_n}|)1CYKzuS0(#H*>ki2`1bSvW52PN!qjy4U&^ zn78Dn#QdcPegCK@zy8H0(LtltRWX2?^}o}5`ZT}(pX}YQ|66%-Iw6Z`tvmIFBi8}D z>a{-j=n*3PIxtU{i$`*dM#-H8ma=kf8Bsk;-Ab6qN^N(0;@=z|#S3M0r|2|QP=@ik 
zB5tEzq@M3Ft5cQHpvDWC#f8fdKk;YL0}4a@MU64Y1o>c4`Eh(RW)Dj09U}Cb!rRaUCI0&+M#SP$=VN$uZbwpYv3zDiTtYEByL?g$dOd4p}S$Y$BpxKt8Z;`jWdl-916B$WvDil5^OA21d z2wt`k+Bmt6xP12XVF`qL2ZyKU^mf8;L_ELjGFGpx1pQs(s))B4_v8 z`fb_^mF-#4XoV0KTm9opAHrimd$dXo{(9cUG@GK?{27K-^e%5W^aW!hS=%g} zqcHTNZJ&DlTA)Z8W?kjcA$$qm($@nP0(=EemV8r2of9`sDdn)eMQOeyOIgQN-Jn6K zayu0Btno3Lth}L|j-UDNQDklM4M}E`uA%xi=6HjYW$dAIE`W2M14BG<(=lt8+}fc( zGtgTyc6XlaL+g2mn%wnnITNwi&dtG1>X=E%3e^|rH7bYzSJEmS9%>pl$ z`3`s~m`T zqmkDbKrCDbq23?|k6S^vKcGKpgVGofffG;Cs5~!PHp05asyAFT-_qZ%hWw3wmghCH z*)$5gIKSa*FINM+-pfj0k0>Er;3kR5D+{0U0%ERWJpH!>p+AR_`K><~KKz@|=8Gl`oJX&pMP6eUofz^)TmqmcM;#le%zt6phO^w0sbt z8t`q9e{1;t$cKaby?1l!T@=4s9nkC<>MV8PyeW1X6A#7FqKh*Xx9r4lJ`&t9IE z&Z4!!_0iX=Nox+OnsG^O0jj#Q1T`-$SzenXeQ|hx`26s!d}a(R%j}6Y6CUl$snB=4_}{c7N?oC{mi1=TZSQ7!Lw*q9|- zhIHo^k_|UYEUFT@o+2rI8@PEfCy%n1|9gIV`2O1;4!=7qA#}@W(`dZCER-+9s~_FC z0Ou-1F{Ay4B3ZTcYf)n9ogBTY{463C=cTbXAzyq4asMl5l9zJ~n7}@hP

ul_p1{67&WBl^1$Ra)POB)3U9&P}eX4cHO)L?tl(#nNzc}Y}7d9P)r zb1m`i?|P@7^FI5fIs@82e=z2_j!c%;B3Sxdp+-Sj%Xsd{uUQ0UnTl0l7Q9~uf5>HU zYEE)3rL1Nl=>ovjMMV`C2_d{SMu(Z<#LxOMaMKJ)q~;nPXeJKXi!bB+U&MD^7yKMFIPRG-;B_&I4rX9Lzqd)*wM}iIUxUc z6`HsLqsAwvi&IhavObpA#mO(E0<;QqrkKkfm z2v}_TE6Ye;6}bMyM*TS@nW~#|=&7Dbu|s^7xdmF%mzM+JWCN6%Q(F@6hMo&K*vUlh zg^#XO`A(Xm(wb7dbU1>YTxFP&&e$p!N8VYS*+35#4;o;)+s);*T;{W+jno#;YX!XL;W06$s|id-24st&5LWW z4aHKORRvn&|J!OyXCVvGq%%YxHN}jy#w@;_*+WQF+eVtUYO@<=QJ>8ta> z%pDHE)>|XbQ#oawjN#nJv#Nhe<3C*uJ7YL5f)JG~8vv><{_m6BeEj#PyHNZ-{?jcy z^JjN!6m&+>PU}9D(>yVf>XNYC#aw1)`m&>7hDO=Pm_KYN|2|-^VIJVkbT$b5Wc)fx z(QxDDa5Tc~F{rLVN10YGv02d^aL#lwMyfN>jJ{#bEq={MY+rhDH+U-e|82$p-F<3_ z|NG=V{`;*wYvTX+Z2u3>_AAZ|%clsfjxkk}BXrIkt713F5^icLnhi}mX06axdmu(& zD=De=M&7=*RaU^0tJGA9B&!FJJ9_R0E}X8gjrxK7pjBv#)1ui(CAhi%sJbU|vz^Bn z3210B+{m|Ltzn*<%Mda52%C)LjPwAfsCh_=DAB)KFQFPYU7Z7PwStl@^Q%pG(iS}h zib03wdL+M&Mjo~U2%dLF%BLh0jcQ5^`Kof@vQSR_Bov2ptZiGrDzs&8w(hLZnV0|K zDDYM$_=%_P{J&c`|L^VW-sgY2mB+Gcn1TL8=u)OW#{+jLUw)o1H3U^Hi5z_jgjXYQ zRwseIPaNM+YP=f}mg8B#b?F^oYd(cZQfl-LYjZoVg3_I;lX*11wY~DhZ15`x z#$qEdv`>QmXnf_S;w3SOk&OwnbGf@5Fivf=bR5MZuchnsFnY>3-^CJJ5TQU|7|?-FkiH4<7BYL4A5C0cT;z);Jat|t7G`} zS)eu2yUzo?_NScxuPy^<4gcTi_4af8e}DTv|JUt2CCc)0tKUt_LcMd@LhrYh|G=%K zBx9&argP*Bt*cmts9fFo@`lz|w%;w#^xalgvW$ek=Ht(Q{8PUFuPy^<_5Q!t>pkt| z&wu;7JNNtlZ9MZj{Cwejq4yj6x^jkUo6btJhT7Ww| zD^^~k^rBh)MG}mb)m|vYs5TF)N;2BCNrSLgmu9q9T_@`=n`pF5J-l41QG-XUA=zkY z`*$_nsJV?a+)8d|JDD@#XvMa2t6R$#NIIHl0{d&8;FgY3UeNF3zqkKk02E zSO?`kM`_leB}b`oH$GpM(#jdV&r^Dzr?h<98c^MWnM&(hzRy)!G7;;_R$A=iKRsWm zcHmn>#?oBtfB2lG3nltX0{6HB}aIXf>MUsK2Rnm$h6oSwgb{r1)K z2H6CyPM3`kBRZq_!{vz!DYEcaoc~ZCdyNaxn^R!$#g8Hnj z6>n$vxgqa!L*D0xyw44JOSvI64Z@1ELt0v1SANJ9otajZBQg)pyjddea8|{+M7=MP zR&llY4Ch%u>quN!()yyQ3w5%Y+t{~ZT0g4!)#TpGyYQ9U=dUvVUVXFc&A?aE|J}&J zX9sRQS@^7dEs}??1n9M8;;ZRzxm?Zp4w+b+ca z+TY&4kNqwImT z)ohv^yBBza41#3?nlp!BZJ*RYeHOuDJ1fc~*l?W9#SUTm8O8K)&Feg^MCC< zeR41V-^!E8|M{4bJdz}Lx1unT!Y{o15P&7`lXopA)i)F3??$j&E(X7uy1&glEwM}C 
zR@HV4!~%|HqzD}i_V>JSXbNP!wW@~d+nD!=%PQ_~twYJmHH794-G&tTKZzu+NdC8y zZJ>_-KPl>ec6;~w|66$qr*T{E#~Qtl%+%LR{Zksv^PcU46`vwBTQY4b~JdG4Yz zN}f9#n>-2GnCGf&Zlr$4IKKIm@P9z74~NQI;s3jPdqw`gw|~$7Z{x9O{qlY~`S*fE zUI`K@N&F=!2=hn}qu7gRwRZDchiGJ}Q$vXra1PAS+(vSn&0KX&i?n9jmcbtX(0f?Z z32{dwN{!$K3&Cv#yJ=S?R!m?2kxbrDz|U#qhnY^-W^CVVo}d+Y;RcyfQod=jbc(eL zc74sjNQSXH_0Ii?7tJ&stMR%CKqc)g2-P?Wqd1?|KzrE)B5NB9!V`PMKQIG{))+5* z%D>^KN)L#tCQI`kC|DIcJS%3jA)YAhG&@!7vN9b`Dbrj!AC6ofcTY}R*TdyMXY2(N z{}SlXTSFf;^56E8UOxZ#&XarlzuS2puoE}Md(i~g0pldVuE$=;1~Wex!J=odDnGcF zuvS_R*!kE`STdW^eIZMLYyyTh%zTML=`Du*BK5C4@`cdegFFKbgx)20q1l+m-i7}W z1jlP1%Kg`-!`_6!4TF`4H4!8>^4|4k?G_x}e) ze{^F%nkDS`#Y^aL8b`kZduloU$aA~A9K8A(c?Dnpr!Nm*yn5+OMr-M#a{c$7>_2(B zms|h6r@h_#^?w`BfB#pf)4@%#BOLp0&i>C2tkd~N3#gv|5bJ-$1~=?E_Zn=q_$Ocn zF1v7(l$O1Z2f-~6$=rbdg{Ml`-EWY!%;3BfO@Xg)7Y@NDP$nn;oB;>{EV)TiZ{n~Q zUII%MdW3poXe98M2=(3>#<0wxgDp1p+^d@o+B=}N!Du!lwgMxO%6MQ{t#@y(P@3_s zS#-gN^UDVMsvdN^*VosMxao=}o2}M)?2f#N8-D;9gR{BlBE5Ej9R}RB4@Yakg#)E= zI4F7(-%C|PAWwt{36%Fk^f3o0E<8LFGi*1y4tYzd*#HV8Zs>an#~@+u0t-#pDDo0= zw(=)aV3uxpLy3d{^)&h@MzJM|6AycK43C)H(|Q2h{D;54IBm81{XY5SKV&EdVD7?* z!I6mx7g2KVO#oxVu?t+^5wGbyFaDNQ{Sa0YYzdQ_PRs>6W`?MNu3FghahR$(1JonB zsdYApqBH>vPLZTB^|HydD;a4U#YXThxF?45*P7c=LP^7Yt!xN4$MsjWlsM{nA&C)v=pE=dk!-U?~6q2uPzT zHt|k^nPG#SWXtJv1_V6uOb~Ioq2Rp9HlBZfa=M8N7NKPL3)k$W2hh&a2sVG{?EHr_ zHuU1uzrZO$st%&dOIidtBBdvy@!kM;$S-1`my_5-KQBM=p@CMbql4RXdVl*XyrS{$ zIy=rb{yO;)*}|2NTmxoM$SiUl&hP{t+|a5HFJjUKzVR)` zEKMa}zntEYjk12W0Qv{FzrcVYC7z7Kkq8IvM zq_JN%9!%AbO*9cAdO=p1P&3(A`01vS{$;B`7o%Cs@lQkI-LzW&V2>Z4`_spd57-aT z7{k$up(bJ-_6Zf(6=x=IL%(doX7W>a1Z|`qeb#43TVTbB5_PiT3cV7!0NNzF0>~s) z=y;XqO8A;2(a@*Wpa_Q354OR(Npvz2RIhM|KOeB~$XifDZL9T(QGO`v^9eUn*f6Iw zHBFvRjC29)6SUhYdsxM{>KfE;pZh%W@JpxOB}`*{)91a50lpw+&Vx7sMtBiBy#00? 
z>K5A7(lz!A?Wz{}RSj2Jz#PG3z`h*2XcXFi_e-u_;rvNCx%$k2_e}dlTG2rDBDo!8`Cv^V#T>O!gpuMS}iTO!l@u%1qpC+6|l=fmcl=Y2~oTbEQU}K`> zy|7f_m?=s!>)k?jskd?MBfE8d3Tso~^qYCC@%c5e)|buZ!P7y*GR?tSWJ0Z=3FiMI z!Xftc;zv?$qE8F-I1gw742L2g^#y2oJ$PS%#+QwLKb`U&75{ISfjtTpZXGY_PnHz`i50VC%K?50{GN-orrp-53zTAd$ZLznz$*%4xZr9>{l^}Pk+xF_7GN?IC@~ED%E>|iX z38hu7Sb3KoTxKM;!il@Dzt;yljS@UWYs1YCR+hMJ@aKDANXButFW+Z_P2J0Wv6?*ec6!^Lc)PC;HVaPM z*Q)7*#S(1FUD@vP14BB#cpasb+d549>b*Hwb!Jo5&ZgebzqnyS!B>BFy8};A{t@r9 z!BP&re!iLAt4P?cae_i&CrxVfLN|OS$tRAaY?k+Rv)VSEL>wB@`Cyqdqux*fCv9j>t_x z1QL`4j+YLo#~j`Mv=?P%@m>v9-rV%fY9wCaKwZXdX|pRHkO_rR`5k*cp<3{LaX{H% zajHV|N(U?H;7-nO{me>8xm?kg)llD%9|Tgg94+j=a8eBte(fO&;3XQa>L^nM ze(-J*uH_nGMq(isSOsDIHSQCsb37mDyn6Cx?;po_ArCIB;%TzyF|Ih(|oW; zG7@y3aW%5|nQ3KnBi?|+6nftZ1zVCw-t7y^vQiC7tQ!0bH4@wA?Tk<`kB2lEaYMqaL&Z}uV7ttAWn)m zX_h0j0k~2(FA6xNI$0^y%>c*D9*2RC77eBFe-bt|v@=bk5yMKrf2mf@pci`@0B)Fs zlz=XI7ar$L#1bb{ha{fmi?*xf0k`lS1Mot0ehct|2}PQCo8!U-I??T8)&ux#I>I0v zDfHr9-W1A;X5LaMmWW~$pG=+5gb}BJ8j}PfMdRMQ@B;vzMr3Zod+rwjkM-{-cs#zWv3=U;|PU66v|06dKXFRo(Z)XaHS>b3x{BNgcM%p1;Ppo1gs_M4fbWX%Y(lth$d z!b5mHyAKDPbOrMPevy0^M>9>@`o!o3JAR?$6G}{l42`FtdBimsifHwHz;LA#lyCXlcOfxJm}NOgmEqT>?(uy^%JAb-~^MaBHr_e!dyORm^ONYkV5e zxCuCJkG>nc@B;VdjKY%Q1bkx0{5_h|yEh_WDaZGE#sUC7=@;LY13vX)pZKEySvGS* z?{5L(4DjA|0rCpKQywWH2RYEVh@fy=kR#v)xT^raaQ$Ewd*@?Vf8!_+#QuqW%ijYl z6d%?MIsmvk7vRZ^(q$C@KjZIj6Yx5W+cS{%bIk~@lFKljQo>c% z3HiJO&m0RA$(`q7O`L5H1_y#H*bPox_kcx1VwF2B-^Yu$EG~%!zTs$gmo4UUWTBHv`;~o4Z&+3R!v%;{sg>^DY^k!Dao-{r$%*U3cGM z#}|wY-L@#Ejm-0%1H4wvtOG9Z^{E3J#?i)4BE1AI3$E3W8b5PlyckSCHj`7{XMqH` zeq1KDW<1!y^3R-ryCrwy^?(l%*~U%z(ty`qiQ99K&j~oM$$0wm6+Fle+qj+o6#J?7 z2GfQEcPu9r|KFBP6!ZBvIF3S2b8f{pN$1H$1+BrGkWczG`ei6WV zW4TKstBu@(faCf2NbPBMoRguA4jWd|cS1XO4Y9i*;257_SRr6p0T-3l2YVC1wRCf3 zz<CXjuE^T0h}U#tpo7g-R=DxKvuwI8b2HGRe+Q2 z%F3Z{u;mvLhFT!60z6AJ3jg~OfSXA}`?+RJz;g*k{R@-g{e<=P1dSauSP*b6_2XWVb%`D}!78p%l&e=ln}^vUYQQ~-S9hlf|LPZ45m(8J<{O@1<) zuy8gRcySj*${`Rto_S;pR}(@aDB&X?dlalnh7W2q3k`IK1i1V0ZRnPR4uHEK=LcMn 
z&BPsHR7%x;L%snAl#OZ4(!6J4;O5LXKFWLMskoX7=|xmAf@gH4_VO= zz3+zQV<@9>-B0LDCY@czQwY2I65)JS<1h530`%B>6*0V2@?xPQIl^;7zZ4IzX)R<6 zK-FJB$I=}8r3*R(|86?r2=L3ct_=Ludu?S4ZaELAM&Pg5cWYI!H^=@Wj!+qt>vDyv z+!HeJSLqIApnge*sD|LL%qc2kMk{fPBJj%`qcq03GS{fH6~o^M`zv&i%J9DuBlQZ= z*#y6-o0I`xwWE~A+ScGImBW5RXQ>?cweC{Ojp0+d!&KJrJT6li_zj(=mV&<#_UCb& z%7I_wI#mUGlV?%zOWmie;8!|OEi;5>Lc^j?R5?DgYB#EeoM#zVs`ZYb5%|kERIMHS zdg57?Th)s9ELM5vT?zgjaIO;Im$~~G0&`;*t3;P5*X(4~5c|tGTG@uPQt<1szY=#V z%Q$XHWLuTf)#^vk2>iS0dL_WGr~SL>fF-~$bC)Os|E{=U3GmB|ZOg#FE6!LF`~vjV z;NKOOECGI*14SA5cf~PFfM1XOyXl@Kz^~^#chgBrfM3sf?xw4j0KcB|+)alq34Q_k z8u0Ik+m-;op7Y#I=Pdz#J@)UW3zq=Dp7Y#IM=k+=nKM@j_;<#gOM+j^dG4lDmju56 zeI@vJ#+}Ueo@XrDM%3Quycw%n^{ydIf z8Eor!{c3!#+r<6rP-aRI+3CCrcQAFszT`|aQqkUu6WBrnnH&4-aRpm+5}MsCF6|Iz zvF)jG32Op;Qw8}P-c_0of9?KN3t+#_$EphaReM=Ay;YyX)2a&mRr^|T&Qs!TRYE*9 z{#KR5vto}c0>8}Xss#Ky<8>v#Z@BBO-}lP6xohTQ(|p^yEe|Zq7EnHvM&Pf(4{HHT zFAV;Ays=g>f@a{);ggjCrDd64n{(Eua=)x1_)R>sD#2felT(>@RuTLr{#ljauiR0q zkqJCSNA??zhTXQnbyXCzl zz;7Thui1xdaWkb)<$hcy_$%?`%E7s;FIPh);mZ8EY~U~9(be>@*vP9ZXM$a4Q@E0+ zjP|)_*Xi@aqj}u97Ixms{IBYKuGpd;R#`!)r_5ROA?}i=tE8gwx^y{%codtn9mNy3-tAQLw8^9{CethvdO3Cla z@R+X!o!=0W{BpgIMXdRyl~04X<<%YNN9> z_CCbH2GK0eViC}F##;n1|JK{yvw~$EMD4{u zVOx;kuhM#^Fo+UyBsT~$4LFQ16GueyvQ(0R+$j_ekJ~ ztb=H_CbB^&Rdb=*yn{GfkxcYH49>m@Cqt#V+a}DAy zdDHUK^-j?{qizs)$){GR2<&618pK`ltS!65tR2K%@~@>qSp3{-2XU9YZ0DGXyX$K^ z2Y0%=9=DvF8ZGR*>vuZ`ce=aYw~McdJLH2q2Y0%=p15<&ggDx~ib7PulyRpce%*d% z6lSMF5ab?y!`Pjoy|`#ucz9&TsW(%M{}jz}YHh4M%MC)^VfKy2q@n=trBo5-hvJSC zUz-gnNtXvP9%R-Nb3Asto%%ueVTx9O=H;_F2w8-=sWes-xEF>}n6L|{WjV7oCRl8l z33$aIc<+K4^;Ki~AOr(r?ggG1w5gz~Xf8jg#2#5lVQ5ZWij{-l_XMyQ zvQpB4PIwuJp};u!rRS1^m`m>@2QiMqD9+o4WrNT@XBQr77%Lvb!V5by)YL{o;k5|Y zs2^Zr5XvZZh93u<)->IYA10|Apz$CufE!rl($pCn61EW1dP#Io(@J(j6y>)1c-m^= zq1hcoSDto*#3t@kQiX8i6aJJV?*jG%g65r}r~YMQoTk&{pxeEKUS|Ug@Z8N`(+b@u zRoHacx4^q5QH&e{So4J2(@X=;O;EzT;DbaMmhDO?j^W)5{X=BZROrUQ$XhWm4iG2WRwOUF(eMd=BU2~6YY8P^a`#m_A&Fmh8Dn)IHz)9HKpgIZzC 
zKo<6`PqK9Ig{~6KU!;#c_R)Qf4B%vEG&Hk+7+hwU8Qd^mnD3(Ynkh(d;~KHP4qYEG z{tx#0&H2j%hHh*;?*4F$_gy|x7Dg)fMqAh-{6^cjX_O>BT~tKEii1E#8zz59v-l{H zE_VRL2qpvM`eciNkTYiFzKNDm6K-uMN04lZDgGx+hQrSFwiBauy-~sWM=GHD7M>q~ zV8}MRU_g?vjgudfO=Dg2u_E?kufrRH!EPaJXdhS{+F<4fX$RJtJDR{*h9W4#c@fYN z0YLr*Q^vV~+rxm&z_37Yvjvm!uftbA@Sxn>Dw(L=7ltq*5~}nX($@aa4M~g1Ezi+R z-Y3HvqFK2zXejfOdFMiO?A*AMz(%S6V2>Z4`_spd57@t=837}4Bc1KvX-)jHq5asz z8*vr2oX;R)TL8jejJy8iPwOY?P z0=S4Nr_*JjcP+@8T0m5y=2+9IB9&6HD_kPnV3HzxqHVk?2TRt!41qjh!j;7qt&Cyp z6Wr`-7C=FF;0N%+OStE!!{_lW0h2+^nVRSk*k6POkW!S>7q|q9mMH*~@uV%~=BPKE z8wB51v<*YMmCZI3po=byL~lpCO)k{dFQ!!^4oGhLj>wU2>}a%@3;Ssl3cjW-aa zly*18Tpm#f4s=B3bi?P8)-dcEd@p+^$&o%TED(@cD6Jle3NB*tv3?@x_XHYsOv%BA<2% z4Nf(F&Kk+Cr?Nfi%6BaQsyYUwucK7#C|Tm1NCrfy9;FaOgfT7-Fyxj~C;|7KWZd7v ze}040B*IO9I+y?M^NNX=jSAH*0t;$U0cQgi2n-d`&B@-6vtAtb6YXTwkI|C%(HqXN zDcmmvI?*9Au|s3jjl$r@Xn=N8aZ^c+2v!aM8+bz(iOhr`nN8`w88u+i-zCK>E+RY< zKjaw{uiAn0kh3PdxZ$=NvoPc%-V%E?Nk6;^EP(rf=!7#lF(`~3FNZ{7l>yBQG8x2= z3xtk~O2`9zQLcbjNOsTxX3&wVPH~Gf;XX|ukcnRA3l3ZE1ugEed&Rkl$JJU!z#Fww z!KeY3w5t3t+aW3UK*BlA7Bz=L$q0^LQMPU}FK!OKzF^LLz{ia^ItdPw<$(c-J0^-1 zNY_j(vQo%Q91^fl&S}Zs4)D}Kvi1SBD>y2?ppyXo)IVi!Pn6Bg0q~w{_a>olNF>xO zKC)?isoI1`!G$iz2duZVyT^q$7%7OOF)}cTrz}8E-sL{W7IhbXony!`n`OSx$jgTg zqZ_utZ}IhJz!#85p|}Uor5~!9Ms)Zu~Ed15ky0gVg~^bAN@paYn(7fH#mf$ z{7%OaWaId4a3c=`4n~8r!~oHP^l&l^60*&Z`U`&UfljBh7^E$fa0Rb`>7$tQuye#Q zYE#Hf81*UG18up*!`@b=ufXN~4rKfp>PyCOs>W)UGu#}p(_-CbQq_g~TQ}Eg{&zATZt`oetYN2LO7IZFuNr7=;Gl37(H$cNuG6?E>Xz7 zD(NV7lMhK(o^oLU2rX9^_#7}4b-~pUo2j;6Y0O2&;++sqhQR>Wvnhd%` z1cczRRRi{_=R9G5!GHES>;tMal(KPm%6&9D7 z4m5em!@ZLX%@oq#1FGM1qad;SsHP5x3&L7UqQHaGH6n1BDlc&Tnn0eB+b}X`07qTN z#I}jYXTSg47Cuuyw-4|K^gtlP&!5}cxPESb{@muA?k8LVCO!m`x0K_Ryoew}A}(zH ziZ~@f2g6#3NgzQ26E6CRM8=kF77ahQXIv>OI20WC9whCQs^K^(BEkkZ-{iwIJ_y9 z@eC+HMxzc&qumFxw+<>nj8sj?feyJ{Yj(!T1O%zw5m}LPTN<%UZ7^HKR8V~Bn<Dh)Ev{aLve(_{fpian4>U1CFKZ8=yUGw=yb7%)^42|4+y$tz~-+R8o@^I(z2 zgT5D29clBQa6&sb`9CbF3X%}#{gLMee0lJ($XyO^kHkt9Qp=-+6r_BB#FX9<1GDaJ 
z6`L4ef`QbP>D4=^C+tuM9Y0BCfKNWMx6{iQZ=NqvYX*S8N`;k%M=G&!2*Cwaxvq5K zITK|&=52P@jFy_+n7~iiMA1R^C#;S%DtAFmTP>BQ7k&&p4Q~cv)}slXw7kT^Jh(1Z z=}tAJvf6EM8IT&0iaa{Z6uzVbQ#Br;tpwjxPPh^dI|bUi7DrF1xhFOAEEGZ?SNa2M z6Ivg_i#&|rpbiS^TUka^-x_jd>3zk!(>HI<-=7?wo&EIY^u;fH*Z3s%KpZfg!*C*J z1CZO&St>X{06Q3(9sm;F=NfsRv2;@SvS>hJtYXZDj+f>^91vk)e&cLA+pSiIoq1PC zin=Q7+9US|eqRBo#!w~GsprPL=?nKNqU^7ziHbrVQItqw;{FO>gz}0XM^K!FMkfh- z%=yM+nuW(n0O!xg{1iIyfUYpSB#c#E42ViXZ6Iq&OX0^PD90r7Z0WEA%e?SEN<~ng z_bJZx*kxZF|F82Ul}$DkRkqD2#iFRjl~4|i=}041U4uwii{eW+groULBVAgR#t{yi z>5GV(M7c^P%T_DD00Ke4O>>CvyYX-gLIi0D^eX)7bV$1s17Sf+jfa|eH-PC9nj5Wi z5dUCMw2VbS5uyNh`RlD?nDE}#+^ zSC`l5K9Egx_=`=1h2#UNRwkL#4^z_l1F`kZji-;-I`0C15K{m>?5YHeG@AND9eN|$@+FVN!v%4fes*l)+H1)=#qpaT z;Kc<4(&og!MER6oaN)V0bnnEMVL4p+(JYZ|ICLXMgWl0)DcZhB_&AVD7F%@L0LGH} zxXXLkEpY=$Y6ZM(6-rATS|NhcHKV*seaF2SW|OJ5*?U(Gy{0(_3hDx*fvu(s$mn`u zS5#JW~sts1R#1t_0mDW;ZGX9^YZOKaeXd82oS;#Q?Ed?&>!10v0UYQ6h{iVR7#8h?_Is%0x#>_Sjn0aS1;ed0k`$;2M}yRenQX~yu2hPw!n`8 zlCe>*BbgC%i3>p8f&tEtqJ265>?&nGO#LpO`o?S+B#!_Ec@m_vJ6^H3OWIYN!=psI zCI=DmtzaG~{kt*kX$2vfIItCV)egeQO z^Z=z?oT3yQ`&*>57k5zC*Vm3qP&y#9f>=bQlI{=3M=xKWz3l8D&AsK6q}DeG=fYHg z9J1VN6qnu0STI+#$VJ&)V$y z;o0%oR_mwZ^MAg1d(M73JUu;peSZA%jJ-K!M{i!gI6gmq^BR7A%MM@vi+z9m`o$LW zd?|S=knAlE~bYtq7c>*Ec{Enz9q%F9?b&SL5uWvLKRaO zj`OEG@Q5ozzssfnPG&CCLDK0^s5ZPnTB46_c=q@9=wEp9|876sefp=~-p=+u6x!M8 z{b{@RbZ4*kC$_z^QC2)N%sm32EZa_ga9_y-n+8a-%A5`P2ok!``;x$q`eDu;$monN z&c@#G199fo1Hqe-y2*SAx11PGqQF7)qvrwBPLVB?*}1q`B=`?ld$--ftH@CnBbA3pdOP>)|27^i)&Y?f5Y=S_OM)F6QfSMa`a&!@0C^aGPSwwz(}VxD zMp+D`19k|T9M`gF;yf`HjQoatLa5J!=n}TlftOyRVoylKLS=-<-E+gp8HFA$f09fw zxDisj(;7#(W|mQ;wEQ7LE}PJWbJPxFnCzqr;6{nEHUJj&!I_4atq{#Jh)R#I(XaCe@h<4)zbGo z{^)L6Ur29U*sdv)pqTHZn{qOI?nm5+}4xRw-*Z*xizyIES%&z>&0htBh`44yh zXLzTM>YfAkxcm8YtA)i{ryiMexumXPx`hR|3nZIh24zuI8_Y%?YjcWnG&15g-Pl5# zZH)Y3%Gw}3wORYyH*es-eD8rzloqS~{P47m|NX!1%}v$;NC=$YfA2_h;F=z=%9Lyi zfswtK;R*GO=aYcHjxWPVy?)7Wxa3_luY`98(x#PCroH5fA}KCQYgZV-0fTWn@CSn~ zGZ)cFxWl7Bt-Tg^t=b#La8xmAk{xjzH(J7eunYEy{We3Jh1^%H4ySPW`r1PsBuH&7 
z`Jq#s$T8l>mP}-$dBg_RJCD2vtRnZL=F^BR$T;ZdXd@uc@n6wgR?OZK6Q|}3S3ArM zUoC5yK|XnWO#TJ)eq$6QQT8cDPG6gG4M%Ix3s5brx60{do>qAcqg8?L3fvP5)U4CG zIfJz3^MMAJCO<^=eH40Os^)@pQLm6_x!*g0`y&_9^Ix!bn8MpOd)RoMG!W#lA1WZ& z!L!UP7CO#8sImnhv??^FO;!YSan_t+9T&A+c!8evAX|--tjhzM+>QKP@VhQ&VxU)G zaT8W(=%Y16%6i-8G8F`kI*ARCm|)H`q;2Ml;;nR;Ua+=1o#t$cRiy*=XXskPe4#sy zzvubPLs+|8>>;=A0{j;zB@fX<&ftjIiF#IcROmQM9-;G1>-gEvL~m z3Zl!K?~&b_2pLC7<~7oMA6wmOGQ3LG4Z7C1SWz-l&w{Rl8_yV`=e}=gViPX z(aRtjxPh^_@*nj!^nw+oI$AMW&)RRfK{`h|oZn})9odTPptNOuQHx$y(?g@9iHOh< zN3a8u8LPUulg>aRbw@p*J8GxdoK1x*nQG6Nxx||Q1stKVSqu&QchA}Bb++5Zoxhw8 zoK4jr?cS`-yX;=|&=arMHqlyEw!Jc32_HH!@Mc)b`Q0@|*3+eNl|_SShOf*e)%oxZ zNd>D!-7t>AU!y@oq{-5^Nm~OH*P5Y97F`{BG8@W<7}6>u^rY%^sTLGrQSekPgQ%&xQhSEqHzuH73Ryp!A8+Dc!$+%8VJJ)-4&>IbR- zwB>AbM*#Eefz1dEZHbV~!t9KP8_1$OwSWKoxy_g|2Xtgu1_TR(b7g&#b7bsYdLKz4 zMt)tMJ3Nn!vL&Kh4`YDrQbP@8&zm}SwCd|4FeOtMIzQ?n?hw@pfU_GLOzyl z?x7D<`1qV0fnskGT~S<4PdK)?sT)L>j}nG1Hj`Osn~leh8;T#?sVdQ6HE3DGO#AnZ zt9RR-zx}fL_}%~e`IpC^KR*PTHqgU9!R()&?`+TKPbG{t0Af1nZ_eq8uoFrq5FXa~w>8%F`c-nt;{%>iLgL0AAqEMDoYt)`_2 zi9Rf)hquf~5e1v(6&LPwr_ydvxvi4hu!uRA?+soF28W)_yr#o`Kmei(tu$^vGUz^W zaokC_kfri?*PQg=@9QXhgbssobS+sw_A8)7|8DhH*58Rsj->K(rIgp#p)}L&=TSx1 zaKA>dPvNy)5exdw1zK1Z>ldzmj+3{-#ZuQlJ|q|gPNC2C&!KcIK4ULWPv4yK(9pCV zMU-k8NGT>PWa~G#$Tcrar(NFZGa)G%F98gef3i@h-W7DjBZSAE9*bm)psas{&(MO+=sWgKY2}PJSH< zQ`%AUBFc-INfsF5pL3PO?Ws|$NRXcxX7ssR_gVMPd{2%1#{*igSN_}Eds6WK+}+u| zm;Y|#nM3~5j{H2%mx!+_EC~wYhoBw$2V@Gyr49Od3al=(nvwDkw9HGhHov`-B1PG5 zZ1$k!G#PIlWhUGK9o47Gysj6P(c-PeRhW5I0VDpKrSOe*P}4Q0_KQq$H_2O&5Q#FZ zID}^#XL9SXJ@*L4B!MK!edt2q-b~ZDr zHt(pLkhlSrCS+>wkthi~#A%+^bP5!9$vR2tIFeLl>ERjb`f%Qiw~mz%&0j5}Ymp|Q zQv_;yOMn1oHgr2Ot(i@lk33oqa*!GwQ!mmxob0-Da2Q~$j{ZG7L~<{EaneR{Snl2x zL|#}KD<}2bp|FO$E%4M6x%es%cF2?E-^N7C66MAvVoI&$R83-Pp9IGf`y6U&MM+uq}y(n%XLZJvvsQ4;=iEo%y zBKszO<@J*%nl@5vQz;@H+FdC|xt1G2l$2xFYhypEcJ~;wi_t3zRJMVJWHSuGw<*Ku zh-wb{S<7I@DR8Ppg^^xce+9!N-?!U$`O0%6;z!s5OKPm^IW8`z|Zn( zO)hsOPGEA!(hWO5IqF=bUH!sVTzxldvq8zs|FyFVfVkoPW;AZIR@f|-DmN|Q9cVMp 
zw*j3LvhO@kiT0glHh&Hh-wsrK4oibPMM}0c;q@;pwc`Y+m!`5^=tKGeSOG9BSp}+O zgFbqz=oL1zZx=!2ru%#O@3K20)nbRAki;xIW%a4kJOi-2XUKuD{O zXyYWG)&^dEo~!6p-coW_HFlHly&GmU-OeLIq_N>wAu+P~M%7etjbE+MX5x?Z)X4wB zPj9XI|6Z>s|L<l zjq)n=UDPS|C$k`OF-P~Lw3p|sWz-qmm^EbPc-RoZ_qHyV^cQ(v7r<%Jv(^mX$*w_Efo=;WVRyK{%f1H-?tYZ zl&cVTF~fWfzs=l$r^p7rO@nn>KY6@$DxYE>2ziz}neuFDJ9&0$#J z@%jwQk|qqcr%ea*eHsz@mFc*Kdp}cf5#Gr-9Dx_mR^uEC77mh17Ltzonm|itF6*_n;=GLiTHQvq*V4ez%)_A}35L@*Lxt0jCj7VQ4EtVgpIW=kt zpEAOfF=DVAp^DDR3A;RAl9%~WM7ogFlP}!(; z_4vQ3$qK!8axG z7_3rWsGSM_2AkT^EQ2hS7zwUHzSe-9PHxnnRTq4@raJ7U@uI7I58h0QKGUSp!WLGzp2$n4=B=gLcP$7qntqnwaKu(utT*Rrp(7kU z^K_r>u4GA5xOvR(R(SnT-(w}rj(txnZvLf3bwD!=$l==s&phAx;N3K0I}~AT39iE< zl)-;Vu)Ak1x5HY{(bwt{CXF(rH?ZUwRJDGA(`+g##Rkw|kb2V>u4Ubrfd>mK$E;zR zwG~^$9$&$6T^fpOmUjpX`V)@4k5#oEDxk}a=W3UmY@&X(DJMQk01kr&#*MT^erf@x z1iulCFL)o+VRqntdZUxV&PAnWFPUFe54{vpmSa9nMud-;vUn=j-0BskZakp z7QbeEnjMi89Y=ZhaRGB$uJ&Dcyh0^;pfB8st>ZLY*@^kRvuQIBW79^7BnhzzIrPTl1~SISh!@dj?|Ff_kQ}qQY<_q|(&rL|dNucmZ~bliR06 zp9~d1XlzR`+8RU_BGtVv@}Q8@??Mp|ilgcj=jKO?yF3p=U)FJM{UN+y8?))OSn89A9q_(gwr;6> zGW7id4=$^Kp=eI|zJD|t z$r#SGfoENMcBWGpI3fy=SOBl0NxK27Atq0Cf-N`6ycVK`n`O7o%rT$mZM1%LM%lu|Q^ctirPlJ-aSWE6_9;K(4Fos3SfQjHL9@8h4D3`pv2E z63uIKgi~mtnv4xo?D1K3rAiek@vUMl>9gSdMrvTA$YxHhB=9jnvkl^e@De>=?7-+A;hy!z3N zWf~{%Bl_ftyk5M4&!3+?+&~|SVHyx}`7QgS<+W*hKD98Khw$!D;3<|?l;)P*VkHP^~2 zG@R*ia5Um#N1H*Xo!!2R6(}xY+FE7WhSE2KUn$RCPT{VKiP=;=6>jadcJw)x=bI@v z9Y)=qcg+nm!qkt&rD$})9{o9aBs6z;7b^4BJ*>W?$O6I6&?IIs5>v?;X_u z89C$(Mx5epQl(B?EIM{K4%0&)v(4@UIwDwTjPMThn!>=<>IrG!t!UbO>E$ z!AUnoOu2mY)=&Ii^jp}gLClPQ{(1D*ho6$MyYpoKpv}Y0{d9Qx`uO#C2kbN(J;0cP z$Pc^c28!fb;MmWf5AY3^`TTiviwR*PRI$%_Kqr2E9Na+nI80Ptkmr+o75SqgwnfI1 zDWzSZ2;?Bo`_Ke8npM9DTid>KV{e?jd;zoLNjv+)4E)g<0cT4- z`;`_~Hp^5;8K!1ENv9b%$?9Sl!6uJALL%-z{gEtOe0z!L7n* zPng3sYKWK3wJ;jIO}zLd_R!&_Ni*mg!Yk!klGby&U0rr6%IueOeVQg-BvxSTDpD!r z?fxkOVcv&??pQbFM}ZI?l+5%LC;|K&1LIh^C3&^gJQjk5;g*^QT8Ot@2Uw;7D}J-v z`ehd|^*mq#)a_3T`I2P27IQ+~KF?~NSMJn)IQ!@az6)a7#Vq77h``Ac*cg_-_s>#L;t!m=M 
z%FTu{-`q>?W}o!tlOfw_FblepuzW`3z$$QWXd999m6$!-bluD1U0R3p`JM~s`c@z% z2qi0su#k6t_34ACne+TAvrVV@7pV189#&H3-?{?3!!`LFl% z>HYcdR-Q%9f3J`bUOZp<=}!{aojLo-ftH^9X!R_2>XUt}=gen8>0Bp1Szk1V3BwmY z^yS*JANkyPIQCJKI*YB>X?cLO;`x>U*7Boct{vQHu|mnQ5jD~!N5@$E)HZsxw1eBP zG&9lKn2Fc5_3MwX)%DTa2V+zNiIfeeSfbeK9X%4~XQ}j9Y!oa%8Veff;1sX;bZkIi zj)s8L31tzd)B?fTPROQVq@H2NfI+t?Uy~9-V!DXrm8v1~_V)Vlc>)QY`?R&S(5^s2 z$)YN!K1=xqM3~iD1Db3wRb%WwMUm5F2V9vhXNAH*6Sx;p=E!C(?Jc6nY1qt))i`nx zbIkjKDx9)bmQ>)#X79NAMrWtr2j!ec=7x% zv3L3R)hmGnmE9dBkUrXSO%MlrNnMcM)tVGSMfA?65EAuQq7X9MDp3ekt8vUGO4K;D zN~PSO%9ToYO(BHwnb6}WSAQg4-4ce0`bGiq?*zixM>yy?$9v zX7lJvbSP@hqHEB8aUFyX!ktu$30C(Xpcdm5^hLHzud1DTWgJpgZpb-U)~}Px?PzyO zCkMhx1C?CHmgUXZRcwe#fO{_A$0Mf86s zoDk0b&krlt{%JIJXB2=LOy%motjuy6K&`L!=>O~po=XL&HL;`{kfkN74p5z1ReAl8 zTi+@Q1oCZ?>E>$s&}x$AvF+o@tPD`v~p2gTy$M`z30SdoiS5eidh zBrv2+wF`VDgTPaBDoKh%5Qf(~}oJm(`n6v)H^jh^h=zN-v`u4Pqjw6b& z){GmL5@^1Y`U?~LK~ILT+Q`qT28&%!wP_Bg;xJg+$Gi=%SRJMfaZZX^Oet2;(2{yF zeX_m|)mT9!oO23F#L6{lXI6VWXgs2|aF49$TXoxA&p}95E-uXqx@Gzrn-)aIY*z@3 zTeYf^VJT@Bb|HI@Yv3fhu(}v4F~!}ln2N3ASsi!N5j`iT%ePw7)Qb6?-rIC>mS;WN ze#PWcI;I+uDItkzZkg-!P*v{BsG;Z3|H6U7Unme@jsNfd?)FaJ|99^`{@?99twj{T zr$`BZNin_J>J30SncW%fFM(68_@&Rw>3n$)>rwX>kl9Qz%d0ih$(Gt?8jWBeU1v&V za}80@K{xjQ?TvoK#5{H-oza+9#kt~j+B^lexgW6Z-tD+$_V;GNy_Kw##*>YU~KV>{>{ErqWKcbIVa%g=wB%vRu1uvf$E= z=eEbwd;j`X*L8|0&S0zdYWt=zpZdw(#*$|<-sqiFI`JKk=Mj)cF+Tdnm?oW!*f`gC z%37HeQoik%PcyHh&sjC$_&FyPr4w-9{?}hCW*$X@1t6f+Pc?QtV+^) zZ>zPQXO8o~zOBur1*kdy@9pny=k33qZr}TV-paGc@n0N9Vl)=l!C&-9?i-XcSt9KaJtZ zqLctd8=s~|#t}~G{BhyTvnb8Alh;@-nxZRPab>=nT%eW%(==4=wyt%;ymzwHrVO~I zy(!*yUls1T^=y^69K=>;6oKI!&EnV#GnHc7EFx4^l5-gO6bn7gl@?R0p9dp5!Lak} zusq~NIB6gJ9R3_iYs5b%KZllcPDwgX$5c#Z7xm`(pEplQ{R(VNCFpTSBlf6+j-Ct; zThAV1alr);3c#I%XRwSkvf0<(Ndy)!aW_siHu>Kfd%?uN41uStq>mc;fBQ*3{!8!4 z?#}ML{C^wI19sx3$f+oqFQ0VWc0xA5EV0PNr|$3r?DL7!dH~xYrmC7vG3_n<0fZ8e z%?q2j>2M6Cw^;1K+0nle+h+D2)(bQcdY9ZvdV?z_gf%MruT6)&34Y%*B(29zay^Cr4%`p$-*f_h|9?;vn42HX5_bIJC3HBAqhGyY+H(An=XQBH 
zc-3;Q63n^V{fcxFw)KDd^6$cHZ>bLxv*&;q~Bn@c{VE4}X7g+G_RteUz+QBMo`@B%39nU1J8ez8)J5qTLFNEkj!cvu+_{(q0k?&u`)pZja>GR< z)`r;#BKA`h!&!&}0cbi7QMJ<=rr8ROMaM+dj( z^#1l&ctzvgb#|QXzw$bG$n_8*ND_G42b6Qo^G6N>p3s?JkVsN+uUtQ%Z{p-9ma?D` zvVIvxQ;$xDBmd&UBdHz4@C?)k?C=7PXs{V0pq~&b+{S?jL&O05sXw0pb;&UH>6pkG zKcCpkk5is*^EjN&()tRmPo3(g?)$2$0VJ$1mb|Ml<%A$NI7xJo_F-^5Q#Y(xe8KRO z!08P?|N1s;L9@Df^|x={w1u7}%d`Fb@U)Hp{lD$aKC6NRR?WzO1Yg`XBvV*s93=&u zz$Xbv7yky@vH_+bfySpGS%#^CC({u1O*$!qgrlT@ll4JDi|NRnx&uFGxST8tWuf)e z{IRXX`o*^VGFn9k`eY9^JUhtb#eUw;4_C}Saz_vxk<)9eSY9k|@Xc}<_{bln?-Aa5 z3qu2~2h?++0wVEJ*hv5)r_c8^CNsg8!a0ni@YiVIWa3BP1}AJMaWsv6Qs=6owB5)S z7MS-r;LJAhJg80K)LK|0oNyFKJ&?d)Dsq4}g;Q@~eQ*M8p@Ys6^DbMM51I4A8N<=q z0^}TUo<2W30{J}wsc7iU1MZybXk-VoF`P%IFBU-Nyl_q<5DRZ?pf-gQb|{e6W-;te zqOoIZ4#5L}j??{xkpw(Q&EHY`km4YSm!PmH`<5wS-w8V-*v-Yl6eQ0?Ef5fJVe(57-1Z~Bj1Nzam83l1 zw98eSc=44#^rQt?6t&g6lZY(625#II%ow|TCqa{J5&H%i11gwGe6F!UHU1!oF1tJ1 zy}xz0pLToSbm1t{(P}3hKkW26z5TAn)^Ebp{isyna5QB$q{{Qf32>70o2lBXaDXRh zi;_qO4spPkih|PqO5L@?l3n_-9Xam!Q-xG9(MJIYqqwv8%aH2l3a%8ne-NbN{nRUd zb%(Z(UV=(@Ub=`>bq>H<9dY9a!WJwu;bDX81h2gk|1uQi3il zHliysB;x2@SXiu3x=?|td0R?gv&>x8#4ROL)J+|?GvFLk!GC(JXQ4N z>cbLmU{j9)L;c#xGmRRs{88+2Q9|B|;Kv*G+YHX*fbJ1$1qqCz)~{J-1hybZUX}(m z4Q9za!bkZj8S(BHm)W{ubkPN$beV9?HW8mwF@RYGb25no*Kov&$#x;Eo4x?}{vt zmD7(ztwbNM@330*+3iifrbq6>X%qv!tmCq?`#Q68=2dx*@?^~3DWz_?AEm5?I$;4*gs<(8@3L0K}6W&wuQ!H#)U z71_;}I#gVsIt54$4nd;w&^UDTflUJe<7q@t?OSwyYjEAL#yf_U5qbj!{~AedWlig} zXK{?|6Uv^WM-}tJD<9;93HNu5eX_SE0i9o@yIf?L)98hhtj$HbKi_QRhLLlzR7zI5 zMvS?Ye4=dLrA)$IzUQF>V;CiZiyWPJOPzG&OY>qbKa8AnFBehMj2B$Md!^%m^)^8N zE-@iKl%sIr=+8FKFg_UZCit&;gfHtb0D2t64}PNCMJGTsF$lfc5D;SWvv7sa^hgX9 zcnL2n&VwW@lNcVbFbX@ev_6cYPz^78=HN(_whd2ktsgU2?F9^(zr2yt9p2k-`&q!i z`^!rXU%LN0^yrMGCW1mgn{>H?t73wzljsb&tWhi7Ch z=Z`~%IjNDK-efj$TbgqfH(|TXZqmomwcIwft<+&>c$oQ7j|2qA85tde#<|IA=FbdE z(^vS@=1&$WkL85Mf3etGU|)wp1LNZsd6dVL4E0pplX>QEHNlYj%}{kGpSt9tk;?yKBvN)R7#E)v`&z57cKll7;SvFTPg~j>P(!zb& zDbzAN+{!H0%wH~OdSBvizWFnPBIoN~~fF*W~*_(If 
zj$|t<;+@6$Q*eIaIaF}m);!kjd?Lm7k~^QCUVe?+9|!hJZ+{$$+ui=?K>w2)hj;^7 zdzx=>a$o9jU3(mDaP(WwyF?r=n z(4XJ800Tap0S0{_C!a8w49DKYO*AhSY$}Uc5(^V43@?H)Uk|WB00(}KvgtaIc2%;t z7L$Z0oa^|(6w=BZ6bi+$O{Mc#% z^6Hx9?La}jnV!?zcY2+XH}KuC)7$RE`@5|cdSr07#kxib*%tsPZ~jD8RDse-E&Av% zxOQ)nN2p;_a3Ibek%L0|Xp4tDM~g@-ethvdN>4x*f`#;m!z0X%GJxDTc5mR9pL#rq zWi_NS24ir;5(#w38$lhQ0$)0aY-EXI4POeU)dFnIG;|MmJoy4@_*kUpB(W>vZh`;7 z!!JnPz!qWK@k5~I3AfVb0f7>Il6a(fr0v6yj>#@w{#^@JdC+>G$Wo`c2S&snlU^Pm z{!3>60n?_R=`#VJ_|OGJBJR!urrkPbD@$Y%sKt{Ez0b=t?atECk$`w?wZzRTU7FU> z=67Qds3~4&24Kp5jDp$3Gp}E_&@oTYUc1xYVtO~PDO6FrEyBNmxQB2iY;=PN9rEBp zXioC4}j!r#y^ivG$;0;Yc;{Ut0vok0& zM-Sh8=ff&=({Ww9Cn8AfAe+dM(UOWURYlH{boit{Fs6g!^#Rimn?nGUM+^|q59q!^ z>sHaMX-kxmEk&lP8KoY?Rhl44i!yz%lN-_c2k+*UJH?=12l=+ddlmnSw<)9myuP4I zF`H#7e)PjKAUOQ|3a1j;JQSA#^#Rj?eE?ilN|jhh=4JWbyHTh_T+)CMuo-fypM)1I z1>^;oLf}Fe3z60$Ntk*>D?e0MX0Y-i7$63lO0a=zH>5?0ibVVuT95hdFmSfSCl~e` z-Ytm`W6y{0Ic#7=CONwgGEUwVxT5zUzSzT;39Kw0yNn5=Z@xnTUYl3tQ}|};{syZ+ z_MXiKG?U%R3&H;r8|?1^?^GGTF_5l4=+QLDM(j9_h*Il?R|hRANy>O~Cx>TeKfO79 z(aMC(D|vl*puJ=Q(Uh&|!u(Q!F5O;J#ThQM6)a8a^-u&J)*qnY${s&xfsLDXD6cV9 zRxIch@dwz|qFxhPb1TYR{B^5|8LC{dz+S&Oe|hGlA32|bu_v&VE|Xtid;4}X3~cun z$T83`$v|yZsim_G(AZfQMU$syW`&b$g{{xA7ndXWp3cbXe6&qn+T>z z%otDFApZg_h@jyVX*oAl&)V&+95&$<(fp%a(T1U%^Bp{tOZ`1Ikqhr!9-_LX)*b9v z5rD5X^X%aUIv5VqfGW$7?_|Z0OkF^tbbZLEfpsGGYZ8SB-m~`a4)Q!lqr=Z_WN7P+m`kzZ z=(vF4n!{lf1K`rkKMTot;kl?8NN~n^xJEwIT`o^iE4V6!08EA=sw*)9G<@*_boaY- z?!AHL3=D5scIm2?X*;_G|lRid+Ck8@j7~H_|3ZSP-J2bpqiF>UfoLL^Zm4haYVARvt ztB!grqunA2K;vRi+&pQM&cGM}6=(`WeiD|llP^~aA$I$Euq18=2B>BdELv8tm}gBqnacKILa&DS1C8#M0jXn1W|#QC4B8a(l;M zItCFi^@p^eNZcJqlL)WDaptex8$<0XW3g&+U6I6&kWfc>aZY4Hk!G?GGqgy2MMj*s zj)mGmNYOH4Os*_9`_hc-$s>Uif(%nD*3A~R#|=8eM}QM#Uv7vtn+e(vH|~tYoN zI!%Uf?u-3Mg#uK5$*6$QYmXu?!R{r)lB*FQ!FkNU$VR~}ug$fl4~?%G1UspM+eC)g zPh#%^)9-OM^)9-&@JRtl#efQDc*>z!t6n=-JMXV{-%tEYKE>S!>dy`^fDW2gB%5ND zh))s;-0(n$Qd+oj#?2OsDHPnAc}^oHk?{L2vj z{5$SmJ94$OTB6*D&Ct%6M_!m>_Ac4N7L&2ncARatfd$&)Q+xC8Eu@zTEJY-}8CkLT z3189P$Duc+6^~&{1D_wExvYTr0CZemUpp>A 
z=|u5mH{hj`?hnUDFJGU%?CjvS{Wc6xGo&_1V9X*mG6gVpukoV9y^KB1)uHd}*iR|l z0EUUZc40}3P(SDoW~qT72^EY~FM!NAWbMNsxmC`d{q(r4cx zkew?{KB;B>A&e-zoS|~=5@Z#AVKDXL3D6#KK1`ty_!D3H$rpy>d_|cD?w|YT{`u0+ N{|~VGlH>qb1OQG3a54Y@ literal 0 HcmV?d00001 diff --git a/pkg/scanners/helm/test/testdata/nope.tgz b/pkg/scanners/helm/test/testdata/nope.tgz new file mode 100644 index 0000000000000000000000000000000000000000..a47332d93877da0074012f36f0100b4f8c7decb4 GIT binary patch literal 114 zcmb2|=3oE==C|hzg_;Z)SPs~z>K6zpJhT=1.18-0" .Capabilities.KubeVersion.GitVersion)) }} + {{- if not (hasKey .Values.ingress.annotations "kubernetes.io/ingress.class") }} + {{- $_ := set .Values.ingress.annotations "kubernetes.io/ingress.class" .Values.ingress.className}} + {{- end }} +{{- end }} +{{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion -}} +apiVersion: networking.k8s.io/v1 +{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}} +apiVersion: networking.k8s.io/v1beta1 +{{- else -}} +apiVersion: extensions/v1beta1 +{{- end }} +kind: Ingress +metadata: + name: {{ $fullName }} + labels: + {{- include "testchart.labels" . | nindent 4 }} + {{- with .Values.ingress.annotations }} + annotations: + {{- toYaml . | nindent 4 }} + {{- end }} +spec: + {{- if and .Values.ingress.className (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }} + ingressClassName: {{ .Values.ingress.className }} + {{- end }} + {{- if .Values.ingress.tls }} + tls: + {{- range .Values.ingress.tls }} + - hosts: + {{- range .hosts }} + - {{ . 
| quote }} + {{- end }} + secretName: {{ .secretName }} + {{- end }} + {{- end }} + rules: + {{- range .Values.ingress.hosts }} + - host: {{ .host | quote }} + http: + paths: + {{- range .paths }} + - path: {{ .path }} + {{- if and .pathType (semverCompare ">=1.18-0" $.Capabilities.KubeVersion.GitVersion) }} + pathType: {{ .pathType }} + {{- end }} + backend: + {{- if semverCompare ">=1.19-0" $.Capabilities.KubeVersion.GitVersion }} + service: + name: {{ $fullName }} + port: + number: {{ $svcPort }} + {{- else }} + serviceName: {{ $fullName }} + servicePort: {{ $svcPort }} + {{- end }} + {{- end }} + {{- end }} +{{- end }} diff --git a/pkg/scanners/helm/test/testdata/testchart/templates/service.yaml b/pkg/scanners/helm/test/testdata/testchart/templates/service.yaml new file mode 100644 index 000000000000..86baf148215d --- /dev/null +++ b/pkg/scanners/helm/test/testdata/testchart/templates/service.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ include "testchart.fullname" . }} + labels: + {{- include "testchart.labels" . | nindent 4 }} +spec: + type: {{ .Values.service.type }} + ports: + - port: {{ .Values.service.port }} + targetPort: http + protocol: TCP + name: http + selector: + {{- include "testchart.selectorLabels" . | nindent 4 }} diff --git a/pkg/scanners/helm/test/testdata/testchart/templates/serviceaccount.yaml b/pkg/scanners/helm/test/testdata/testchart/templates/serviceaccount.yaml new file mode 100644 index 000000000000..f728deb2a6bb --- /dev/null +++ b/pkg/scanners/helm/test/testdata/testchart/templates/serviceaccount.yaml @@ -0,0 +1,12 @@ +{{- if .Values.serviceAccount.create -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "testchart.serviceAccountName" . }} + labels: + {{- include "testchart.labels" . | nindent 4 }} + {{- with .Values.serviceAccount.annotations }} + annotations: + {{- toYaml . 
| nindent 4 }} + {{- end }} +{{- end }} diff --git a/pkg/scanners/helm/test/testdata/testchart/templates/tests/test-connection.yaml b/pkg/scanners/helm/test/testdata/testchart/templates/tests/test-connection.yaml new file mode 100644 index 000000000000..a391ef1c462f --- /dev/null +++ b/pkg/scanners/helm/test/testdata/testchart/templates/tests/test-connection.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Pod +metadata: + name: "{{ include "testchart.fullname" . }}-test-connection" + labels: + {{- include "testchart.labels" . | nindent 4 }} + annotations: + "helm.sh/hook": test +spec: + containers: + - name: wget + image: busybox + command: ['wget'] + args: ['{{ include "testchart.fullname" . }}:{{ .Values.service.port }}'] + restartPolicy: Never diff --git a/pkg/scanners/helm/test/testdata/testchart/values.yaml b/pkg/scanners/helm/test/testdata/testchart/values.yaml new file mode 100644 index 000000000000..4acdf3c931bd --- /dev/null +++ b/pkg/scanners/helm/test/testdata/testchart/values.yaml @@ -0,0 +1,86 @@ +# Default values for testchart. +# This is a YAML-formatted file. +# Declare variables to be passed into your templates. + +replicaCount: 1 + +image: + repository: nginx + pullPolicy: IfNotPresent + # Overrides the image tag whose default is the chart appVersion. + tag: "" + +imagePullSecrets: [] +nameOverride: "" +fullnameOverride: "" + +serviceAccount: + # Specifies whether a service account should be created + create: true + # Annotations to add to the service account + annotations: {} + # The name of the service account to use. 
+ # If not set and create is true, a name is generated using the fullname template + name: "" + +podAnnotations: {} + +podSecurityContext: + {} + # fsGroup: 2000 + +securityContext: + {} + # capabilities: + # drop: + # - ALL + # readOnlyRootFilesystem: true + # runAsNonRoot: true + # runAsUser: 1000 + +service: + type: ClusterIP + port: 80 + +ingress: + enabled: false + className: "" + annotations: + {} + # kubernetes.io/ingress.class: nginx + # kubernetes.io/tls-acme: "true" + hosts: + - host: chart-example.local + paths: + - path: / + pathType: ImplementationSpecific + tls: [] + # - secretName: chart-example-tls + # hosts: + # - chart-example.local + +resources: + {} + # We usually recommend not to specify default resources and to leave this as a conscious + # choice for the user. This also increases chances charts run on environments with little + # resources, such as Minikube. If you do want to specify resources, uncomment the following + # lines, adjust them as necessary, and remove the curly braces after 'resources:'. + # limits: + # cpu: 100m + # memory: 128Mi + # requests: + # cpu: 100m + # memory: 128Mi + +autoscaling: + enabled: false + minReplicas: 1 + maxReplicas: 100 + targetCPUUtilizationPercentage: 80 + # targetMemoryUtilizationPercentage: 80 + +nodeSelector: {} + +tolerations: [] + +affinity: {} diff --git a/pkg/scanners/helm/test/testdata/with-api-version/.helmignore b/pkg/scanners/helm/test/testdata/with-api-version/.helmignore new file mode 100644 index 000000000000..0e8a0eb36f4c --- /dev/null +++ b/pkg/scanners/helm/test/testdata/with-api-version/.helmignore @@ -0,0 +1,23 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. 
+.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/pkg/scanners/helm/test/testdata/with-api-version/Chart.yaml b/pkg/scanners/helm/test/testdata/with-api-version/Chart.yaml new file mode 100644 index 000000000000..22dab35d32f4 --- /dev/null +++ b/pkg/scanners/helm/test/testdata/with-api-version/Chart.yaml @@ -0,0 +1,24 @@ +apiVersion: v2 +name: with-api-version +description: A Helm chart for Kubernetes + +# A chart can be either an 'application' or a 'library' chart. +# +# Application charts are a collection of templates that can be packaged into versioned archives +# to be deployed. +# +# Library charts provide useful utilities or functions for the chart developer. They're included as +# a dependency of application charts to inject those utilities and functions into the rendering +# pipeline. Library charts do not define any templates and therefore cannot be deployed. +type: application + +# This is the chart version. This version number should be incremented each time you make changes +# to the chart and its templates, including the app version. +# Versions are expected to follow Semantic Versioning (https://semver.org/) +version: 0.1.0 + +# This is the version number of the application being deployed. This version number should be +# incremented each time you make changes to the application. Versions are not expected to +# follow Semantic Versioning. They should reflect the version the application is using. +# It is recommended to use it with quotes. 
+appVersion: "1.16.0" diff --git a/pkg/scanners/helm/test/testdata/with-api-version/templates/_helpers.tpl b/pkg/scanners/helm/test/testdata/with-api-version/templates/_helpers.tpl new file mode 100644 index 000000000000..cab726131dc5 --- /dev/null +++ b/pkg/scanners/helm/test/testdata/with-api-version/templates/_helpers.tpl @@ -0,0 +1,62 @@ +{{/* +Expand the name of the chart. +*/}} +{{- define "with-api-version.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "with-api-version.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "with-api-version.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "with-api-version.labels" -}} +helm.sh/chart: {{ include "with-api-version.chart" . }} +{{ include "with-api-version.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "with-api-version.selectorLabels" -}} +app.kubernetes.io/name: {{ include "with-api-version.name" . 
}} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} + +{{/* +Create the name of the service account to use +*/}} +{{- define "with-api-version.serviceAccountName" -}} +{{- if .Values.serviceAccount.create }} +{{- default (include "with-api-version.fullname" .) .Values.serviceAccount.name }} +{{- else }} +{{- default "default" .Values.serviceAccount.name }} +{{- end }} +{{- end }} diff --git a/pkg/scanners/helm/test/testdata/with-api-version/templates/pdb.yaml b/pkg/scanners/helm/test/testdata/with-api-version/templates/pdb.yaml new file mode 100644 index 000000000000..a0a54cbc232b --- /dev/null +++ b/pkg/scanners/helm/test/testdata/with-api-version/templates/pdb.yaml @@ -0,0 +1,11 @@ +apiVersion: {{ $.Capabilities.APIVersions.Has "policy/v1/PodDisruptionBudget" | ternary "policy/v1" "policy/v1beta1" }} +kind: PodDisruptionBudget +metadata: + name: {{ include "with-api-version.fullname" . }} + labels: + {{- include "with-api-version.labels" . | nindent 4 }} +spec: + selector: + matchLabels: + {{- include "with-api-version.selectorLabels" . | nindent 6 }} + maxUnavailable: 0 diff --git a/pkg/scanners/helm/test/testdata/with-api-version/values.yaml b/pkg/scanners/helm/test/testdata/with-api-version/values.yaml new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/.helmignore b/pkg/scanners/helm/test/testdata/with-tarred-dep/.helmignore new file mode 100644 index 000000000000..50af03172541 --- /dev/null +++ b/pkg/scanners/helm/test/testdata/with-tarred-dep/.helmignore @@ -0,0 +1,22 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. 
+.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/Chart.yaml b/pkg/scanners/helm/test/testdata/with-tarred-dep/Chart.yaml new file mode 100644 index 000000000000..bd163a944cae --- /dev/null +++ b/pkg/scanners/helm/test/testdata/with-tarred-dep/Chart.yaml @@ -0,0 +1,14 @@ +apiVersion: v2 +name: with-tarred-dep +description: Test With Tarred Dependencies +type: application +version: 0.1.1 +appVersion: "1.0" +sources: + - https://github.com/test/with-tarred-dep +dependencies: + - name: common + repository: https://charts.bitnami.com/bitnami + tags: + - bitnami-common + version: 1.16.1 diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/LICENSE b/pkg/scanners/helm/test/testdata/with-tarred-dep/LICENSE new file mode 100644 index 000000000000..261eeb9e9f8b --- /dev/null +++ b/pkg/scanners/helm/test/testdata/with-tarred-dep/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/charts/common-1.16.1.tgz b/pkg/scanners/helm/test/testdata/with-tarred-dep/charts/common-1.16.1.tgz new file mode 100644 index 0000000000000000000000000000000000000000..6a2df2e15b934a29b7c000cbd1271dff601e65a9 GIT binary patch literal 14613 zcmV+wIqJqAiwG0|00000|0w_~VMtOiV@ORlOnEsqVl!4SWK%V1T2nbTPgYhoO;>Dc zVQyr3R8em|NM&qo0PKBhciT9U=zP|%s7t3eb|#`EJ8@<_o4xBenHit%PJHcja%Q{d zwjmOd(53)30LsxMxxf7$yh!k+hb=#{Yko*%5-1c3RfR&KP>9%c%4qLsg18LlXqx5Br4CsXdvHi2tE~ z?Y63&`$ir_DU(PNM#Vt~AVLz48J{1(F&%LvB+nw5aoj;E`GC0~j2^&jw}a9&|8o!y z_QQTB#vi3r79SEopISY~Zo_e}<9_iyN8%mgxWf zV1K`=|6lAq=>J=Jwzi<62Di50SSwqOf6oZVu~M@~$GxN1AQ?*pAR0r;S%ihaB2?x7 zQ(TY~@Fz4)6Z}E3QfF(cBn1<2L=g&ZBlw>i6z;IXSv6QMMLIogz?U+Oof(twkAo%OMcL4-u z9tvPPAwoj%>hLsB|Nd97vjg3)UppF`&!4-d!KE#zDk;Y!%sGx^PbhR24TJtv-wj!Wm9*BpH8?`P34;`z?*-g3-Iiz~y76ggt=3 zNXw0kpcy>VCo%&3JA|rH*QV{c{<@mh2t9x}5x5N1@XI<-O^p&%OYa4=*BUTov7>)a z{ic$vKOQNIEBY{E)09z6Wj+@8ljP_}#k4=5Ax=a9;Rj=|SHGy*RN_xEfJZBjlZS%q z_M;pLIC!b5qAZaR1P~S<^0GA~yehg+J5WdT>Z-Y*x)N$9c!>h*q`i>FY0>0?9^CEf zSm=r(oR-LIskj4QfD#(3?hg88F|`a$nS^aTP37Dp+D^47q?}MWf&isyc##b;r&wYU z64uKr9l&=Y@F`zw&eQKzJ@e=S&!560EwzgN^+Bm(^vEbUt?X>QMs-wcn4&2bDT;7O zZ#HnbE*R#;3$oT5k}PEj8_&O2+P6%f2@`64g4dkZHYc2=N$zfhvzP^MPdL|~Z@bMx zxH%F0lh2te%ir~-uR->%D0wT(U8Tvo%w137R*|;_61Fknx*bErw6p;`#G5HMXNGtZ zzRU{Q{Q9rH{*O?Kh9udb2Vn8~e=yj6{;ab8-|s(v_OSlHjpqvMe>uS*5g%g--{+bG 
z?CwCTIF{H-X&k$MR7`7Rl+T>wgbh(rlAr2FABJCWL8-1+OpC1dx0@2fBOlN2i`t=s z;u>g$?8~ifWiu_Tv^G(rgVL0CkLqKYY0q}z1d)hB^m@}vbL2CHbyIo z-RaH%YpPGx{4Fu1GXj`ZjrM zs0_5#Nac!djyz@$dy46*!sk_3nMi|Yx6V{3O~pnDB_xt~lqF(iHRad1O;Xve-vyBf z9dj(iiUe|{TZrQ(A6sm}Cl5kiYK0=sSiT#EZEpC_*KW8;oo+-6KTz$q=dRONheyD% z4We05s`wB^Yv`Nn&-bRmOimal|Iz_mYmsnK_05s;==Alf1dMpR2K}zjdMG6Y+pk5s zmYSQR+TqFZ8Rjz*;T36zQsOGCPQv~sNXYS+2+7yyzt*~&Bjeu{YcncQ0w*kn!%Q*} zp#<|4sX9r~8W!o^y5!9LoBj-|D7RaqmBpOUac{P(i!32-twI}FmsVbVI0Q(Z>8V1E zOD1q^Vn3~80EmFSaPN;{=MXN@93+Eif}@L;(wE3YdlD)+i?ayFkWopJyue<#7xv+j z$Vp{It-x3+KZVN)i6)>(jR+NhD4;mj&PkL&@cHZ403Z)?4Ldg4RG~$L-!>9}fa5X# zq#e6JsE`=Nz((2)J8H)PzJ7%vj!-7BflTOFv#09!1vUY)t}LPO?|b2%ntifFDhvQY zK&Zrggd(gdGGZBx1Hha!#mP_JGZyf)OE!5;lG;U`i;NX0&NEo;+F6 zsLs-qaVb1pOY-1RCiuKP`?=r!^FMc<{QQ5v{`1M#ua5%QE|CYmfMjQqD@s^Ty@I3J zXA!sEwXf{I<5TzlYR1}F?AITb$A9e&o((Gg-)ArO_8$DdxACmz|J7>#{e-h@JTaQb zD4HxCa}+B%&V>5c?8Bu<2ptY@V`D&0kOT!%E3hFp<-$(qCxOQJz-&~`P(osD47^bd z^!sq|vbJqf6cRSFCR7b4IMg8X?Iiu}d%cU4A~^{q3msPdegJ{M5yvwB@fJ;S0Ktgh zB-T|-(QdyV6!UWp!@*5qI9LP2L5^V!6)VYRkZ=xO)G_h9m2b5*j7XgXJLOGnWfPpb zxM9~Iq=+LEebt3(YtRKd6?DPX4B-cb02_Iw)ih1$SSZ6m&v(k^c&a8JqN9}!>k$c; zj6RkiC!AfnNm8}zHbT2_l>aK)J3$;%=?b+Qh|5S?Y~NK$r8nMxV}$CB`Z^W-%}GS# zhWgH83{P=_k-*`x_{rc=+JAVYakMl&cnQHT^yKjD?Bl!B*JtqN^z_~70sMnyMjlhn zW+cWiXBmem{u~80qhAE(t#2h$Q#uZ8u z-S?-!k{BY6V@xGMiGV4ZD+Zt#$Hp*|6FhfsFL0`CMo5*BBBbigj3wC=LzEyg6&peO zuNjvU%z@Am<5R<(5u&PnHq=LXVn8xKRui*Gl+M+RF~uAuy$QSQO4gH{Wy1uE31hN5 z!O7GF$#;nm85X^*qVrwDe%;FnNpLqG7hB$rjJQG3$_>_5-zl5&bAz$~b^ic8Nj*J~DCU=GKcA@V+-X=@l1XaU4h*1}6bX{yGp^fqi{#PVOAQq!hWrE2y|dP|6q5p4UB zqaK(YJswrE98@qZHLP24N4NFCjtU+nG&aZoSS=!vR7%ILw3RvRrt$?$UXzTCvwCGw zxP-kM@LA5}_N!aiDV^+omj>FEXbL)utw0|HXp6_9CST?3}$Wv4&@p-(@zPq1@e z#O&6)bB~1euFbWLKMnT34c*w>0${2AKiI9F|Lwnc@c-V*vrhc)-?ZOCVi=<6LP-Rs zYD&@$93qtQl_E)o2{zkuC`tSP;lq<-xBDb~*Kd7}vJ7Zj$+d|vTin(YYQNgQ?6(uf zW%14aRrODvc-}}4X6}d#_3R341c(CuM1-DTZAsU6zqV>8uiZPQG6elfsTkO19B4Kj zV!oq5stMj!3*lJ8B}oz(VvrnBp(HQ{$si^pef(I$98D95vC_f{i`?CbtUnYOS7;I{ 
z@fg=!ov#()d_U*=3C=&eO-LvExjDgTy92Z~IQhEB+kHQHxnw82DF&@^L!5Eg#=n<< zH$^M$)JwI87MTXTkvG$T&kDtNHISw6dSt8XAp1=pI&K39kSbXILQsj?v4Fq~Wl(Fw zo&0G;pcKh&N#y_%D$CGzif^NYA-Vso`rS3BS+(Ba7sX-?Gv(q{-H8b)kX)GoHZnlz z0cA42pQjk~eum;_yTgp*6%>QpjXtqZ0yBp%W7&E&kI}XpB4qxwc7#kVL}r8%ffomB z7~)(l%EET-C_zGKZF4Pp9#^2PN!NPy#Q{j`llR7{n-e#}Tz033qOsnQup3%lvNWwd z-~Vt1f{ZDSJE{U1kx1E4pt`80Q(_i$mN0(x&M6)hX}$c`l%|SVBzl<|x1J?(Zz~_r zt`BDU;40WZ8qjJQz(N*lJr7cgH_qv4uj7l?a|>E^yOc&%O_2B=&!Gf5s1hPdG9fWH zTOnMj`IV~W#XWuX5M9#v)a`$J829%4|ARrz{(rXr{P~0Ze;dyl{(sMw7ZTAJ@z@;r z7Om>Q@{%iVbU^(Oc5dUepJ%pn(F7;cP)tnj!Ua|IcU!04p59=lk+rMbcDggv>gA1U ziMzvjxrPn8ZT9Hd!{3r=&ora<&`&5*9EHN3k@wHM>urBAi^?UI(YADG{<`E{msAh> z^IL!F`rk0c2G##k|Ihy3^Q!)T{t*9nJI@;W|IMdVX((mUQtKemfyO#@bgP>3bxgM? zVvj=Wx>}n!^HY_4H^ZEh7{eE3w$TXo_w-LPJ+({yp)ePD2)bZX?0e;kRijz9BxFjg6G~2qfNe{v*Kf~M zS?vi+akLY5jz=yQ%mB%Q>^<6(!oj8KuEXdR@)q00G*<;{3rWW(?qkj?-O3TfT7i}5 z?k^!9Q5zAkGQWCw=HxI?%2Q{KYw7LU2v)hU1kJR&4zn#@AunY@!Ygl_F2Knb7O&9-60@+>2E-;OPr0d8 zMW-6~S{9+kuh$l(MyQ(>sly~;mwr@jk>|;VIWt3^7@H$)(=3ssTNEnN0_d|s5t|Nm z^r_3AzkE`*d7>H=Z2q-X!?u?wk+wubgZCQRrfsUz2f1uavL7(~SQJEDb%JT4M1>)0 ze#+_;74Ec^@zZhbti?UIVE@(ae{MC z?w~sxv;A8pq}@EuiNB+(3{iA(iFjOWz>|c?d8je?BcUU9y?ar%T>nkWnyR${@lWv- zlr;AU=>9XB%+>62iD80fsv9%RQBtL;P@3J(j&sxd*<~KBnJ~-MereY_pFcis4FE$Sm$P{4J+*%YYi zoeMl)N$uz)lGo5WCJE&Q)^W_igKD`Q)pCL7%|`;?dOjyr$Y~5+b-94+(agD20XTSR zlDwA$w?S^osb*`EQ?5}CYpg?5E&hno)EyM?A}ao|{Tf@fIqmOiJLaE#(PMA}%7SCY!&$fVg-@xr@G2VUK>xh#bc*b|HKhjLoj z>KWQW!@1hsUIQh2V06 zIW`gBj>bfQkW8`rB3Y zCVo$H3I`#4BuN4(i7sG7xRB=TFP50wpg=31u>odKHDdx@9b;7JONF!fVtYEaycq*W zG-lJBa0ZMSPUylb#uAKZ8PvxK)~7ZlQ%gq>^XZV8{kauD7aoiTL#Hwx#*w2{vpzp7 z5Q&Q`?2K{a{7)hqTL1a7Gs@7d9a8S7abrBzM^SC66YVH-x6Ij7(z>CtG%MyEsOiB7 za6S*!xYQ`9?c))k1{PLrv5dx$QH4dmHQ;#9QTMU1H4AQVPQ4sgv$ud`Lo-GG7R~IM zk&3pE5cxr1czl893!9NHL5ksQJ!V8yXW~L6ozKva6306ofZY&I^0V!(t22VCLQ)V< zDa=A#g+tnrr!1=r@z=4RT0nD=MZOB~O$SBN39=P3{IuF{Y-Kbjsx8>q>Aet6wP+-B zPg*MEvOPWoiZ7ic)hTL+&x*ZBab$cuwhNEVhsPz|VGt@BR(&hbTRQ#Mr=5Iae3WB$ 
z@DhT+O`eaOY*t1(odF2Ov2yOTC!j=adXMJx>lVaX4vGovg$3L%+2>-XyCeMT3vkH-bMkpjHh>)s_0!yS??(87Z~n8 z!!+`*5Agf#oinT)p28Fj6C9Tan+w!SUy9mJ{XBAEmnWWhR&%E`bDT3J9r;|^6ivp! zp%Hi(H881v0@W3Rb?amEp&@mMNy*ZTh__1FR0^1OTpH!Md|E5CmRM!* z&m_-#vYoI?%wb#25r;`SK`_|aF@L_zrkIlm20O5wrD?s?7n?`r*Pu;qb$JmHQq#e0 z74R5;IyD>hhe`4Sk=i>I{PN33j}{R8X8>Q6(IzB^=c@uQR$hATWhqgVC<&aHWTR1n z8%3{K>C`%|2=z9b*3er3vN@Q)v}8Mi-v9EvzyGlRcPr2O z`Ct4T1!sm=K%e<()ZdnwB2}O`c#@~*(wpK9zId;|s>xs~O%=;K`$6NVoxUFMu=KH8 z&)ib==GvqHwQByoUF8;EF0Qn2i$v?{dM*2D8~?7ifl5VoDsE*rhx=vBp~qi>UjjeR zf^SzWt7{CIX3Sc9^Dvp*zo)MMi);>?ngN#R|Nefz7XR_=+3th>zl~=t{ZC`eb8n(? z*5}tZ=!&@|a;u6>X4iqz%b`1x-0{71iE%m4iR`NR2-TY1)ye{-0hP?IlTpS+a}#%ew9 zR@ZhWbAYk=T6kw{76!Xkhc1KP1fxnh zz6?HVmv^X2m_z?xzkUti3k;FK`_G`OliXFEG_Nv}#DM7xW{8{ZGZS8dOx*p_k*iR_ z44WH@>Aah71TPmk$+ofBHwySBq#4ecg8LIVG_R#kf&yVcxS z)i(S|q7_<^4y&!iHW-Ix2s;f1vHr90UA97LiOZ11)H~vA>LyDwf=fqk;l@oKDq;Rw zOQvdo3$K;xjorrbMlcMDxEm{bcFQU7D(l%uz8n6g?^LdAFXYASXyz@|;&nVY-G+^t z)P&u0JR{gd;$tcax8V?M1dqQHkFB2)2nBZ;@uS5%GDQmT48u#0* zn%r@@Lw2Uduh@GuRWw9c;F~qOy^e<7u5T$O0~og@tw&UgkPi1rfHK;(rB`9O_D#x4 z+|C7_56XO~qjlEh#8P|JXfNA+y#b$p<9LR-z~8Hv2QR_?X-k#4%r9CUJCGZ29+SydeGeDPwYxwr_;y`%DpyVLSIs1TN?} z0575Uj?F1r&EEaR>;?ao&=`U)|K*qXkB`2H3EF+We-M~-qaP1X-yXmH+X0*wy@wb} z97!C*wlc}R#Nl7R9;i2}%-63wPr;fTG{5`8B=j>|=E)p5R^7zZhR>lHBXOPD_HM9L zUr?q4!!Vr@&Zs#yp@XyAZ=kTW!CbgYD0Bdy&ePJht#nl@U9V)v^IbelA^%ODn*R49 zSbD!kNC-q@j;=KsEYbhZ`-A<8{(rImaQ^Rho;CEpjReAQ)ibzx)xj0 zu8T5jicAml!1SUecW4fYqAuf#tRcKMtG)x%-_|*DRpNSU(8|%5h80Z#!Gf$Et17{> zA%H6ZGG!|e;^9vM^AnCo2#GoHbSgehT^9z+W%VBNWtYF+24v(tm zesl)f>KQV<$GYP_T7sCQl;x8`wa7kEbzZF#6JHS#QnX7Bwy;3amNfwLfTTfwFUzW37S zcVfwySEa0JILlui!aN_ICF{Q_;snLRYq|oLtp8r@?^g3azSw*Au>QN1=PK*J9~D=; zezoz{pX0WBv-WcxZC&~4)6dsi^|>}~X3f_?(zRB6uD%6_GC8^OqOSt0dC7-(G$9g4 zGUJV#YE^{1;#EkQoU|_;D{$0At17fC8&4UNElWr4eF{pgHo#HSuM0ErU}X07()!Dn zuZ!!S;xr+VW|6k;)JxQ%-qR& zM%34mO(W*Nzgsul68{gQV|K&-pF#iG-d@fBv;T1Z<5r$4`F|8wyngj>9FPC9c_)x% zx%=Y;@_XCv2{LG3(--77bwduJI(gS~2-*4@aR`-RwK#+pyK%}OTHH8Gol2EXwL6vW 
zn?vX|l4yuTwSH`0-TnomO)=%bjtGvnpM#;Bxamxyp=ouY7U>%N1O5 zg<4wXjI$+IV0u%&E3oY5DWx~)fwPD__}*4EHa6#b)5h0uBg@7W@V<&qz=Hj@eQ&lC zYmEC8y%86m59x{#UnOL&9Pi~rd5HFUi1wf)6zL_bDpM6P5jGozRC(q1O2vrOu7pzkM>plON}tD z>s``7@40VjWhX|v>2{MYjGGa!YkMw?Tz%v5T+L?mH#eHAy9je_c)#PguKFzY|D;UF zF~`?*0xgOE8thm7KfC?G!}-74d9LFBIWb&u6HXwH;qHwmsKBY+9aNOLo=?bY>t-B7 z%>-V{JLG}5W*V3VNR2L{e07+4g0Ay?Fb&wm~*FlwziiMcKV#DdY|0T3qOlVljGUKSDzj)$v$B>~$Rq3z>7J z5x80R!s4FYCr888%;y+KB*yt2cUU2&W<+~gki~P+oL6VIOyJPna8%04eSzm)HtM=> zi{`b~OY?PRaeME-!`9ruOQ^-#O<@%!-a4r)?m^j{t}J3&?o?_l<+bCnr{NTRL;liQ zHbuAZY$%WVo%x!U&!FphB9_DRCOSi?)@y#(V9;tK%L8VcSIRDt`F3?oJzSc7NIT{3 zELcyswy`eY4ZEtmF0RQjS94k|0CY{yl|NYb+kNGn9W|41%^_$pD{s&lvf1Uq%00Rf z?NJo{Dc=xXu0gb|To=s+d<(ver8SeWe5X@WrFu!)%G$MAsJX=(&?LUGdNoy2Hg~^a zE^h44=Cqqn=ruK673vaD8`gJzYw6WhUgO&K8)lc*J}qIH7M4gWV(#<9qH=fRKVIzr zp$H8csoz$Dh}XN#TGYn{MT^Yz485OblTm%`t$WXzNUqn zarV}Txtd$nR9np>+Zu3J0&;=pB20DJ3Lt&u1mfghIR1dSXmQb%fO%EInW+<)yO7#_ z*iu=Td=A@1@SDC8snU<2DTrccX{~w%{Ci{9oNKVn-QH?fuM!7Vbrv^Afz^WqVoGl5UmLi1jD^Szr33kqE)lCVVWb`NUa zcZI)e+S+|(mz@`XR^hel-3>@J8|CN8f#-#Nc*(kKFp?$KW2+be>B0-g~)ed zG`%(c8scN&C@PFS=M=b+N!YeF1N_HJ7(Clqc_bRmj%)CLBRtR9<Wv4=nrvyt#531U0w_J5u>AiUn!UEs>zpM`m0|BZEO8J zn8BJz-G1Mi$t|r7Hrv`DZ*3Kmug%gk*B&?0rl||{E08P00I4_GyIwXsIo?m~wbqh3 zOGIN$#}*W!o`n+5(KIQYb}SZqE&i0ktZ4P8G`A~Yj49?w;<&f^F-v<3{Id2`5-j6D>Rj*Mh!Z2%iVKD^#!S3x^ zAfs>{p)AWz)n|Fti`ol`Ydc(NOlsFL&qoymzTYVfip9^4JA~=Isqc2rYWv)eW+*=~ zs>H!gWu+oxERQnIF)f^mK~QzvAR6bI!)lC*|HQS_Mm1gOJ$plE9}xG0BwNrZ)-dqC zST0AVh+*x6U+6E?MI(MEexVI39n-kMT#cNiK)BNq=vDRr81Rq36OZd^Ex(`{#W6hY zc9mJAmRm0$sp3`(C@c_-L$wMkSfdzy@0}xHjS}}`U1RP4aDtO58B@mb=G$0e|M#Eo zR`37a-GA}o!T!IEXA4e{RGLaC-ZmVdrX32yj3lwr;wg$Q&=`xbvjy)bM1aWBlyNCQ zOmLFum=~BL8BGWsKLw5xB+1N9o5g!X&(rIMOT&ACAr-CR}vFF_As}uR*^P4*$h_`d{~AGVZB=+@E4b zdj+5&iY~HLuYE-4NhmJU&XaJ6E;>&_IZZoH{$FPcJ|Iq5CgAw>8_@|<&VIv@?1Uu7 zsAtM?_FE^MiHOCx_YHX^to)z8Ieh)&n{XQ6L>r6cf4~1?@S-aJ{k@0$KezI1!7Cyu zni3}sKae3;R?Cr^$U2=v`1$WRnU-Jwb9*9XDh_%*HEo1q(u-LndXl9i61~I}{Z&te 
zqPL~W>}db@n6oTZD#dUeBy3C~7-D&emBO)zn0jHSv$X~9fB4Jm(@y97{9IYqofxN> z#+XI~TkBoTtp^sKp69JA^#BIpr|^@Dx?zGy#^(p%5VwukHp7Tbd-k_#Ot&~hV$yj8 z3Vp~MoLz#y@;1;lv1bahkCCR2hfLST*4FpKJu0Gmi{>LLo{rAu+eCbHcG+ zehVV8=ybZE+T#>UEMO20{`|*I*8tcHcfg^C`uVT45d2G?U9KbiQ;KKg+&vs@j*%7dW?4f^tfQ zl*MYf$42rTzQ8G#8K;0iy#V#i5_N9YjHHQNU*OyiN&f4*cY*ay2fiqg zcFtiTDig%x5-RG&ZKIO1*b${hm6XK}lb-rbC27JeMO~mSy7}Kgyl*yw))c~jmrh@`l7Un2K>>EJUWL;ecNg9yxLtm`btAe z#MQdrQ^#&ccH7J`Q9_iCbVqi`2;2_6*8&is*X;t!olx;fcy6Wwu#5q%ct)Jj-`FrL zX6^GPJW-l*mU5!Kpm|X}D>(>P822^sEatu{s;lDZLAXIYg+vmMvP9r2qO0QRL%2CS z3DxIXt}*To!g|D97f(J!Q3H}|;CcG$@CY~-EaMSg2i-O0C~ih`Wjv2gUtfWk>*ATR zghcZuR9D4Q&15*rIFWM;>?ZJ}SY9%IaZ*QjRXi<(^>ADpPn41~%x5IRSJg|0C&v!L z4UYRHMc3rdzcWt$#i&FHoUj-UGs#4Rl4a(@y86@l)6{CDjI6^`m{1sEc2D#gN-tez z#ay@-?)`B+TDIeQF0@i#MX6cxt z7#F7%%nqK?ea8|mNs<7jBGYFw8jqZe$qZBDy1vBvjRC1FN@TncAsmn3yx21|EPn1x zNwXJ6Y-@7D*(D$f8bR%S(C>w0sS!hiF(AE^4~w-*wSj8Xhx_2geB`eNJ5XNQSZO?- zqVe*j%x(DwuKX;Mk=t@9T>0@im*_Du|20~YYF@#Hbw*i|fG(*$fb|iuyrYYpGA4t3 z(c=P^C}NCG;|=%lEj=z5a*2@ zc+H{<%nc91DIOCc`5f#qhPP;n1#HJSsY5TX`oq{&y#31KQ9j{?^_3pdmD#c+jk-J*O<~!M>A`-%RQMFB(BoQuecNh;?NLX)}uyJp< zKlpR6|DreeQxB!2>(v%rLc4?RV87?7^*NR3Q@*VbVF67xH6x4AaO8OwOWL|qug-GI9yvr-S#3trC8MXJ#{cj`cucsIkGlei3O8Bdhs zKFVIoBaV&1ek0ENxQ(cl)_1ozd*cY){mMovyXd)(pz zudzXQE%N2rg!S_EE@`x=haUNgEdXQGpEr;%KPALCa4b2&Gpu9B$p}!wF^cC9>6s%A z{dIilU|H8@oh(xb0vk0Me<+7vIU>8Z2SNSaZm{<8&*Ca$~4K7>d_c; z!;0#c$d5?t!>mdfB0`JZpVGc?*20SBNMLyu$j|xa=Zis1SeC?KP%w#lTr*GeMa78Y z-!n{QGIy1u(xT)1(Ycy6)oR3Y?N5>{_ZHpkZqddVeFgWz4YJ(EK>NJcaMISy>aQ}A z#DM9Ha7L%*04*o_G=etJO-KYXCc~QHV1{Lz&fKpv$=AD4IkTI)%k|QoniCXbJd9-i13dL2pADAq}K(7u|Bwi>>>Qvc9n5k)~*M&v8>JT z1Sq3jSK9AJ-Kf66^Fg_D+LUhUp4D0(inIS$2=TxBaJ73px8L@2g`*F5Z#mp``}gSa z70Z&89RB9>3Q1;elIcoz*-FWwLVBH4H0O3Uduq3S)w(`U8Bt!(A5(~s(7v{L5#9LI zJ9=n>5ywSL#DrbCY15lZLpW25nGb$ysef*7xpXVKpF6N*{K|Qb&YQ;16=_cn2FDV( zom&uy2~q~EiLKOk3hR)|Jdw^iRvv}#;=lMN!YDVeNfnEr?v=NJwiV! 
zD6d~t^1EMsSnJlA#j{&T-GWr7=Y{O#krYmVjLS@~m4?{F&MSPh3yw$|AtZoNn_+3r zGDCn&V6a%&Rg>ERG-SFVeY9-+N|~fY?o(cK{Pa!An;N~w@zYtM?s)VR4)J7iw=<=c_D z6~-+>LLeG*R2sW=$L+{L*^u6>k6(`G9gks-$8C+{vN?~@F}uFaqM*EfwE@Y4MT2OO zz3ZvZU~#*XpLWH3M4S>jUhaok!RK-%bj^amBs#q~=@_5s)x_2B-soKcQ)w-~)(tt&p%z$IHaDq^Z-q zq-NyeBM-LdE_3N z%Oz^0F@15Tm$kam+ASd$I)G4dH~u6!0zyZOPt8dQlN(IBeLWZ5f4meR@H!(qyU zx7!^0L{N0>bnKqE-tJz;n}3@yVN&-N2uk?Cl5C308|P2qc;t95=mt;0Z|1GeS{4L0 ztyN5f!c#yzR|Jhro-sj~QfwuRgKp62peVvZ{K#Uw4BbOr5~eIJuC8pwyVTGrM)5~Z zBz~s{K#+eeZS0Kv3$M|_pZ-QVofOH*GPox;U-YmT$hZV4Pc^rQogM3#LEkD3hoS2h zc!aGGU<)(_G}2por^#zJ=h<=;Si!bBN!^lO`uUt)@R*AQ^gBD9Tj&>f{v%3LLbci` z!Lsl3RDcGiwE`~i+{XDf1Y@g@gtj4J@Hd-p3yE#A^5v**fsgDKKyFp4Lhh2L@9{h* zrJ|+outF?Tn(CuHiLP^VgTPb}ofcKgqP8&o&=Ly?6?1_=Upu8v$7xdZI1EFZuo~0ZLC0B= zF16&z;n~^8cc-sAg^@XP8@~q~-}W@5)K`~{%3l_c%a63pQ;xSqdA8p0_sHfT_n!+q zU)bV>m*afZ2Hgg9ensBbubO_HEk6OiCTm5a%EG@)nxN!_w*vO|-TOCZq5Nd@RK%!Y z3BJx%BmMsEUN+JnT;aNr74I8add1weFCNijb39TSX~cM>W=RHp(J!3;@C`uBo(W}FH@}_JLfJlLp zky=(Eo*%GsBzyDjhp^Lm^5mnAd9;XMVvd2>T*TPkA5ta4|9 zjyYbO^x7r6RxE&0VmoDm$#(R*H$qhLR18z5sZ^|XjUz}=_uNlcW}@~wUHC~+!Pr4I zyYP7pr`cXuLTiM6?`_%AU5#AVa`_T$RDz#2MotmT*G{llbTA;LvJ7H5c#uTfOzeMwrtJP(U_R=~l7CTam8e?uxW$O%2MQSH4 zBqV1e!(7MK)nF}Wm#L{z)&%K!MBOSP4ory8CKzom~QjFjR+bPhy_42#v9 zG&Z`GBFm>D!W41B@|31|J^&=jwPS{bYThX8OjvEXvf%MvvNJ*_rMBI@|koO2%f0dt3R>ZYFRyLIR5& zD<$?59h<>(eUJ=hdx}|p5_ps)kPvzy3@^BrRg0y3?=q8&6O@P^$ATp@+*Qc!>VNmT zDHF0=AY#V3=fPX0Ck@4Qnh#R~cU2#Bd)?V?S2Fu^ucv3TA4r6$T1g(UbZ#z`{kMpv z^S`LsYu8Cjr(?^-kkQy)hkBTzXo4Mtr{HoL?uLEXRs{n4DcJc-NAb%P%{A*~I_8K7 zBhmO%gi|f?5u2t7G0SE*Sy&KZ=N}dT8%m^wTw&>EH(LsjG**J5$3d@md3hNkjb+IA zxR;nxqW8n`(VMqtZ@Rl`+Wv_qScqbR9L~+$7@1`3=n|Cn7;|j2I-zjMiPX1^2sV0e*5|%rOX literal 0 HcmV?d00001 diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/renovate.json b/pkg/scanners/helm/test/testdata/with-tarred-dep/renovate.json new file mode 100644 index 000000000000..a78e667b7736 --- /dev/null +++ b/pkg/scanners/helm/test/testdata/with-tarred-dep/renovate.json @@ -0,0 +1,5 @@ +{ + 
"extends": [ + "config:base" + ] + } \ No newline at end of file diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/.gitkeep b/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/.gitkeep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/deployment.yaml b/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/deployment.yaml new file mode 100644 index 000000000000..003d08eb745d --- /dev/null +++ b/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/deployment.yaml @@ -0,0 +1,62 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ template "common.names.fullname" . }} + labels: {{- include "common.labels.standard" . | nindent 4 }} +spec: + replicas: {{ .Values.replicaCount }} + selector: + matchLabels: {{- include "common.labels.matchLabels" . | nindent 6 }} + template: + metadata: + labels: {{- include "common.labels.standard" . | nindent 8 }} + spec: + containers: + - name: metadata-service + env: + - name: METADATASERVICE_UPSTREAM_API_URL + value: '{{ .Values.upstreamAPI }}' + - name: METADATASERVICE_OIDC_AUDIENCE + value: "{{ .Values.oidc.audience }}" + - name: METADATASERVICE_OIDC_ISSUER + value: "{{ .Values.oidc.issuer }}" + - name: METADATASERVICE_OIDC_JWKSURI + value: "{{ .Values.oidc.jwksuri }}" + - name: METADATASERVICE_OIDC_CLAIMS_ROLES + value: "{{ .Values.oidc.rolesClaim }}" + - name: METADATASERVICE_OIDC_CLAIMS_USERNAME + value: "{{ .Values.oidc.userClaim }}" + - name: METADATASERVICE_DB_URI + valueFrom: + secretKeyRef: + name: {{ template "common.names.fullname" . 
}}-dbconn + key: uri + image: "{{ .Values.metadataservice.image.repository }}:{{ .Values.metadataservice.image.tag }}" + imagePullPolicy: Always + volumeMounts: + - name: dbcerts + mountPath: "/dbcerts" + readOnly: true + ports: + - name: http + containerPort: 8000 + protocol: TCP + livenessProbe: + httpGet: + path: /healthz/liveness + port: http + initialDelaySeconds: 5 + timeoutSeconds: 2 + readinessProbe: + httpGet: + path: /healthz/readiness + port: http + initialDelaySeconds: 5 + timeoutSeconds: 2 + resources: +{{ toYaml .Values.resources | indent 12 }} + volumes: + - name: dbcerts + secret: + secretName: {{ template "common.names.fullname" . }}-crdb-ca + defaultMode: 0400 diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/ingress.yaml b/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/ingress.yaml new file mode 100644 index 000000000000..45cd321ca9a9 --- /dev/null +++ b/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/ingress.yaml @@ -0,0 +1,36 @@ +{{- if .Values.ingress.enabled }} +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: {{ template "common.names.fullname" . }} + labels: {{- include "common.labels.standard" . | nindent 4 }} +spec: + {{- if and .Values.ingress.ingressClassName (eq "true" (include "common.ingress.supportsIngressClassname" .)) }} + ingressClassName: {{ .Values.ingress.ingressClassName | quote }} + {{- end }} + rules: + {{- range .Values.ingress.hostnames }} + - host: {{ . 
}} + http: + paths: + - path: / + {{- if $.Values.ingress.publicPaths -}} + ( + {{- range $index,$path := $.Values.ingress.publicPaths }} + {{- if $index }}|{{ end }} + {{- $path }} + {{- end -}} + ) + {{- end }} + pathType: Prefix + backend: + service: + name: {{ template "common.names.fullname" $ }} + port: + name: http + {{- end }} +# tls: [] +# hosts: +# - hollow-metadataservice.mydomain +# secretName: hollow-metadataservice-example-tls +{{- end }} diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-crdb-ca.yaml b/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-crdb-ca.yaml new file mode 100644 index 000000000000..18c39c058dcd --- /dev/null +++ b/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-crdb-ca.yaml @@ -0,0 +1,17 @@ +{{- if .Values.crdbCA }} +apiVersion: v1 +kind: Secret +metadata: + name: {{ template "common.names.fullname" . }}-crdb-ca + namespace: {{ .Release.Namespace | quote }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +type: Opaque +data: + ca.crt: {{ .Values.crdbCA | b64enc | quote }} +{{- end }} diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-dbconn.yaml b/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-dbconn.yaml new file mode 100644 index 000000000000..06c93061d08c --- /dev/null +++ b/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-dbconn.yaml @@ -0,0 +1,17 @@ +{{- if .Values.dbconnURI }} +apiVersion: v1 +kind: Secret +metadata: + name: {{ template "common.names.fullname" . 
}}-dbconn + namespace: {{ .Release.Namespace | quote }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +type: Opaque +data: + uri: {{ .Values.dbconnURI | b64enc | quote }} +{{- end }} diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/service.yaml b/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/service.yaml new file mode 100644 index 000000000000..fdb8b82d76f8 --- /dev/null +++ b/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/service.yaml @@ -0,0 +1,17 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ template "common.names.fullname" . }} + labels: {{- include "common.labels.standard" . | nindent 4 }} +spec: + ports: + - name: http + port: 80 + protocol: TCP + targetPort: 8000 + - name: https + port: 443 + protocol: TCP + targetPort: 8000 + selector:{{ include "common.labels.matchLabels" . 
| nindent 4 }} + type: ClusterIP diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/values.yaml b/pkg/scanners/helm/test/testdata/with-tarred-dep/values.yaml new file mode 100644 index 000000000000..7a86583f54e3 --- /dev/null +++ b/pkg/scanners/helm/test/testdata/with-tarred-dep/values.yaml @@ -0,0 +1,30 @@ +metadataservice: + image: + repository: ghcr.io/metal-toolbox/hollow-metadataservice + tag: "v0.0.1" + +ingress: + enabled: true + hostnames: + - metadata-service.mydomain + publicPaths: + - $ + - metadata + - userdata + - '2009-04-04' + +oidc: + audience: "" + issuer: "" + jwksuri: "" + rolesClaim: "" + userClaim: "" + +replicaCount: 1 +resources: + limits: + cpu: 4 + memory: 4Gi + requests: + cpu: 4 + memory: 4Gi diff --git a/pkg/scanners/helm/test/values/values.yaml b/pkg/scanners/helm/test/values/values.yaml new file mode 100644 index 000000000000..6f637160ffa9 --- /dev/null +++ b/pkg/scanners/helm/test/values/values.yaml @@ -0,0 +1,3 @@ +--- +securityContext: + runAsUser: 0 \ No newline at end of file diff --git a/pkg/scanners/json/parser/parser.go b/pkg/scanners/json/parser/parser.go new file mode 100644 index 000000000000..3489f0dc661c --- /dev/null +++ b/pkg/scanners/json/parser/parser.go @@ -0,0 +1,89 @@ +package parser + +import ( + "context" + "encoding/json" + "io" + "io/fs" + "path/filepath" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/detection" +) + +var _ options.ConfigurableParser = (*Parser)(nil) + +type Parser struct { + debug debug.Logger + skipRequired bool +} + +func (p *Parser) SetDebugWriter(writer io.Writer) { + p.debug = debug.New(writer, "json", "parser") +} + +func (p *Parser) SetSkipRequiredCheck(b bool) { + p.skipRequired = b +} + +// New creates a new parser +func New(opts ...options.ParserOption) *Parser { + p := &Parser{} + for _, opt := range opts { + opt(p) + } + return p +} + +func (p *Parser) 
ParseFS(ctx context.Context, target fs.FS, path string) (map[string]interface{}, error) { + + files := make(map[string]interface{}) + if err := fs.WalkDir(target, filepath.ToSlash(path), func(path string, entry fs.DirEntry, err error) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + if err != nil { + return err + } + if entry.IsDir() { + return nil + } + if !p.Required(path) { + return nil + } + df, err := p.ParseFile(ctx, target, path) + if err != nil { + p.debug.Log("Parse error in '%s': %s", path, err) + return nil + } + files[path] = df + return nil + }); err != nil { + return nil, err + } + return files, nil +} + +// ParseFile parses Dockerfile content from the provided filesystem path. +func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) (interface{}, error) { + f, err := fs.Open(filepath.ToSlash(path)) + if err != nil { + return nil, err + } + defer func() { _ = f.Close() }() + var target interface{} + if err := json.NewDecoder(f).Decode(&target); err != nil { + return nil, err + } + return target, nil +} + +func (p *Parser) Required(path string) bool { + if p.skipRequired { + return true + } + return detection.IsType(path, nil, detection.FileTypeJSON) +} diff --git a/pkg/scanners/json/parser/parser_test.go b/pkg/scanners/json/parser/parser_test.go new file mode 100644 index 000000000000..2af3936d6124 --- /dev/null +++ b/pkg/scanners/json/parser/parser_test.go @@ -0,0 +1,51 @@ +package parser + +import ( + "context" + "testing" + + "github.com/liamg/memoryfs" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Parser(t *testing.T) { + input := `{ "x": { "y": 123, "z": ["a", "b", "c"]}}` + + memfs := memoryfs.New() + err := memfs.WriteFile("something.json", []byte(input), 0644) + require.NoError(t, err) + + data, err := New().ParseFile(context.TODO(), memfs, "something.json") + require.NoError(t, err) + + msi, ok := data.(map[string]interface{}) + 
require.True(t, ok) + + xObj, ok := msi["x"] + require.True(t, ok) + + xMsi, ok := xObj.(map[string]interface{}) + require.True(t, ok) + + yRaw, ok := xMsi["y"] + require.True(t, ok) + + y, ok := yRaw.(float64) + require.True(t, ok) + + assert.Equal(t, 123.0, y) + + zRaw, ok := xMsi["z"] + require.True(t, ok) + + z, ok := zRaw.([]interface{}) + require.True(t, ok) + + require.Len(t, z, 3) + + assert.Equal(t, "a", z[0]) + assert.Equal(t, "b", z[1]) + assert.Equal(t, "c", z[2]) + +} diff --git a/pkg/scanners/json/scanner.go b/pkg/scanners/json/scanner.go new file mode 100644 index 000000000000..6612b6d8f477 --- /dev/null +++ b/pkg/scanners/json/scanner.go @@ -0,0 +1,170 @@ +package json + +import ( + "context" + "io" + "io/fs" + "sync" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/rego" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners" + "github.com/aquasecurity/trivy/pkg/scanners/json/parser" +) + +var _ scanners.FSScanner = (*Scanner)(nil) +var _ options.ConfigurableScanner = (*Scanner)(nil) + +type Scanner struct { + debug debug.Logger + policyDirs []string + policyReaders []io.Reader + parser *parser.Parser + regoScanner *rego.Scanner + skipRequired bool + options []options.ScannerOption + sync.Mutex + frameworks []framework.Framework + spec string + loadEmbeddedPolicies bool + loadEmbeddedLibraries bool +} + +func (s *Scanner) SetRegoOnly(bool) { +} + +func (s *Scanner) SetFrameworks(frameworks []framework.Framework) { + s.frameworks = frameworks +} + +func (s *Scanner) SetSpec(spec string) { + s.spec = spec +} + +func (s *Scanner) SetUseEmbeddedPolicies(b bool) { + s.loadEmbeddedPolicies = b +} + +func (s *Scanner) SetUseEmbeddedLibraries(b bool) { + 
s.loadEmbeddedLibraries = b +} + +func (s *Scanner) SetPolicyReaders(readers []io.Reader) { + s.policyReaders = readers +} + +func (s *Scanner) SetDebugWriter(writer io.Writer) { + s.debug = debug.New(writer, "json", "scanner") +} + +func (s *Scanner) SetTraceWriter(_ io.Writer) { +} + +func (s *Scanner) SetPerResultTracingEnabled(_ bool) { +} + +func (s *Scanner) SetPolicyDirs(dirs ...string) { + s.policyDirs = dirs +} + +func (s *Scanner) SetDataDirs(_ ...string) {} +func (s *Scanner) SetPolicyNamespaces(_ ...string) {} + +func (s *Scanner) SetSkipRequiredCheck(skip bool) { + s.skipRequired = skip +} + +func (s *Scanner) SetPolicyFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} + +func (s *Scanner) SetDataFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} +func (s *Scanner) SetRegoErrorLimit(_ int) {} + +func NewScanner(opts ...options.ScannerOption) *Scanner { + s := &Scanner{ + options: opts, + } + for _, opt := range opts { + opt(s) + } + s.parser = parser.New(options.ParserWithSkipRequiredCheck(s.skipRequired)) + return s +} + +func (s *Scanner) Name() string { + return "JSON" +} + +func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { + + files, err := s.parser.ParseFS(ctx, fs, path) + if err != nil { + return nil, err + } + + if len(files) == 0 { + return nil, nil + } + + var inputs []rego.Input + for path, file := range files { + inputs = append(inputs, rego.Input{ + Path: path, + FS: fs, + Contents: file, + }) + } + + results, err := s.scanRego(ctx, fs, inputs...) 
+ if err != nil { + return nil, err + } + return results, nil +} + +func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { + parsed, err := s.parser.ParseFile(ctx, fs, path) + if err != nil { + return nil, err + } + s.debug.Log("Scanning %s...", path) + return s.scanRego(ctx, fs, rego.Input{ + Path: path, + Contents: parsed, + }) +} + +func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { + s.Lock() + defer s.Unlock() + if s.regoScanner != nil { + return s.regoScanner, nil + } + regoScanner := rego.NewScanner(types.SourceJSON, s.options...) + regoScanner.SetParentDebugLogger(s.debug) + if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, srcFS, s.policyDirs, s.policyReaders); err != nil { + return nil, err + } + s.regoScanner = regoScanner + return regoScanner, nil +} + +func (s *Scanner) scanRego(ctx context.Context, srcFS fs.FS, inputs ...rego.Input) (scan.Results, error) { + regoScanner, err := s.initRegoScanner(srcFS) + if err != nil { + return nil, err + } + results, err := regoScanner.ScanInput(ctx, inputs...) 
+ if err != nil { + return nil, err + } + results.SetSourceAndFilesystem("", srcFS, false) + return results, nil +} diff --git a/pkg/scanners/json/scanner_test.go b/pkg/scanners/json/scanner_test.go new file mode 100644 index 000000000000..c66c49c8d5f7 --- /dev/null +++ b/pkg/scanners/json/scanner_test.go @@ -0,0 +1,77 @@ +package json + +import ( + "context" + "testing" + + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_BasicScan(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "/code/data.json": `{ "x": { "y": 123, "z": ["a", "b", "c"]}}`, + "/rules/rule.rego": `package builtin.json.lol + +__rego_metadata__ := { + "id": "ABC123", + "avd_id": "AVD-AB-0123", + "title": "title", + "short_code": "short", + "severity": "CRITICAL", + "type": "JSON Check", + "description": "description", + "recommended_actions": "actions", + "url": "https://example.com", +} + +__rego_input__ := { + "combine": false, + "selector": [{"type": "json"}], +} + +deny[res] { + input.x.y == 123 + res := { + "msg": "oh no", + "startline": 1, + "endline": 2, + } +} + +`, + }) + + scanner := NewScanner(options.ScannerWithPolicyDirs("rules")) + + results, err := scanner.ScanFS(context.TODO(), fs, "code") + require.NoError(t, err) + + require.Len(t, results.GetFailed(), 1) + + assert.Equal(t, scan.Rule{ + AVDID: "AVD-AB-0123", + Aliases: []string{"ABC123"}, + ShortCode: "short", + Summary: "title", + Explanation: "description", + Impact: "", + Resolution: "actions", + Provider: "json", + Service: "general", + Links: []string{"https://example.com"}, + Severity: "CRITICAL", + Terraform: &scan.EngineMetadata{}, + CloudFormation: &scan.EngineMetadata{}, + CustomChecks: scan.CustomChecks{ + 
Terraform: (*scan.TerraformCustomCheck)(nil), + }, + RegoPackage: "data.builtin.json.lol", + Frameworks: map[framework.Framework][]string{}, + }, results.GetFailed()[0].Rule()) +} diff --git a/pkg/scanners/kubernetes/parser/manifest.go b/pkg/scanners/kubernetes/parser/manifest.go new file mode 100644 index 000000000000..3f809a6f9145 --- /dev/null +++ b/pkg/scanners/kubernetes/parser/manifest.go @@ -0,0 +1,33 @@ +package parser + +import ( + "fmt" + + "gopkg.in/yaml.v3" +) + +type Manifest struct { + Path string + Content *ManifestNode +} + +func (m *Manifest) UnmarshalYAML(value *yaml.Node) error { + + switch value.Tag { + case "!!map": + node := new(ManifestNode) + node.Path = m.Path + if err := value.Decode(node); err != nil { + return err + } + m.Content = node + default: + return fmt.Errorf("failed to handle tag: %s", value.Tag) + } + + return nil +} + +func (m *Manifest) ToRego() interface{} { + return m.Content.ToRego() +} diff --git a/pkg/scanners/kubernetes/parser/manifest_node.go b/pkg/scanners/kubernetes/parser/manifest_node.go new file mode 100644 index 000000000000..1f82ca1e3680 --- /dev/null +++ b/pkg/scanners/kubernetes/parser/manifest_node.go @@ -0,0 +1,140 @@ +package parser + +import ( + "fmt" + "strconv" + + "gopkg.in/yaml.v3" +) + +type TagType string + +const ( + TagBool TagType = "!!bool" + TagInt TagType = "!!int" + TagFloat TagType = "!!float" + TagStr TagType = "!!str" + TagString TagType = "!!string" + TagSlice TagType = "!!seq" + TagMap TagType = "!!map" +) + +type ManifestNode struct { + StartLine int + EndLine int + Offset int + Value interface{} + Type TagType + Path string +} + +func (r *ManifestNode) ToRego() interface{} { + if r == nil { + return nil + } + switch r.Type { + case TagBool, TagInt, TagString, TagStr: + return r.Value + case TagSlice: + var output []interface{} + for _, node := range r.Value.([]ManifestNode) { + output = append(output, node.ToRego()) + } + return output + case TagMap: + output := 
make(map[string]interface{}) + output["__defsec_metadata"] = map[string]interface{}{ + "startline": r.StartLine, + "endline": r.EndLine, + "filepath": r.Path, + "offset": r.Offset, + } + for key, node := range r.Value.(map[string]ManifestNode) { + output[key] = node.ToRego() + } + return output + } + return nil +} + +func (r *ManifestNode) UnmarshalYAML(node *yaml.Node) error { + + r.StartLine = node.Line + r.EndLine = node.Line + r.Type = TagType(node.Tag) + + switch TagType(node.Tag) { + case TagString, TagStr: + + r.Value = node.Value + case TagInt: + val, err := strconv.Atoi(node.Value) + if err != nil { + return err + } + r.Value = val + case TagFloat: + val, err := strconv.ParseFloat(node.Value, 64) + if err != nil { + return err + } + r.Value = val + case TagBool: + val, err := strconv.ParseBool(node.Value) + if err != nil { + return err + } + r.Value = val + case TagMap: + return r.handleMapTag(node) + case TagSlice: + return r.handleSliceTag(node) + + default: + return fmt.Errorf("node tag is not supported %s", node.Tag) + } + return nil +} + +func (r *ManifestNode) handleSliceTag(node *yaml.Node) error { + var nodes []ManifestNode + max := node.Line + for _, contentNode := range node.Content { + newNode := new(ManifestNode) + newNode.Path = r.Path + if err := contentNode.Decode(newNode); err != nil { + return err + } + if newNode.EndLine > max { + max = newNode.EndLine + } + nodes = append(nodes, *newNode) + } + r.EndLine = max + r.Value = nodes + return nil +} + +func (r *ManifestNode) handleMapTag(node *yaml.Node) error { + output := make(map[string]ManifestNode) + var key string + max := node.Line + for i, contentNode := range node.Content { + if i == 0 || i%2 == 0 { + key = contentNode.Value + } else { + newNode := new(ManifestNode) + newNode.Path = r.Path + if err := contentNode.Decode(newNode); err != nil { + return err + } + output[key] = *newNode + if newNode.EndLine > max { + max = newNode.EndLine + } + } + } + r.EndLine = max + r.Value = output 
+ return nil +} diff --git a/pkg/scanners/kubernetes/parser/parser.go b/pkg/scanners/kubernetes/parser/parser.go new file mode 100644 index 000000000000..57b3a663283d --- /dev/null +++ b/pkg/scanners/kubernetes/parser/parser.go @@ -0,0 +1,137 @@ +package parser + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "io/fs" + "path/filepath" + "regexp" + "strings" + + "gopkg.in/yaml.v3" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/detection" +) + +var _ options.ConfigurableParser = (*Parser)(nil) + +type Parser struct { + debug debug.Logger + skipRequired bool +} + +func (p *Parser) SetDebugWriter(writer io.Writer) { + p.debug = debug.New(writer, "kubernetes", "parser") +} + +func (p *Parser) SetSkipRequiredCheck(b bool) { + p.skipRequired = b +} + +// New creates a new K8s parser +func New(options ...options.ParserOption) *Parser { + p := &Parser{} + for _, option := range options { + option(p) + } + return p +} + +func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[string][]interface{}, error) { + files := make(map[string][]interface{}) + if err := fs.WalkDir(target, filepath.ToSlash(path), func(path string, entry fs.DirEntry, err error) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + if err != nil { + return err + } + if entry.IsDir() { + return nil + } + if !p.required(target, path) { + return nil + } + parsed, err := p.ParseFile(ctx, target, path) + if err != nil { + p.debug.Log("Parse error in '%s': %s", path, err) + return nil + } + files[path] = parsed + return nil + }); err != nil { + return nil, err + } + return files, nil +} + +// ParseFile parses Kubernetes manifest from the provided filesystem path. 
+func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) ([]interface{}, error) { + f, err := fs.Open(filepath.ToSlash(path)) + if err != nil { + return nil, err + } + defer func() { _ = f.Close() }() + return p.Parse(f, path) +} + +func (p *Parser) required(fs fs.FS, path string) bool { + if p.skipRequired { + return true + } + f, err := fs.Open(filepath.ToSlash(path)) + if err != nil { + return false + } + defer func() { _ = f.Close() }() + if data, err := io.ReadAll(f); err == nil { + return detection.IsType(path, bytes.NewReader(data), detection.FileTypeKubernetes) + } + return false +} + +func (p *Parser) Parse(r io.Reader, path string) ([]interface{}, error) { + + contents, err := io.ReadAll(r) + if err != nil { + return nil, err + } + + if len(contents) == 0 { + return nil, nil + } + + if strings.TrimSpace(string(contents))[0] == '{' { + var target interface{} + if err := json.Unmarshal(contents, &target); err != nil { + return nil, err + } + return []interface{}{target}, nil + } + + var results []interface{} + + re := regexp.MustCompile(`(?m:^---\r?\n)`) + pos := 0 + for _, partial := range re.Split(string(contents), -1) { + var result Manifest + result.Path = path + if err := yaml.Unmarshal([]byte(partial), &result); err != nil { + return nil, fmt.Errorf("unmarshal yaml: %w", err) + } + if result.Content != nil { + result.Content.Offset = pos + results = append(results, result.ToRego()) + } + pos += len(strings.Split(partial, "\n")) + } + + return results, nil +} diff --git a/pkg/scanners/kubernetes/scanner.go b/pkg/scanners/kubernetes/scanner.go new file mode 100644 index 000000000000..a50c9db30c1d --- /dev/null +++ b/pkg/scanners/kubernetes/scanner.go @@ -0,0 +1,176 @@ +package kubernetes + +import ( + "context" + "io" + "io/fs" + "path/filepath" + "sort" + "sync" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/rego" + 
"github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners" + "github.com/aquasecurity/trivy/pkg/scanners/kubernetes/parser" + "github.com/liamg/memoryfs" +) + +var _ scanners.FSScanner = (*Scanner)(nil) +var _ options.ConfigurableScanner = (*Scanner)(nil) + +type Scanner struct { + debug debug.Logger + options []options.ScannerOption + policyDirs []string + policyReaders []io.Reader + regoScanner *rego.Scanner + parser *parser.Parser + skipRequired bool + sync.Mutex + loadEmbeddedPolicies bool + frameworks []framework.Framework + spec string + loadEmbeddedLibraries bool +} + +func (s *Scanner) SetSpec(spec string) { + s.spec = spec +} + +func (s *Scanner) SetRegoOnly(bool) {} + +func (s *Scanner) SetFrameworks(frameworks []framework.Framework) { + s.frameworks = frameworks +} + +func (s *Scanner) SetUseEmbeddedPolicies(b bool) { + s.loadEmbeddedPolicies = b +} + +func (s *Scanner) SetUseEmbeddedLibraries(b bool) { + s.loadEmbeddedLibraries = b +} + +func (s *Scanner) SetPolicyReaders(readers []io.Reader) { + s.policyReaders = readers +} + +func (s *Scanner) SetSkipRequiredCheck(skip bool) { + s.skipRequired = skip +} + +func (s *Scanner) SetDebugWriter(writer io.Writer) { + s.debug = debug.New(writer, "kubernetes", "scanner") +} + +func (s *Scanner) SetTraceWriter(_ io.Writer) { +} + +func (s *Scanner) SetPerResultTracingEnabled(_ bool) { +} + +func (s *Scanner) SetPolicyDirs(dirs ...string) { + s.policyDirs = dirs +} + +func (s *Scanner) SetDataDirs(...string) {} +func (s *Scanner) SetPolicyNamespaces(_ ...string) { +} + +func (s *Scanner) SetPolicyFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} + +func (s *Scanner) SetDataFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} +func (s *Scanner) SetRegoErrorLimit(_ int) {} + +func NewScanner(opts 
...options.ScannerOption) *Scanner { + s := &Scanner{ + options: opts, + } + for _, opt := range opts { + opt(s) + } + s.parser = parser.New(options.ParserWithSkipRequiredCheck(s.skipRequired)) + return s +} + +func (s *Scanner) Name() string { + return "Kubernetes" +} + +func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { + s.Lock() + defer s.Unlock() + if s.regoScanner != nil { + return s.regoScanner, nil + } + regoScanner := rego.NewScanner(types.SourceKubernetes, s.options...) + regoScanner.SetParentDebugLogger(s.debug) + if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, srcFS, s.policyDirs, s.policyReaders); err != nil { + return nil, err + } + s.regoScanner = regoScanner + return regoScanner, nil +} + +func (s *Scanner) ScanReader(ctx context.Context, filename string, reader io.Reader) (scan.Results, error) { + memfs := memoryfs.New() + if err := memfs.MkdirAll(filepath.Base(filename), 0o700); err != nil { + return nil, err + } + data, err := io.ReadAll(reader) + if err != nil { + return nil, err + } + if err := memfs.WriteFile(filename, data, 0o644); err != nil { + return nil, err + } + return s.ScanFS(ctx, memfs, ".") +} + +func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, dir string) (scan.Results, error) { + + k8sFilesets, err := s.parser.ParseFS(ctx, target, dir) + if err != nil { + return nil, err + } + + if len(k8sFilesets) == 0 { + return nil, nil + } + + var inputs []rego.Input + for path, k8sFiles := range k8sFilesets { + for _, content := range k8sFiles { + inputs = append(inputs, rego.Input{ + Path: path, + FS: target, + Contents: content, + }) + } + } + + regoScanner, err := s.initRegoScanner(target) + if err != nil { + return nil, err + } + + s.debug.Log("Scanning %d files...", len(inputs)) + results, err := regoScanner.ScanInput(ctx, inputs...) 
+ if err != nil { + return nil, err + } + results.SetSourceAndFilesystem("", target, false) + + sort.Slice(results, func(i, j int) bool { + return results[i].Rule().AVDID < results[j].Rule().AVDID + }) + return results, nil +} diff --git a/pkg/scanners/kubernetes/scanner_test.go b/pkg/scanners/kubernetes/scanner_test.go new file mode 100644 index 000000000000..8614925f18f0 --- /dev/null +++ b/pkg/scanners/kubernetes/scanner_test.go @@ -0,0 +1,733 @@ +package kubernetes + +import ( + "context" + "os" + "strings" + "testing" + + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_BasicScan(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "/code/example.yaml": ` +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello +`, + "/rules/lib.k8s.rego": ` + package lib.kubernetes + + default is_gatekeeper = false + + is_gatekeeper { + has_field(input, "review") + has_field(input.review, "object") + } + + object = input { + not is_gatekeeper + } + + object = input.review.object { + is_gatekeeper + } + + format(msg) = gatekeeper_format { + is_gatekeeper + gatekeeper_format = {"msg": msg} + } + + format(msg) = msg { + not is_gatekeeper + } + + name = object.metadata.name + + default namespace = "default" + + namespace = object.metadata.namespace + + #annotations = object.metadata.annotations + + kind = object.kind + + is_pod { + kind = "Pod" + } + + is_cronjob { + kind = "CronJob" + } + + default is_controller = false + + is_controller { + kind = "Deployment" + } + + is_controller { + kind = "StatefulSet" + } + + is_controller { + kind = "DaemonSet" + } + + 
is_controller { + kind = "ReplicaSet" + } + + is_controller { + kind = "ReplicationController" + } + + is_controller { + kind = "Job" + } + + split_image(image) = [image, "latest"] { + not contains(image, ":") + } + + split_image(image) = [image_name, tag] { + [image_name, tag] = split(image, ":") + } + + pod_containers(pod) = all_containers { + keys = {"containers", "initContainers"} + all_containers = [c | keys[k]; c = pod.spec[k][_]] + } + + containers[container] { + pods[pod] + all_containers = pod_containers(pod) + container = all_containers[_] + } + + containers[container] { + all_containers = pod_containers(object) + container = all_containers[_] + } + + pods[pod] { + is_pod + pod = object + } + + pods[pod] { + is_controller + pod = object.spec.template + } + + pods[pod] { + is_cronjob + pod = object.spec.jobTemplate.spec.template + } + + volumes[volume] { + pods[pod] + volume = pod.spec.volumes[_] + } + + dropped_capability(container, cap) { + container.securityContext.capabilities.drop[_] == cap + } + + added_capability(container, cap) { + container.securityContext.capabilities.add[_] == cap + } + + has_field(obj, field) { + obj[field] + } + + no_read_only_filesystem(c) { + not has_field(c, "securityContext") + } + + no_read_only_filesystem(c) { + has_field(c, "securityContext") + not has_field(c.securityContext, "readOnlyRootFilesystem") + } + + privilege_escalation_allowed(c) { + not has_field(c, "securityContext") + } + + privilege_escalation_allowed(c) { + has_field(c, "securityContext") + has_field(c.securityContext, "allowPrivilegeEscalation") + } + + annotations[annotation] { + pods[pod] + annotation = pod.metadata.annotations + } + + host_ipcs[host_ipc] { + pods[pod] + host_ipc = pod.spec.hostIPC + } + + host_networks[host_network] { + pods[pod] + host_network = pod.spec.hostNetwork + } + + host_pids[host_pid] { + pods[pod] + host_pid = pod.spec.hostPID + } + + host_aliases[host_alias] { + pods[pod] + host_alias = pod.spec + } + `, + 
"/rules/lib.util.rego": ` + package lib.utils + + has_key(x, k) { + _ = x[k] + }`, + "/rules/rule.rego": ` +package builtin.kubernetes.KSV011 + +import data.lib.kubernetes +import data.lib.utils + +default failLimitsCPU = false + +__rego_metadata__ := { + "id": "KSV011", + "avd_id": "AVD-KSV-0011", + "title": "CPU not limited", + "short_code": "limit-cpu", + "version": "v1.0.0", + "severity": "LOW", + "type": "Kubernetes Security Check", + "description": "Enforcing CPU limits prevents DoS via resource exhaustion.", + "recommended_actions": "Set a limit value under 'containers[].resources.limits.cpu'.", + "url": "https://cloud.google.com/blog/products/containers-kubernetes/kubernetes-best-practices-resource-requests-and-limits", +} + +__rego_input__ := { + "combine": false, + "selector": [{"type": "kubernetes"}], +} + +# getLimitsCPUContainers returns all containers which have set resources.limits.cpu +getLimitsCPUContainers[container] { + allContainers := kubernetes.containers[_] + utils.has_key(allContainers.resources.limits, "cpu") + container := allContainers.name +} + +# getNoLimitsCPUContainers returns all containers which have not set +# resources.limits.cpu +getNoLimitsCPUContainers[container] { + container := kubernetes.containers[_].name + not getLimitsCPUContainers[container] +} + +# failLimitsCPU is true if containers[].resources.limits.cpu is not set +# for ANY container +failLimitsCPU { + count(getNoLimitsCPUContainers) > 0 +} + +deny[res] { + failLimitsCPU + + msg := kubernetes.format(sprintf("Container '%s' of %s '%s' should set 'resources.limits.cpu'", [getNoLimitsCPUContainers[_], kubernetes.kind, kubernetes.name])) + + res := { + "msg": msg, + "id": __rego_metadata__.id, + "title": __rego_metadata__.title, + "severity": __rego_metadata__.severity, + "type": __rego_metadata__.type, + "startline": 6, + "endline": 10, + } +} +`, + }) + + scanner := NewScanner(options.ScannerWithPolicyDirs("rules")) + + results, err := scanner.ScanFS(context.TODO(), 
fs, "code") + require.NoError(t, err) + + require.Len(t, results.GetFailed(), 1) + + assert.Equal(t, scan.Rule{ + AVDID: "AVD-KSV-0011", + Aliases: []string{"KSV011"}, + ShortCode: "limit-cpu", + Summary: "CPU not limited", + Explanation: "Enforcing CPU limits prevents DoS via resource exhaustion.", + Impact: "", + Resolution: "Set a limit value under 'containers[].resources.limits.cpu'.", + Provider: "kubernetes", + Service: "general", + Links: []string{"https://cloud.google.com/blog/products/containers-kubernetes/kubernetes-best-practices-resource-requests-and-limits"}, + Severity: "LOW", + Terraform: &scan.EngineMetadata{}, + CloudFormation: &scan.EngineMetadata{}, + CustomChecks: scan.CustomChecks{Terraform: (*scan.TerraformCustomCheck)(nil)}, + RegoPackage: "data.builtin.kubernetes.KSV011", + Frameworks: map[framework.Framework][]string{}, + }, results.GetFailed()[0].Rule()) + + failure := results.GetFailed()[0] + actualCode, err := failure.GetCode() + require.NoError(t, err) + for i := range actualCode.Lines { + actualCode.Lines[i].Highlighted = "" + } + assert.Equal(t, []scan.Line{ + { + Number: 6, + Content: "spec: ", + IsCause: true, + FirstCause: true, + Annotation: "", + }, + { + Number: 7, + Content: " containers: ", + IsCause: true, + Annotation: "", + }, + { + Number: 8, + Content: " - command: [\"sh\", \"-c\", \"echo 'Hello' && sleep 1h\"]", + IsCause: true, + Annotation: "", + }, + { + Number: 9, + Content: " image: busybox", + IsCause: true, + Annotation: "", + }, + { + Number: 10, + Content: " name: hello", + IsCause: true, + LastCause: true, + Annotation: "", + }, + }, actualCode.Lines) +} + +func Test_FileScan(t *testing.T) { + + results, err := NewScanner(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true), options.ScannerWithEmbeddedLibraries(true)).ScanReader(context.TODO(), "k8s.yaml", strings.NewReader(` +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", 
"-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello +`)) + require.NoError(t, err) + + assert.Greater(t, len(results.GetFailed()), 0) +} + +func Test_FileScan_WithSeparator(t *testing.T) { + + results, err := NewScanner(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)).ScanReader(context.TODO(), "k8s.yaml", strings.NewReader(` +--- +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello +`)) + require.NoError(t, err) + + assert.Greater(t, len(results.GetFailed()), 0) +} + +func Test_FileScan_MultiManifests(t *testing.T) { + file := ` +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello1-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello1' && sleep 1h"] + image: busybox + name: hello1 +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello2-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello2' && sleep 1h"] + image: busybox + name: hello2 +` + + results, err := NewScanner( + options.ScannerWithEmbeddedPolicies(true), + options.ScannerWithEmbeddedLibraries(true), + options.ScannerWithEmbeddedLibraries(true)).ScanReader(context.TODO(), "k8s.yaml", strings.NewReader(file)) + require.NoError(t, err) + + assert.Greater(t, len(results.GetFailed()), 1) + fileLines := strings.Split(file, "\n") + for _, failure := range results.GetFailed() { + actualCode, err := failure.GetCode() + require.NoError(t, err) + assert.Greater(t, len(actualCode.Lines), 0) + for _, line := range actualCode.Lines { + assert.Greater(t, len(fileLines), line.Number) + assert.Equal(t, line.Content, fileLines[line.Number-1]) + } + } +} + +func Test_FileScanWithPolicyReader(t *testing.T) { + + results, err := NewScanner(options.ScannerWithPolicyReader(strings.NewReader(`package defsec + +deny[msg] { + msg = "fail" +} +`))).ScanReader(context.TODO(), "k8s.yaml", strings.NewReader(` +apiVersion: v1 
+kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello +`)) + require.NoError(t, err) + + assert.Equal(t, 1, len(results.GetFailed())) +} + +func Test_FileScanJSON(t *testing.T) { + + results, err := NewScanner(options.ScannerWithPolicyReader(strings.NewReader(`package defsec + +deny[msg] { + input.kind == "Pod" + msg = "fail" +} +`))).ScanReader(context.TODO(), "k8s.json", strings.NewReader(` +{ + "kind": "Pod", + "apiVersion": "v1", + "metadata": { + "name": "mongo", + "labels": { + "name": "mongo", + "role": "mongo" + } + }, + "spec": { + "volumes": [ + { + "name": "mongo-disk", + "gcePersistentDisk": { + "pdName": "mongo-disk", + "fsType": "ext4" + } + } + ], + "containers": [ + { + "name": "mongo", + "image": "mongo:latest", + "ports": [ + { + "name": "mongo", + "containerPort": 27017 + } + ], + "volumeMounts": [ + { + "name": "mongo-disk", + "mountPath": "/data/db" + } + ] + } + ] + } +} +`)) + require.NoError(t, err) + + assert.Equal(t, 1, len(results.GetFailed())) +} + +func Test_FileScanWithMetadata(t *testing.T) { + + results, err := NewScanner( + options.ScannerWithDebug(os.Stdout), + options.ScannerWithTrace(os.Stdout), + options.ScannerWithPolicyReader(strings.NewReader(`package defsec + +deny[msg] { + input.kind == "Pod" + msg := { + "msg": "fail", + "startline": 2, + "endline": 2, + "filepath": "chartname/template/serviceAccount.yaml" + } +} +`))).ScanReader( + context.TODO(), + "k8s.yaml", + strings.NewReader(` +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello +`)) + require.NoError(t, err) + + assert.Greater(t, len(results.GetFailed()), 0) + + firstResult := results.GetFailed()[0] + assert.Equal(t, 2, firstResult.Metadata().Range().GetStartLine()) + assert.Equal(t, 2, firstResult.Metadata().Range().GetEndLine()) + assert.Equal(t, 
"chartname/template/serviceAccount.yaml", firstResult.Metadata().Range().GetFilename()) +} + +func Test_FileScanExampleWithResultFunction(t *testing.T) { + + results, err := NewScanner( + options.ScannerWithDebug(os.Stdout), + options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true), + options.ScannerWithPolicyReader(strings.NewReader(`package defsec + +import data.lib.kubernetes + +default checkCapsDropAll = false + +__rego_metadata__ := { +"id": "KSV003", +"avd_id": "AVD-KSV-0003", +"title": "Default capabilities not dropped", +"short_code": "drop-default-capabilities", +"version": "v1.0.0", +"severity": "LOW", +"type": "Kubernetes Security Check", +"description": "The container should drop all default capabilities and add only those that are needed for its execution.", +"recommended_actions": "Add 'ALL' to containers[].securityContext.capabilities.drop.", +"url": "https://kubesec.io/basics/containers-securitycontext-capabilities-drop-index-all/", +} + +__rego_input__ := { +"combine": false, +"selector": [{"type": "kubernetes"}], +} + +# Get all containers which include 'ALL' in security.capabilities.drop +getCapsDropAllContainers[container] { +allContainers := kubernetes.containers[_] +lower(allContainers.securityContext.capabilities.drop[_]) == "all" +container := allContainers.name +} + +# Get all containers which don't include 'ALL' in security.capabilities.drop +getCapsNoDropAllContainers[container] { +container := kubernetes.containers[_] +not getCapsDropAllContainers[container.name] +} + +deny[res] { +output := getCapsNoDropAllContainers[_] + +msg := kubernetes.format(sprintf("Container '%s' of %s '%s' should add 'ALL' to 'securityContext.capabilities.drop'", [output.name, kubernetes.kind, kubernetes.name])) + +res := result.new(msg, output) +} + +`))).ScanReader( + context.TODO(), + "k8s.yaml", + strings.NewReader(` +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", 
"echo 'Hello' && sleep 1h"] + image: busybox + name: hello + securityContext: + capabilities: + drop: + - nothing +`)) + require.NoError(t, err) + + require.Greater(t, len(results.GetFailed()), 0) + + firstResult := results.GetFailed()[0] + assert.Equal(t, 8, firstResult.Metadata().Range().GetStartLine()) + assert.Equal(t, 14, firstResult.Metadata().Range().GetEndLine()) + assert.Equal(t, "k8s.yaml", firstResult.Metadata().Range().GetFilename()) +} + +func Test_checkPolicyIsApplicable(t *testing.T) { + srcFS := testutil.CreateFS(t, map[string]string{ + "policies/pod_policy.rego": `# METADATA +# title: "Process can elevate its own privileges" +# description: "A program inside the container can elevate its own privileges and run as root, which might give the program control over the container and node." +# scope: package +# schemas: +# - input: schema["kubernetes"] +# related_resources: +# - https://kubernetes.io/docs/concepts/security/pod-security-standards/#restricted +# custom: +# id: KSV001 +# avd_id: AVD-KSV-0999 +# severity: MEDIUM +# short_code: no-self-privesc +# recommended_action: "Set 'set containers[].securityContext.allowPrivilegeEscalation' to 'false'." +# input: +# selector: +# - type: kubernetes +# subtypes: +# - kind: Pod +package builtin.kubernetes.KSV999 + +import data.lib.kubernetes +import data.lib.utils + +default checkAllowPrivilegeEscalation = false + +# getNoPrivilegeEscalationContainers returns the names of all containers which have +# securityContext.allowPrivilegeEscalation set to false. +getNoPrivilegeEscalationContainers[container] { + allContainers := kubernetes.containers[_] + allContainers.securityContext.allowPrivilegeEscalation == false + container := allContainers.name +} + +# getPrivilegeEscalationContainers returns the names of all containers which have +# securityContext.allowPrivilegeEscalation set to true or not set. 
+getPrivilegeEscalationContainers[container] { + containerName := kubernetes.containers[_].name + not getNoPrivilegeEscalationContainers[containerName] + container := kubernetes.containers[_] +} + +deny[res] { + output := getPrivilegeEscalationContainers[_] + msg := kubernetes.format(sprintf("Container '%s' of %s '%s' should set 'securityContext.allowPrivilegeEscalation' to false", [output.name, kubernetes.kind, kubernetes.name])) + res := result.new(msg, output) +} + +`, + "policies/namespace_policy.rego": `# METADATA +# title: "The default namespace should not be used" +# description: "ensure that default namespace should not be used" +# scope: package +# schemas: +# - input: schema["kubernetes"] +# related_resources: +# - https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/ +# custom: +# id: KSV110 +# avd_id: AVD-KSV-0888 +# severity: LOW +# short_code: default-namespace-should-not-be-used +# recommended_action: "Ensure that namespaces are created to allow for appropriate segregation of Kubernetes resources and that all new resources are created in a specific namespace." 
+# input: +# selector: +# - type: kubernetes +# subtypes: +# - kind: Namespace +package builtin.kubernetes.KSV888 + +import data.lib.kubernetes + +default defaultNamespaceInUse = false + +defaultNamespaceInUse { + kubernetes.namespace == "default" +} + +deny[res] { + defaultNamespaceInUse + msg := sprintf("%s '%s' should not be set with 'default' namespace", [kubernetes.kind, kubernetes.name]) + res := result.new(msg, input.metadata.namespace) +} + +`, + "test/KSV001/pod.yaml": `apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello + securityContext: + capabilities: + drop: + - all +`, + }) + + scanner := NewScanner( + // options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true), + options.ScannerWithEmbeddedLibraries(true), + options.ScannerWithPolicyDirs("policies/"), + options.ScannerWithPolicyFilesystem(srcFS), + ) + results, err := scanner.ScanFS(context.TODO(), srcFS, "test/KSV001") + require.NoError(t, err) + + require.NoError(t, err) + require.Len(t, results.GetFailed(), 1) + + failure := results.GetFailed()[0].Rule() + assert.Equal(t, "Process can elevate its own privileges", failure.Summary) +} diff --git a/pkg/scanners/scanner.go b/pkg/scanners/scanner.go new file mode 100644 index 000000000000..4d940d029d42 --- /dev/null +++ b/pkg/scanners/scanner.go @@ -0,0 +1,21 @@ +package scanners + +import ( + "context" + "io/fs" + "os" + + "github.com/aquasecurity/defsec/pkg/scan" +) + +type WriteFileFS interface { + WriteFile(name string, data []byte, perm os.FileMode) error +} + +type FSScanner interface { + // Name provides the human-readable name of the scanner e.g. "CloudFormation" + Name() string + // ScanFS scans the given filesystem for issues, starting at the provided directory. + // Use '.' to scan an entire filesystem. 
+ ScanFS(ctx context.Context, fs fs.FS, dir string) (scan.Results, error) +} diff --git a/pkg/scanners/terraform/executor/executor.go b/pkg/scanners/terraform/executor/executor.go new file mode 100644 index 000000000000..5a22e042d585 --- /dev/null +++ b/pkg/scanners/terraform/executor/executor.go @@ -0,0 +1,269 @@ +package executor + +import ( + "runtime" + "sort" + "strings" + "time" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/rules" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/severity" + "github.com/aquasecurity/defsec/pkg/state" + "github.com/aquasecurity/defsec/pkg/terraform" + + "github.com/aquasecurity/defsec/pkg/rego" + adapter "github.com/aquasecurity/trivy/internal/adapters/terraform" +) + +// Executor scans HCL blocks by running all registered rules against them +type Executor struct { + enableIgnores bool + excludedRuleIDs []string + excludeIgnoresIDs []string + includedRuleIDs []string + ignoreCheckErrors bool + workspaceName string + useSingleThread bool + debug debug.Logger + resultsFilters []func(scan.Results) scan.Results + alternativeIDProviderFunc func(string) []string + severityOverrides map[string]string + regoScanner *rego.Scanner + regoOnly bool + stateFuncs []func(*state.State) + frameworks []framework.Framework +} + +type Metrics struct { + Timings struct { + Adaptation time.Duration + RunningChecks time.Duration + } + Counts struct { + Ignored int + Failed int + Passed int + Critical int + High int + Medium int + Low int + } +} + +// New creates a new Executor +func New(options ...Option) *Executor { + s := &Executor{ + ignoreCheckErrors: true, + enableIgnores: true, + regoOnly: false, + } + for _, option := range options { + option(s) + } + return s +} + +// Find element in list +func checkInList(id string, altIDs []string, list []string) bool 
{ + for _, codeIgnored := range list { + if codeIgnored == id { + return true + } + for _, alt := range altIDs { + if alt == codeIgnored { + return true + } + } + } + return false +} + +func (e *Executor) Execute(modules terraform.Modules) (scan.Results, Metrics, error) { + + var metrics Metrics + + e.debug.Log("Adapting modules...") + adaptationTime := time.Now() + infra := adapter.Adapt(modules) + metrics.Timings.Adaptation = time.Since(adaptationTime) + e.debug.Log("Adapted %d module(s) into defsec state data.", len(modules)) + + threads := runtime.NumCPU() + if threads > 1 { + threads-- + } + if e.useSingleThread { + threads = 1 + } + e.debug.Log("Using max routines of %d", threads) + + e.debug.Log("Applying state modifier functions...") + for _, f := range e.stateFuncs { + f(infra) + } + + checksTime := time.Now() + registeredRules := rules.GetRegistered(e.frameworks...) + e.debug.Log("Initialised %d rule(s).", len(registeredRules)) + + pool := NewPool(threads, registeredRules, modules, infra, e.ignoreCheckErrors, e.regoScanner, e.regoOnly) + e.debug.Log("Created pool with %d worker(s) to apply rules.", threads) + results, err := pool.Run() + if err != nil { + return nil, metrics, err + } + metrics.Timings.RunningChecks = time.Since(checksTime) + e.debug.Log("Finished applying rules.") + + if e.enableIgnores { + e.debug.Log("Applying ignores...") + var ignores terraform.Ignores + for _, module := range modules { + ignores = append(ignores, module.Ignores()...) + } + + ignores = e.removeExcludedIgnores(ignores) + + for i, result := range results { + allIDs := []string{ + result.Rule().LongID(), + result.Rule().AVDID, + strings.ToLower(result.Rule().AVDID), + result.Rule().ShortCode, + } + allIDs = append(allIDs, result.Rule().Aliases...) + + if e.alternativeIDProviderFunc != nil { + allIDs = append(allIDs, e.alternativeIDProviderFunc(result.Rule().LongID())...) 
+ } + if ignores.Covering( + modules, + result.Metadata(), + e.workspaceName, + allIDs..., + ) != nil { + e.debug.Log("Ignored '%s' at '%s'.", result.Rule().LongID(), result.Range()) + results[i].OverrideStatus(scan.StatusIgnored) + } + } + } else { + e.debug.Log("Ignores are disabled.") + } + + results = e.updateSeverity(results) + results = e.filterResults(results) + metrics.Counts.Ignored = len(results.GetIgnored()) + metrics.Counts.Passed = len(results.GetPassed()) + metrics.Counts.Failed = len(results.GetFailed()) + + for _, res := range results.GetFailed() { + switch res.Severity() { + case severity.Critical: + metrics.Counts.Critical++ + case severity.High: + metrics.Counts.High++ + case severity.Medium: + metrics.Counts.Medium++ + case severity.Low: + metrics.Counts.Low++ + } + } + + e.sortResults(results) + return results, metrics, nil +} + +func (e *Executor) removeExcludedIgnores(ignores terraform.Ignores) terraform.Ignores { + var filteredIgnores terraform.Ignores + for _, ignore := range ignores { + if !contains(e.excludeIgnoresIDs, ignore.RuleID) { + filteredIgnores = append(filteredIgnores, ignore) + } + } + return filteredIgnores +} + +func contains(arr []string, s string) bool { + for _, elem := range arr { + if elem == s { + return true + } + } + return false +} + +func (e *Executor) updateSeverity(results []scan.Result) scan.Results { + if len(e.severityOverrides) == 0 { + return results + } + + var overriddenResults scan.Results + for _, res := range results { + for code, sev := range e.severityOverrides { + + var altMatch bool + if e.alternativeIDProviderFunc != nil { + alts := e.alternativeIDProviderFunc(res.Rule().LongID()) + for _, alt := range alts { + if alt == code { + altMatch = true + break + } + } + } + + if altMatch || res.Rule().LongID() == code { + overrides := scan.Results([]scan.Result{res}) + override := res.Rule() + override.Severity = severity.Severity(sev) + overrides.SetRule(override) + res = overrides[0] + } + } + 
overriddenResults = append(overriddenResults, res) + } + + return overriddenResults +} + +func (e *Executor) filterResults(results scan.Results) scan.Results { + includedOnly := len(e.includedRuleIDs) > 0 + for i, result := range results { + id := result.Rule().LongID() + var altIDs []string + if e.alternativeIDProviderFunc != nil { + altIDs = e.alternativeIDProviderFunc(id) + } + if (includedOnly && !checkInList(id, altIDs, e.includedRuleIDs)) || checkInList(id, altIDs, e.excludedRuleIDs) { + e.debug.Log("Excluding '%s' at '%s'.", result.Rule().LongID(), result.Range()) + results[i].OverrideStatus(scan.StatusIgnored) + } + } + + if len(e.resultsFilters) > 0 && len(results) > 0 { + before := len(results.GetIgnored()) + e.debug.Log("Applying %d results filters to %d results...", len(results), before) + for _, filter := range e.resultsFilters { + results = filter(results) + } + e.debug.Log("Filtered out %d results.", len(results.GetIgnored())-before) + } + + return results +} + +func (e *Executor) sortResults(results []scan.Result) { + sort.Slice(results, func(i, j int) bool { + switch { + case results[i].Rule().LongID() < results[j].Rule().LongID(): + return true + case results[i].Rule().LongID() > results[j].Rule().LongID(): + return false + default: + return results[i].Range().String() > results[j].Range().String() + } + }) +} diff --git a/pkg/scanners/terraform/executor/executor_test.go b/pkg/scanners/terraform/executor/executor_test.go new file mode 100644 index 000000000000..f35e4a7f2391 --- /dev/null +++ b/pkg/scanners/terraform/executor/executor_test.go @@ -0,0 +1,125 @@ +package executor + +import ( + "context" + "testing" + + "github.com/aquasecurity/defsec/pkg/providers" + "github.com/aquasecurity/defsec/pkg/rules" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/severity" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/stretchr/testify/assert" + 
"github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" +) + +var panicRule = scan.Rule{ + Provider: providers.AWSProvider, + Service: "service", + ShortCode: "abc", + Severity: severity.High, + CustomChecks: scan.CustomChecks{ + Terraform: &scan.TerraformCustomCheck{ + RequiredTypes: []string{"resource"}, + RequiredLabels: []string{"problem"}, + Check: func(resourceBlock *terraform.Block, _ *terraform.Module) (results scan.Results) { + if resourceBlock.GetAttribute("panic").IsTrue() { + panic("This is fine") + } + return + }, + }, + }, +} + +func Test_PanicInCheckNotAllowed(t *testing.T) { + + reg := rules.Register(panicRule) + defer rules.Deregister(reg) + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +resource "problem" "this" { + panic = true +} +`, + }) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := New().Execute(modules) + assert.Equal(t, len(results.GetFailed()), 0) +} + +func Test_PanicInCheckAllowed(t *testing.T) { + + reg := rules.Register(panicRule) + defer rules.Deregister(reg) + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +resource "problem" "this" { + panic = true +} +`, + }) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + _, _, err = New(OptionStopOnErrors(false)).Execute(modules) + assert.Error(t, err) +} + +func Test_PanicNotInCheckNotIncludePassed(t *testing.T) { + + reg := rules.Register(panicRule) + defer rules.Deregister(reg) + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +resource "problem" "this" { + 
panic = true +} +`, + }) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := New().Execute(modules) + assert.Equal(t, len(results.GetFailed()), 0) +} + +func Test_PanicNotInCheckNotIncludePassedStopOnError(t *testing.T) { + + reg := rules.Register(panicRule) + defer rules.Deregister(reg) + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +resource "problem" "this" { + panic = true +} +`, + }) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + + _, _, err = New(OptionStopOnErrors(false)).Execute(modules) + assert.Error(t, err) +} diff --git a/pkg/scanners/terraform/executor/option.go b/pkg/scanners/terraform/executor/option.go new file mode 100644 index 000000000000..5470146321e1 --- /dev/null +++ b/pkg/scanners/terraform/executor/option.go @@ -0,0 +1,103 @@ +package executor + +import ( + "io" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/rego" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/state" +) + +type Option func(s *Executor) + +func OptionWithFrameworks(frameworks ...framework.Framework) Option { + return func(s *Executor) { + s.frameworks = frameworks + } +} + +func OptionWithAlternativeIDProvider(f func(string) []string) Option { + return func(s *Executor) { + s.alternativeIDProviderFunc = f + } +} + +func OptionWithResultsFilter(f func(scan.Results) scan.Results) Option { + return func(s *Executor) { + s.resultsFilters = append(s.resultsFilters, f) + } +} + +func OptionWithSeverityOverrides(overrides map[string]string) Option { + return 
func(s *Executor) { + s.severityOverrides = overrides + } +} + +func OptionWithDebugWriter(w io.Writer) Option { + return func(s *Executor) { + s.debug = debug.New(w, "terraform", "executor") + } +} + +func OptionNoIgnores() Option { + return func(s *Executor) { + s.enableIgnores = false + } +} + +func OptionExcludeRules(ruleIDs []string) Option { + return func(s *Executor) { + s.excludedRuleIDs = ruleIDs + } +} + +func OptionExcludeIgnores(ruleIDs []string) Option { + return func(s *Executor) { + s.excludeIgnoresIDs = ruleIDs + } +} + +func OptionIncludeRules(ruleIDs []string) Option { + return func(s *Executor) { + s.includedRuleIDs = ruleIDs + } +} + +func OptionStopOnErrors(stop bool) Option { + return func(s *Executor) { + s.ignoreCheckErrors = !stop + } +} + +func OptionWithWorkspaceName(workspaceName string) Option { + return func(s *Executor) { + s.workspaceName = workspaceName + } +} + +func OptionWithSingleThread(single bool) Option { + return func(s *Executor) { + s.useSingleThread = single + } +} + +func OptionWithRegoScanner(s *rego.Scanner) Option { + return func(e *Executor) { + e.regoScanner = s + } +} + +func OptionWithStateFunc(f ...func(*state.State)) Option { + return func(e *Executor) { + e.stateFuncs = f + } +} + +func OptionWithRegoOnly(regoOnly bool) Option { + return func(e *Executor) { + e.regoOnly = regoOnly + } +} diff --git a/pkg/scanners/terraform/executor/pool.go b/pkg/scanners/terraform/executor/pool.go new file mode 100644 index 000000000000..9ea1f6907468 --- /dev/null +++ b/pkg/scanners/terraform/executor/pool.go @@ -0,0 +1,299 @@ +package executor + +import ( + "context" + "fmt" + "os" + "path/filepath" + runtimeDebug "runtime/debug" + "strings" + "sync" + + "github.com/aquasecurity/defsec/pkg/rego" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/state" + "github.com/aquasecurity/defsec/pkg/terraform" + types "github.com/aquasecurity/defsec/pkg/types/rules" 
+) + +type Pool struct { + size int + modules terraform.Modules + state *state.State + rules []types.RegisteredRule + ignoreErrors bool + rs *rego.Scanner + regoOnly bool +} + +func NewPool(size int, rules []types.RegisteredRule, modules terraform.Modules, state *state.State, ignoreErrors bool, regoScanner *rego.Scanner, regoOnly bool) *Pool { + return &Pool{ + size: size, + rules: rules, + state: state, + modules: modules, + ignoreErrors: ignoreErrors, + rs: regoScanner, + regoOnly: regoOnly, + } +} + +// Run runs the job in the pool - this will only return an error if a job panics +func (p *Pool) Run() (scan.Results, error) { + + outgoing := make(chan Job, p.size*2) + + var workers []*Worker + for i := 0; i < p.size; i++ { + worker := NewWorker(outgoing) + go worker.Start() + workers = append(workers, worker) + } + + if p.rs != nil { + var basePath string + if len(p.modules) > 0 { + basePath = p.modules[0].RootPath() + } + outgoing <- ®oJob{ + state: p.state, + scanner: p.rs, + basePath: basePath, + } + } + + if !p.regoOnly { + for _, r := range p.rules { + if r.GetRule().CustomChecks.Terraform != nil && r.GetRule().CustomChecks.Terraform.Check != nil { + // run local hcl rule + for _, module := range p.modules { + mod := *module + outgoing <- &hclModuleRuleJob{ + module: &mod, + rule: r, + ignoreErrors: p.ignoreErrors, + } + } + } else { + // run defsec rule + outgoing <- &infraRuleJob{ + state: p.state, + rule: r, + ignoreErrors: p.ignoreErrors, + } + } + } + } + + close(outgoing) + + var results scan.Results + for _, worker := range workers { + results = append(results, worker.Wait()...) 
+ if err := worker.Error(); err != nil { + return nil, err + } + } + + return results, nil +} + +type Job interface { + Run() (scan.Results, error) +} + +type infraRuleJob struct { + state *state.State + rule types.RegisteredRule + + ignoreErrors bool +} + +type hclModuleRuleJob struct { + module *terraform.Module + rule types.RegisteredRule + ignoreErrors bool +} + +type regoJob struct { + state *state.State + scanner *rego.Scanner + basePath string +} + +func (h *infraRuleJob) Run() (_ scan.Results, err error) { + if h.ignoreErrors { + defer func() { + if panicErr := recover(); panicErr != nil { + err = fmt.Errorf("%s\n%s", panicErr, string(runtimeDebug.Stack())) + } + }() + } + return h.rule.Evaluate(h.state), err +} + +func (h *hclModuleRuleJob) Run() (results scan.Results, err error) { + if h.ignoreErrors { + defer func() { + if panicErr := recover(); panicErr != nil { + err = fmt.Errorf("%s\n%s", panicErr, string(runtimeDebug.Stack())) + } + }() + } + customCheck := h.rule.GetRule().CustomChecks.Terraform + for _, block := range h.module.GetBlocks() { + if !isCustomCheckRequiredForBlock(customCheck, block) { + continue + } + results = append(results, customCheck.Check(block, h.module)...) 
+ } + results.SetRule(h.rule.GetRule()) + return +} + +func (h *regoJob) Run() (results scan.Results, err error) { + regoResults, err := h.scanner.ScanInput(context.TODO(), rego.Input{ + Contents: h.state.ToRego(), + Path: h.basePath, + }) + if err != nil { + return nil, fmt.Errorf("rego scan error: %w", err) + } + return regoResults, nil +} + +// nolint +func isCustomCheckRequiredForBlock(custom *scan.TerraformCustomCheck, b *terraform.Block) bool { + + var found bool + for _, requiredType := range custom.RequiredTypes { + if b.Type() == requiredType { + found = true + break + } + } + if !found && len(custom.RequiredTypes) > 0 { + return false + } + + found = false + for _, requiredLabel := range custom.RequiredLabels { + if requiredLabel == "*" || (len(b.Labels()) > 0 && wildcardMatch(requiredLabel, b.TypeLabel())) { + found = true + break + } + } + if !found && len(custom.RequiredLabels) > 0 { + return false + } + + found = false + if len(custom.RequiredSources) > 0 && b.Type() == terraform.TypeModule.Name() { + if sourceAttr := b.GetAttribute("source"); sourceAttr.IsNotNil() { + values := sourceAttr.AsStringValues().AsStrings() + if len(values) == 0 { + return false + } + sourcePath := values[0] + + // resolve module source path to path relative to cwd + if strings.HasPrefix(sourcePath, ".") { + sourcePath = cleanPathRelativeToWorkingDir(filepath.Dir(b.GetMetadata().Range().GetFilename()), sourcePath) + } + + for _, requiredSource := range custom.RequiredSources { + if requiredSource == "*" || wildcardMatch(requiredSource, sourcePath) { + found = true + break + } + } + } + return found + } + + return true +} + +func cleanPathRelativeToWorkingDir(dir, path string) string { + absPath := filepath.Clean(filepath.Join(dir, path)) + wDir, err := os.Getwd() + if err != nil { + return absPath + } + relPath, err := filepath.Rel(wDir, absPath) + if err != nil { + return absPath + } + return relPath +} + +func wildcardMatch(pattern string, subject string) bool { + if 
pattern == "" { + return false + } + parts := strings.Split(pattern, "*") + var lastIndex int + for i, part := range parts { + if part == "" { + continue + } + if i == 0 { + if !strings.HasPrefix(subject, part) { + return false + } + } + if i == len(parts)-1 { + if !strings.HasSuffix(subject, part) { + return false + } + } + newIndex := strings.Index(subject, part) + if newIndex < lastIndex { + return false + } + lastIndex = newIndex + } + return true +} + +type Worker struct { + incoming <-chan Job + mu sync.Mutex + results scan.Results + panic interface{} +} + +func NewWorker(incoming <-chan Job) *Worker { + w := &Worker{ + incoming: incoming, + } + w.mu.Lock() + return w +} + +func (w *Worker) Start() { + defer w.mu.Unlock() + w.results = nil + for job := range w.incoming { + func() { + results, err := job.Run() + if err != nil { + w.panic = err + } + w.results = append(w.results, results...) + }() + } +} + +func (w *Worker) Wait() scan.Results { + w.mu.Lock() + defer w.mu.Unlock() + return w.results +} + +func (w *Worker) Error() error { + if w.panic == nil { + return nil + } + return fmt.Errorf("job failed: %s", w.panic) +} diff --git a/pkg/scanners/terraform/executor/statistics.go b/pkg/scanners/terraform/executor/statistics.go new file mode 100644 index 000000000000..5c2dd1784ea2 --- /dev/null +++ b/pkg/scanners/terraform/executor/statistics.go @@ -0,0 +1,91 @@ +package executor + +import ( + "encoding/json" + "fmt" + "io" + "sort" + "strconv" + "strings" + + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/olekukonko/tablewriter" +) + +type StatisticsItem struct { + RuleID string `json:"rule_id"` + RuleDescription string `json:"rule_description"` + Links []string `json:"links"` + Count int `json:"count"` +} + +type Statistics []StatisticsItem + +type StatisticsResult struct { + Result Statistics `json:"results"` +} + +func SortStatistics(statistics Statistics) Statistics { + sort.Slice(statistics, func(i, j int) bool { + return 
statistics[i].Count > statistics[j].Count + }) + return statistics +} + +func (statistics Statistics) PrintStatisticsTable(format string, w io.Writer) error { + // lovely is the default so we keep it like that + if format != "lovely" && format != "markdown" && format != "json" { + return fmt.Errorf("you must specify only lovely, markdown or json format with --run-statistics") + } + + sorted := SortStatistics(statistics) + + if format == "json" { + result := StatisticsResult{Result: sorted} + val, err := json.MarshalIndent(result, "", " ") + if err != nil { + return err + } + + _, _ = fmt.Fprintln(w, string(val)) + + return nil + } + + table := tablewriter.NewWriter(w) + table.SetHeader([]string{"Rule ID", "Description", "Link", "Count"}) + table.SetRowLine(true) + + if format == "markdown" { + table.SetBorders(tablewriter.Border{Left: true, Top: false, Right: true, Bottom: false}) + table.SetCenterSeparator("|") + } + + for _, item := range sorted { + table.Append([]string{item.RuleID, + item.RuleDescription, + strings.Join(item.Links, "\n"), + strconv.Itoa(item.Count)}) + } + + table.Render() + + return nil +} + +func AddStatisticsCount(statistics Statistics, result scan.Result) Statistics { + for i, statistic := range statistics { + if statistic.RuleID == result.Rule().LongID() { + statistics[i].Count += 1 + return statistics + } + } + statistics = append(statistics, StatisticsItem{ + RuleID: result.Rule().LongID(), + RuleDescription: result.Rule().Summary, + Links: result.Rule().Links, + Count: 1, + }) + + return statistics +} diff --git a/pkg/scanners/terraform/options.go b/pkg/scanners/terraform/options.go new file mode 100644 index 000000000000..93c50d6ddd60 --- /dev/null +++ b/pkg/scanners/terraform/options.go @@ -0,0 +1,211 @@ +package terraform + +import ( + "io/fs" + "strings" + + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/severity" + 
"github.com/aquasecurity/defsec/pkg/state" + + "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" + "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" +) + +type ConfigurableTerraformScanner interface { + options.ConfigurableScanner + SetForceAllDirs(bool) + AddExecutorOptions(options ...executor.Option) + AddParserOptions(options ...options.ParserOption) +} + +func ScannerWithTFVarsPaths(paths ...string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddParserOptions(parser.OptionWithTFVarsPaths(paths...)) + } + } +} + +func ScannerWithAlternativeIDProvider(f func(string) []string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionWithAlternativeIDProvider(f)) + } + } +} + +func ScannerWithSeverityOverrides(overrides map[string]string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionWithSeverityOverrides(overrides)) + } + } +} + +func ScannerWithNoIgnores() options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionNoIgnores()) + } + } +} + +func ScannerWithExcludedRules(ruleIDs []string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionExcludeRules(ruleIDs)) + } + } +} + +func ScannerWithExcludeIgnores(ruleIDs []string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionExcludeIgnores(ruleIDs)) + } + } +} + +func ScannerWithIncludedRules(ruleIDs []string) options.ScannerOption { + 
return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionIncludeRules(ruleIDs)) + } + } +} + +func ScannerWithStopOnRuleErrors(stop bool) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionStopOnErrors(stop)) + } + } +} + +func ScannerWithWorkspaceName(name string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddParserOptions(parser.OptionWithWorkspaceName(name)) + tf.AddExecutorOptions(executor.OptionWithWorkspaceName(name)) + } + } +} + +func ScannerWithSingleThread(single bool) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionWithSingleThread(single)) + } + } +} + +func ScannerWithAllDirectories(all bool) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.SetForceAllDirs(all) + } + } +} + +func ScannerWithStopOnHCLError(stop bool) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddParserOptions(parser.OptionStopOnHCLError(stop)) + } + } +} + +func ScannerWithSkipDownloaded(skip bool) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if !skip { + return + } + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionWithResultsFilter(func(results scan.Results) scan.Results { + for i, result := range results { + prefix := result.Range().GetSourcePrefix() + switch { + case prefix == "": + case strings.HasPrefix(prefix, "."): + default: + results[i].OverrideStatus(scan.StatusIgnored) + } + } + return results + })) + } + } +} + +func ScannerWithResultsFilter(f 
func(scan.Results) scan.Results) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionWithResultsFilter(f)) + } + } +} + +func ScannerWithMinimumSeverity(minimum severity.Severity) options.ScannerOption { + min := severityAsOrdinal(minimum) + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionWithResultsFilter(func(results scan.Results) scan.Results { + for i, result := range results { + if severityAsOrdinal(result.Severity()) < min { + results[i].OverrideStatus(scan.StatusIgnored) + } + } + return results + })) + } + } +} + +func severityAsOrdinal(sev severity.Severity) int { + switch sev { + case severity.Critical: + return 4 + case severity.High: + return 3 + case severity.Medium: + return 2 + case severity.Low: + return 1 + default: + return 0 + } +} + +func ScannerWithStateFunc(f ...func(*state.State)) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionWithStateFunc(f...)) + } + } +} + +func ScannerWithDownloadsAllowed(allowed bool) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddParserOptions(parser.OptionWithDownloads(allowed)) + } + } +} + +func ScannerWithSkipCachedModules(b bool) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddParserOptions(parser.OptionWithDownloads(b)) + } + } +} + +func ScannerWithConfigsFileSystem(fsys fs.FS) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddParserOptions(parser.OptionWithConfigsFS(fsys)) + } + } +} diff --git a/pkg/scanners/terraform/parser/evaluator.go 
b/pkg/scanners/terraform/parser/evaluator.go new file mode 100644 index 000000000000..cc3b24557e3d --- /dev/null +++ b/pkg/scanners/terraform/parser/evaluator.go @@ -0,0 +1,511 @@ +package parser + +import ( + "context" + "errors" + "fmt" + "io/fs" + "reflect" + "time" + + "golang.org/x/exp/slices" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/terraform" + tfcontext "github.com/aquasecurity/defsec/pkg/terraform/context" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/ext/typeexpr" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/convert" +) + +const ( + maxContextIterations = 32 +) + +type evaluator struct { + filesystem fs.FS + ctx *tfcontext.Context + blocks terraform.Blocks + inputVars map[string]cty.Value + moduleMetadata *modulesMetadata + projectRootPath string // root of the current scan + modulePath string + moduleName string + ignores terraform.Ignores + parentParser *Parser + debug debug.Logger + allowDownloads bool + skipCachedModules bool +} + +func newEvaluator( + target fs.FS, + parentParser *Parser, + projectRootPath string, + modulePath string, + workingDir string, + moduleName string, + blocks terraform.Blocks, + inputVars map[string]cty.Value, + moduleMetadata *modulesMetadata, + workspace string, + ignores []terraform.Ignore, + logger debug.Logger, + allowDownloads bool, + skipCachedModules bool, +) *evaluator { + + // create a context to store variables and make functions available + ctx := tfcontext.NewContext(&hcl.EvalContext{ + Functions: Functions(target, modulePath), + }, nil) + + // these variables are made available by terraform to each module + ctx.SetByDot(cty.StringVal(workspace), "terraform.workspace") + ctx.SetByDot(cty.StringVal(projectRootPath), "path.root") + ctx.SetByDot(cty.StringVal(modulePath), "path.module") + 
ctx.SetByDot(cty.StringVal(workingDir), "path.cwd") + + // each block gets its own scope to define variables in + for _, b := range blocks { + b.OverrideContext(ctx.NewChild()) + } + + return &evaluator{ + filesystem: target, + parentParser: parentParser, + modulePath: modulePath, + moduleName: moduleName, + projectRootPath: projectRootPath, + ctx: ctx, + blocks: blocks, + inputVars: inputVars, + moduleMetadata: moduleMetadata, + ignores: ignores, + debug: logger, + allowDownloads: allowDownloads, + } +} + +func (e *evaluator) evaluateStep() { + + e.ctx.Set(e.getValuesByBlockType("variable"), "var") + e.ctx.Set(e.getValuesByBlockType("locals"), "local") + e.ctx.Set(e.getValuesByBlockType("provider"), "provider") + + resources := e.getValuesByBlockType("resource") + for key, resource := range resources.AsValueMap() { + e.ctx.Set(resource, key) + } + + e.ctx.Set(e.getValuesByBlockType("data"), "data") + e.ctx.Set(e.getValuesByBlockType("output"), "output") +} + +// exportOutputs is used to export module outputs to the parent module +func (e *evaluator) exportOutputs() cty.Value { + data := make(map[string]cty.Value) + for _, block := range e.blocks.OfType("output") { + attr := block.GetAttribute("value") + if attr.IsNil() { + continue + } + data[block.Label()] = attr.Value() + e.debug.Log("Added module output %s=%s.", block.Label(), attr.Value().GoString()) + } + return cty.ObjectVal(data) +} + +func (e *evaluator) EvaluateAll(ctx context.Context) (terraform.Modules, map[string]fs.FS, time.Duration) { + + fsKey := types.CreateFSKey(e.filesystem) + e.debug.Log("Filesystem key is '%s'", fsKey) + + fsMap := make(map[string]fs.FS) + fsMap[fsKey] = e.filesystem + + var parseDuration time.Duration + + var lastContext hcl.EvalContext + start := time.Now() + e.debug.Log("Starting module evaluation...") + for i := 0; i < maxContextIterations; i++ { + + e.evaluateStep() + + // if ctx matches the last evaluation, we can bail, nothing left to resolve + if i > 0 && 
reflect.DeepEqual(lastContext.Variables, e.ctx.Inner().Variables) { + break + } + + if len(e.ctx.Inner().Variables) != len(lastContext.Variables) { + lastContext.Variables = make(map[string]cty.Value, len(e.ctx.Inner().Variables)) + } + for k, v := range e.ctx.Inner().Variables { + lastContext.Variables[k] = v + } + } + + // expand out resources and modules via count (not a typo, we do this twice so every order is processed) + e.blocks = e.expandBlocks(e.blocks) + e.blocks = e.expandBlocks(e.blocks) + + parseDuration += time.Since(start) + + e.debug.Log("Starting submodule evaluation...") + var modules terraform.Modules + for _, definition := range e.loadModules(ctx) { + submodules, outputs, err := definition.Parser.EvaluateAll(ctx) + if err != nil { + e.debug.Log("Failed to evaluate submodule '%s': %s.", definition.Name, err) + continue + } + // export module outputs + e.ctx.Set(outputs, "module", definition.Name) + modules = append(modules, submodules...) + for key, val := range definition.Parser.GetFilesystemMap() { + fsMap[key] = val + } + } + e.debug.Log("Finished processing %d submodule(s).", len(modules)) + + e.debug.Log("Starting post-submodule evaluation...") + for i := 0; i < maxContextIterations; i++ { + + e.evaluateStep() + + // if ctx matches the last evaluation, we can bail, nothing left to resolve + if i > 0 && reflect.DeepEqual(lastContext.Variables, e.ctx.Inner().Variables) { + break + } + + if len(e.ctx.Inner().Variables) != len(lastContext.Variables) { + lastContext.Variables = make(map[string]cty.Value, len(e.ctx.Inner().Variables)) + } + for k, v := range e.ctx.Inner().Variables { + lastContext.Variables[k] = v + } + } + + e.debug.Log("Module evaluation complete.") + parseDuration += time.Since(start) + rootModule := terraform.NewModule(e.projectRootPath, e.modulePath, e.blocks, e.ignores, e.isModuleLocal()) + for _, m := range modules { + m.SetParent(rootModule) + } + return append(terraform.Modules{rootModule}, modules...), fsMap, 
parseDuration +} + +func (e *evaluator) isModuleLocal() bool { + // the module source is empty only for local modules + return e.parentParser.moduleSource == "" +} + +func (e *evaluator) expandBlocks(blocks terraform.Blocks) terraform.Blocks { + return e.expandDynamicBlocks(e.expandBlockForEaches(e.expandBlockCounts(blocks))...) +} + +func (e *evaluator) expandDynamicBlocks(blocks ...*terraform.Block) terraform.Blocks { + for _, b := range blocks { + e.expandDynamicBlock(b) + } + return blocks +} + +func (e *evaluator) expandDynamicBlock(b *terraform.Block) { + for _, sub := range b.AllBlocks() { + e.expandDynamicBlock(sub) + } + for _, sub := range b.AllBlocks().OfType("dynamic") { + blockName := sub.TypeLabel() + expanded := e.expandBlockForEaches(terraform.Blocks{sub}) + for _, ex := range expanded { + if content := ex.GetBlock("content"); content.IsNotNil() { + _ = e.expandDynamicBlocks(content) + b.InjectBlock(content, blockName) + } + } + } +} + +func validateForEachArg(arg cty.Value) error { + if arg.IsNull() { + return errors.New("arg is null") + } + + ty := arg.Type() + + if !arg.IsKnown() || ty.Equals(cty.DynamicPseudoType) || arg.LengthInt() == 0 { + return nil + } + + if !(ty.IsSetType() || ty.IsObjectType() || ty.IsMapType()) { + return fmt.Errorf("%s type is not supported: arg is not set or map", ty.FriendlyName()) + } + + if ty.IsSetType() { + if !ty.ElementType().Equals(cty.String) { + return errors.New("arg is not set of strings") + } + + it := arg.ElementIterator() + for it.Next() { + key, _ := it.Element() + if key.IsNull() { + return errors.New("arg is set of strings, but contains null") + } + + if !key.IsKnown() { + return errors.New("arg is set of strings, but contains unknown value") + } + } + } + + return nil +} + +func isBlockSupportsForEachMetaArgument(block *terraform.Block) bool { + return slices.Contains([]string{"module", "resource", "data", "dynamic"}, block.Type()) +} + +func (e *evaluator) expandBlockForEaches(blocks 
terraform.Blocks) terraform.Blocks { + var forEachFiltered terraform.Blocks + + for _, block := range blocks { + + forEachAttr := block.GetAttribute("for_each") + + if forEachAttr.IsNil() || block.IsCountExpanded() || !isBlockSupportsForEachMetaArgument(block) { + forEachFiltered = append(forEachFiltered, block) + continue + } + + forEachVal := forEachAttr.Value() + + if err := validateForEachArg(forEachVal); err != nil { + e.debug.Log(`"for_each" argument is invalid: %s`, err.Error()) + continue + } + + clones := make(map[string]cty.Value) + _ = forEachAttr.Each(func(key cty.Value, val cty.Value) { + + if !key.Type().Equals(cty.String) { + e.debug.Log( + `Invalid "for-each" argument: map key (or set value) is not a string, but %s`, + key.Type().FriendlyName(), + ) + return + } + + clone := block.Clone(key) + + ctx := clone.Context() + + e.copyVariables(block, clone) + + ctx.SetByDot(key, "each.key") + ctx.SetByDot(val, "each.value") + + ctx.Set(key, block.TypeLabel(), "key") + ctx.Set(val, block.TypeLabel(), "value") + + forEachFiltered = append(forEachFiltered, clone) + + values := clone.Values() + clones[key.AsString()] = values + e.ctx.SetByDot(values, clone.GetMetadata().Reference()) + }) + + metadata := block.GetMetadata() + if len(clones) == 0 { + e.ctx.SetByDot(cty.EmptyTupleVal, metadata.Reference()) + } else { + // The for-each meta-argument creates multiple instances of the resource that are stored in the map. + // So we must replace the old resource with a map with the attributes of the resource. 
+ e.ctx.Replace(cty.ObjectVal(clones), metadata.Reference()) + } + e.debug.Log("Expanded block '%s' into %d clones via 'for_each' attribute.", block.LocalName(), len(clones)) + } + + return forEachFiltered +} + +func isBlockSupportsCountMetaArgument(block *terraform.Block) bool { + return slices.Contains([]string{"module", "resource", "data"}, block.Type()) +} + +func (e *evaluator) expandBlockCounts(blocks terraform.Blocks) terraform.Blocks { + var countFiltered terraform.Blocks + for _, block := range blocks { + countAttr := block.GetAttribute("count") + if countAttr.IsNil() || block.IsCountExpanded() || !isBlockSupportsCountMetaArgument(block) { + countFiltered = append(countFiltered, block) + continue + } + count := 1 + countAttrVal := countAttr.Value() + if !countAttrVal.IsNull() && countAttrVal.IsKnown() && countAttrVal.Type() == cty.Number { + count = int(countAttr.AsNumber()) + } + + var clones []cty.Value + for i := 0; i < count; i++ { + clone := block.Clone(cty.NumberIntVal(int64(i))) + clones = append(clones, clone.Values()) + countFiltered = append(countFiltered, clone) + metadata := clone.GetMetadata() + e.ctx.SetByDot(clone.Values(), metadata.Reference()) + } + metadata := block.GetMetadata() + if len(clones) == 0 { + e.ctx.SetByDot(cty.EmptyTupleVal, metadata.Reference()) + } else { + e.ctx.SetByDot(cty.TupleVal(clones), metadata.Reference()) + } + e.debug.Log("Expanded block '%s' into %d clones via 'count' attribute.", block.LocalName(), len(clones)) + } + + return countFiltered +} + +func (e *evaluator) copyVariables(from, to *terraform.Block) { + + var fromBase string + var fromRel string + var toRel string + + switch from.Type() { + case "resource": + fromBase = from.TypeLabel() + fromRel = from.NameLabel() + toRel = to.NameLabel() + case "module": + fromBase = from.Type() + fromRel = from.TypeLabel() + toRel = to.TypeLabel() + default: + return + } + + srcValue := e.ctx.Root().Get(fromBase, fromRel) + if srcValue == cty.NilVal { + return + } + 
e.ctx.Root().Set(srcValue, fromBase, toRel) +} + +func (e *evaluator) evaluateVariable(b *terraform.Block) (cty.Value, error) { + if b.Label() == "" { + return cty.NilVal, errors.New("empty label - cannot resolve") + } + + attributes := b.Attributes() + if attributes == nil { + return cty.NilVal, errors.New("cannot resolve variable with no attributes") + } + + var valType cty.Type + var defaults *typeexpr.Defaults + if typeAttr, exists := attributes["type"]; exists { + ty, def, err := typeAttr.DecodeVarType() + if err != nil { + return cty.NilVal, err + } + valType = ty + defaults = def + } + + var val cty.Value + + if override, exists := e.inputVars[b.Label()]; exists { + val = override + } else if def, exists := attributes["default"]; exists { + val = def.NullableValue() + } else { + return cty.NilVal, errors.New("no value found") + } + + if valType != cty.NilType { + if defaults != nil { + val = defaults.Apply(val) + } + + typedVal, err := convert.Convert(val, valType) + if err != nil { + return cty.NilVal, err + } + return typedVal, nil + } + + return val, nil + +} + +func (e *evaluator) evaluateOutput(b *terraform.Block) (cty.Value, error) { + if b.Label() == "" { + return cty.NilVal, errors.New("empty label - cannot resolve") + } + + attribute := b.GetAttribute("value") + if attribute.IsNil() { + return cty.NilVal, errors.New("cannot resolve output with no attributes") + } + return attribute.Value(), nil +} + +// returns true if all evaluations were successful +func (e *evaluator) getValuesByBlockType(blockType string) cty.Value { + + blocksOfType := e.blocks.OfType(blockType) + values := make(map[string]cty.Value) + + for _, b := range blocksOfType { + + switch b.Type() { + case "variable": // variables are special in that their value comes from the "default" attribute + val, err := e.evaluateVariable(b) + if err != nil { + continue + } + values[b.Label()] = val + case "output": + val, err := e.evaluateOutput(b) + if err != nil { + continue + } + 
values[b.Label()] = val + case "locals", "moved", "import": + for key, val := range b.Values().AsValueMap() { + values[key] = val + } + case "provider", "module", "check": + if b.Label() == "" { + continue + } + values[b.Label()] = b.Values() + case "resource", "data": + if len(b.Labels()) < 2 { + continue + } + + blockMap, ok := values[b.Labels()[0]] + if !ok { + values[b.Labels()[0]] = cty.ObjectVal(make(map[string]cty.Value)) + blockMap = values[b.Labels()[0]] + } + + valueMap := blockMap.AsValueMap() + if valueMap == nil { + valueMap = make(map[string]cty.Value) + } + + valueMap[b.Labels()[1]] = b.Values() + values[b.Labels()[0]] = cty.ObjectVal(valueMap) + } + } + + return cty.ObjectVal(values) +} diff --git a/pkg/scanners/terraform/parser/evaluator_test.go b/pkg/scanners/terraform/parser/evaluator_test.go new file mode 100644 index 000000000000..8d3ef7b0f6e0 --- /dev/null +++ b/pkg/scanners/terraform/parser/evaluator_test.go @@ -0,0 +1,94 @@ +package parser + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/zclconf/go-cty/cty" +) + +func TestValidateForEachArg(t *testing.T) { + tests := []struct { + name string + arg cty.Value + expectedError string + }{ + { + name: "empty set", + arg: cty.SetValEmpty(cty.String), + }, + { + name: "set of strings", + arg: cty.SetVal([]cty.Value{cty.StringVal("val1"), cty.StringVal("val2")}), + }, + { + name: "set of non-strings", + arg: cty.SetVal([]cty.Value{cty.NumberIntVal(1), cty.NumberIntVal(2)}), + expectedError: "is not set of strings", + }, + { + name: "set with null", + arg: cty.SetVal([]cty.Value{cty.StringVal("val1"), cty.NullVal(cty.String)}), + expectedError: "arg is set of strings, but contains null", + }, + { + name: "set with unknown", + arg: cty.SetVal([]cty.Value{cty.StringVal("val1"), cty.UnknownVal(cty.String)}), + expectedError: "arg is set of strings, but contains unknown", + }, + { + name: "set with unknown", + arg: 
cty.SetVal([]cty.Value{cty.StringVal("val1"), cty.UnknownVal(cty.String)}), + expectedError: "arg is set of strings, but contains unknown", + }, + { + name: "non empty map", + arg: cty.MapVal(map[string]cty.Value{ + "val1": cty.StringVal("..."), + "val2": cty.StringVal("..."), + }), + }, + { + name: "map with unknown", + arg: cty.MapVal(map[string]cty.Value{ + "val1": cty.UnknownVal(cty.String), + "val2": cty.StringVal("..."), + }), + }, + { + name: "empty obj", + arg: cty.EmptyObjectVal, + }, + { + name: "obj with strings", + arg: cty.ObjectVal(map[string]cty.Value{ + "val1": cty.StringVal("..."), + "val2": cty.StringVal("..."), + }), + }, + { + name: "null", + arg: cty.NullVal(cty.Set(cty.String)), + expectedError: "arg is null", + }, + { + name: "unknown", + arg: cty.UnknownVal(cty.Set(cty.String)), + }, + { + name: "dynamic", + arg: cty.DynamicVal, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := validateForEachArg(tt.arg) + if tt.expectedError != "" && err != nil { + assert.ErrorContains(t, err, tt.expectedError) + return + } + assert.NoError(t, err) + }) + } +} diff --git a/pkg/scanners/terraform/parser/funcs/cidr.go b/pkg/scanners/terraform/parser/funcs/cidr.go new file mode 100644 index 000000000000..5f1504c0a8a1 --- /dev/null +++ b/pkg/scanners/terraform/parser/funcs/cidr.go @@ -0,0 +1,212 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "fmt" + "math/big" + "net" + + "github.com/apparentlymart/go-cidr/cidr" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/function" + "github.com/zclconf/go-cty/cty/gocty" +) + +// CidrHostFunc constructs a function that calculates a full host IP address +// within a given IP network address prefix. 
+var CidrHostFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "prefix", + Type: cty.String, + }, + { + Name: "hostnum", + Type: cty.Number, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + var hostNum *big.Int + if err := gocty.FromCtyValue(args[1], &hostNum); err != nil { + return cty.UnknownVal(cty.String), err + } + _, network, err := net.ParseCIDR(args[0].AsString()) + if err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("invalid CIDR expression: %s", err) + } + + ip, err := cidr.HostBig(network, hostNum) + if err != nil { + return cty.UnknownVal(cty.String), err + } + + return cty.StringVal(ip.String()), nil + }, +}) + +// CidrNetmaskFunc constructs a function that converts an IPv4 address prefix given +// in CIDR notation into a subnet mask address. +var CidrNetmaskFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "prefix", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + _, network, err := net.ParseCIDR(args[0].AsString()) + if err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("invalid CIDR expression: %s", err) + } + + return cty.StringVal(net.IP(network.Mask).String()), nil + }, +}) + +// CidrSubnetFunc constructs a function that calculates a subnet address within +// a given IP network address prefix. 
+var CidrSubnetFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "prefix", + Type: cty.String, + }, + { + Name: "newbits", + Type: cty.Number, + }, + { + Name: "netnum", + Type: cty.Number, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + var newbits int + if err := gocty.FromCtyValue(args[1], &newbits); err != nil { + return cty.UnknownVal(cty.String), err + } + var netnum *big.Int + if err := gocty.FromCtyValue(args[2], &netnum); err != nil { + return cty.UnknownVal(cty.String), err + } + + _, network, err := net.ParseCIDR(args[0].AsString()) + if err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("invalid CIDR expression: %s", err) + } + + newNetwork, err := cidr.SubnetBig(network, newbits, netnum) + if err != nil { + return cty.UnknownVal(cty.String), err + } + + return cty.StringVal(newNetwork.String()), nil + }, +}) + +// CidrSubnetsFunc is similar to CidrSubnetFunc but calculates many consecutive +// subnet addresses at once, rather than just a single subnet extension. 
+var CidrSubnetsFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "prefix", + Type: cty.String, + }, + }, + VarParam: &function.Parameter{ + Name: "newbits", + Type: cty.Number, + }, + Type: function.StaticReturnType(cty.List(cty.String)), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + _, network, err := net.ParseCIDR(args[0].AsString()) + if err != nil { + return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "invalid CIDR expression: %s", err) + } + startPrefixLen, _ := network.Mask.Size() + + prefixLengthArgs := args[1:] + if len(prefixLengthArgs) == 0 { + return cty.ListValEmpty(cty.String), nil + } + + var firstLength int + if err := gocty.FromCtyValue(prefixLengthArgs[0], &firstLength); err != nil { + return cty.UnknownVal(cty.String), function.NewArgError(1, err) + } + firstLength += startPrefixLen + + retVals := make([]cty.Value, len(prefixLengthArgs)) + + current, _ := cidr.PreviousSubnet(network, firstLength) + for i, lengthArg := range prefixLengthArgs { + var length int + if err := gocty.FromCtyValue(lengthArg, &length); err != nil { + return cty.UnknownVal(cty.String), function.NewArgError(i+1, err) + } + + if length < 1 { + return cty.UnknownVal(cty.String), function.NewArgErrorf(i+1, "must extend prefix by at least one bit") + } + // For portability with 32-bit systems where the subnet number + // will be a 32-bit int, we only allow extension of 32 bits in + // one call even if we're running on a 64-bit machine. + // (Of course, this is significant only for IPv6.) 
+ if length > 32 { + return cty.UnknownVal(cty.String), function.NewArgErrorf(i+1, "may not extend prefix by more than 32 bits") + } + length += startPrefixLen + if length > (len(network.IP) * 8) { + protocol := "IP" + switch len(network.IP) * 8 { + case 32: + protocol = "IPv4" + case 128: + protocol = "IPv6" + } + return cty.UnknownVal(cty.String), function.NewArgErrorf(i+1, "would extend prefix to %d bits, which is too long for an %s address", length, protocol) + } + + next, rollover := cidr.NextSubnet(current, length) + if rollover || !network.Contains(next.IP) { + // If we run out of suffix bits in the base CIDR prefix then + // NextSubnet will start incrementing the prefix bits, which + // we don't allow because it would then allocate addresses + // outside of the caller's given prefix. + return cty.UnknownVal(cty.String), function.NewArgErrorf(i+1, "not enough remaining address space for a subnet with a prefix of %d bits after %s", length, current.String()) + } + + current = next + retVals[i] = cty.StringVal(current.String()) + } + + return cty.ListVal(retVals), nil + }, +}) + +// CidrHost calculates a full host IP address within a given IP network address prefix. +func CidrHost(prefix, hostnum cty.Value) (cty.Value, error) { + return CidrHostFunc.Call([]cty.Value{prefix, hostnum}) +} + +// CidrNetmask converts an IPv4 address prefix given in CIDR notation into a subnet mask address. +func CidrNetmask(prefix cty.Value) (cty.Value, error) { + return CidrNetmaskFunc.Call([]cty.Value{prefix}) +} + +// CidrSubnet calculates a subnet address within a given IP network address prefix. +func CidrSubnet(prefix, newbits, netnum cty.Value) (cty.Value, error) { + return CidrSubnetFunc.Call([]cty.Value{prefix, newbits, netnum}) +} + +// CidrSubnets calculates a sequence of consecutive subnet prefixes that may +// be of different prefix lengths under a common base prefix. 
+func CidrSubnets(prefix cty.Value, newbits ...cty.Value) (cty.Value, error) { + args := make([]cty.Value, len(newbits)+1) + args[0] = prefix + copy(args[1:], newbits) + return CidrSubnetsFunc.Call(args) +} diff --git a/pkg/scanners/terraform/parser/funcs/collection.go b/pkg/scanners/terraform/parser/funcs/collection.go new file mode 100644 index 000000000000..693b8912f618 --- /dev/null +++ b/pkg/scanners/terraform/parser/funcs/collection.go @@ -0,0 +1,711 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "errors" + "fmt" + "math/big" + "sort" + + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/convert" + "github.com/zclconf/go-cty/cty/function" + "github.com/zclconf/go-cty/cty/function/stdlib" + "github.com/zclconf/go-cty/cty/gocty" +) + +var LengthFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "value", + Type: cty.DynamicPseudoType, + AllowDynamicType: true, + AllowUnknown: true, + AllowMarked: true, + }, + }, + Type: func(args []cty.Value) (cty.Type, error) { + collTy := args[0].Type() + switch { + case collTy == cty.String || collTy.IsTupleType() || collTy.IsObjectType() || collTy.IsListType() || collTy.IsMapType() || collTy.IsSetType() || collTy == cty.DynamicPseudoType: + return cty.Number, nil + default: + return cty.Number, errors.New("argument must be a string, a collection type, or a structural type") + } + }, + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + coll := args[0] + collTy := args[0].Type() + marks := coll.Marks() + switch { + case collTy == cty.DynamicPseudoType: + return cty.UnknownVal(cty.Number).WithMarks(marks), nil + case collTy.IsTupleType(): + l := len(collTy.TupleElementTypes()) + return cty.NumberIntVal(int64(l)).WithMarks(marks), nil + case collTy.IsObjectType(): + l := len(collTy.AttributeTypes()) + return cty.NumberIntVal(int64(l)).WithMarks(marks), nil + case 
collTy == cty.String: + // We'll delegate to the cty stdlib strlen function here, because + // it deals with all of the complexities of tokenizing unicode + // grapheme clusters. + return stdlib.Strlen(coll) + case collTy.IsListType() || collTy.IsSetType() || collTy.IsMapType(): + return coll.Length(), nil + default: + // Should never happen, because of the checks in our Type func above + return cty.UnknownVal(cty.Number), errors.New("impossible value type for length(...)") + } + }, +}) + +// AllTrueFunc constructs a function that returns true if all elements of the +// list are true. If the list is empty, return true. +var AllTrueFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "list", + Type: cty.List(cty.Bool), + }, + }, + Type: function.StaticReturnType(cty.Bool), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + result := cty.True + for it := args[0].ElementIterator(); it.Next(); { + _, v := it.Element() + if !v.IsKnown() { + return cty.UnknownVal(cty.Bool), nil + } + if v.IsNull() { + return cty.False, nil + } + result = result.And(v) + if result.False() { + return cty.False, nil + } + } + return result, nil + }, +}) + +// AnyTrueFunc constructs a function that returns true if any element of the +// list is true. If the list is empty, return false. 
+var AnyTrueFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "list", + Type: cty.List(cty.Bool), + }, + }, + Type: function.StaticReturnType(cty.Bool), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + result := cty.False + var hasUnknown bool + for it := args[0].ElementIterator(); it.Next(); { + _, v := it.Element() + if !v.IsKnown() { + hasUnknown = true + continue + } + if v.IsNull() { + continue + } + result = result.Or(v) + if result.True() { + return cty.True, nil + } + } + if hasUnknown { + return cty.UnknownVal(cty.Bool), nil + } + return result, nil + }, +}) + +// CoalesceFunc constructs a function that takes any number of arguments and +// returns the first one that isn't empty. This function was copied from go-cty +// stdlib and modified so that it returns the first *non-empty* non-null element +// from a sequence, instead of merely the first non-null. +var CoalesceFunc = function.New(&function.Spec{ + Params: []function.Parameter{}, + VarParam: &function.Parameter{ + Name: "vals", + Type: cty.DynamicPseudoType, + AllowUnknown: true, + AllowDynamicType: true, + AllowNull: true, + }, + Type: func(args []cty.Value) (ret cty.Type, err error) { + argTypes := make([]cty.Type, len(args)) + for i, val := range args { + argTypes[i] = val.Type() + } + retType, _ := convert.UnifyUnsafe(argTypes) + if retType == cty.NilType { + return cty.NilType, errors.New("all arguments must have the same type") + } + return retType, nil + }, + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + for _, argVal := range args { + // We already know this will succeed because of the checks in our Type func above + argVal, _ = convert.Convert(argVal, retType) + if !argVal.IsKnown() { + return cty.UnknownVal(retType), nil + } + if argVal.IsNull() { + continue + } + if retType == cty.String && argVal.RawEquals(cty.StringVal("")) { + continue + } + + return argVal, nil + } + return cty.NilVal, 
errors.New("no non-null, non-empty-string arguments") + }, +}) + +// IndexFunc constructs a function that finds the element index for a given value in a list. +var IndexFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "list", + Type: cty.DynamicPseudoType, + }, + { + Name: "value", + Type: cty.DynamicPseudoType, + }, + }, + Type: function.StaticReturnType(cty.Number), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + if !(args[0].Type().IsListType() || args[0].Type().IsTupleType()) { + return cty.NilVal, errors.New("argument must be a list or tuple") + } + + if !args[0].IsKnown() { + return cty.UnknownVal(cty.Number), nil + } + + if args[0].LengthInt() == 0 { // Easy path + return cty.NilVal, errors.New("cannot search an empty list") + } + + for it := args[0].ElementIterator(); it.Next(); { + i, v := it.Element() + eq, err := stdlib.Equal(v, args[1]) + if err != nil { + return cty.NilVal, err + } + if !eq.IsKnown() { + return cty.UnknownVal(cty.Number), nil + } + if eq.True() { + return i, nil + } + } + return cty.NilVal, errors.New("item not found") + + }, +}) + +// LookupFunc constructs a function that performs dynamic lookups of map types. 
+var LookupFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "inputMap", + Type: cty.DynamicPseudoType, + AllowMarked: true, + }, + { + Name: "key", + Type: cty.String, + AllowMarked: true, + }, + }, + VarParam: &function.Parameter{ + Name: "default", + Type: cty.DynamicPseudoType, + AllowUnknown: true, + AllowDynamicType: true, + AllowNull: true, + AllowMarked: true, + }, + Type: func(args []cty.Value) (ret cty.Type, err error) { + if len(args) < 1 || len(args) > 3 { + return cty.NilType, fmt.Errorf("lookup() takes two or three arguments, got %d", len(args)) + } + + ty := args[0].Type() + + switch { + case ty.IsObjectType(): + if !args[1].IsKnown() { + return cty.DynamicPseudoType, nil + } + + keyVal, _ := args[1].Unmark() + key := keyVal.AsString() + if ty.HasAttribute(key) { + return args[0].GetAttr(key).Type(), nil + } else if len(args) == 3 { + // if the key isn't found but a default is provided, + // return the default type + return args[2].Type(), nil + } + return cty.DynamicPseudoType, function.NewArgErrorf(0, "the given object has no attribute %q", key) + case ty.IsMapType(): + if len(args) == 3 { + _, err = convert.Convert(args[2], ty.ElementType()) + if err != nil { + return cty.NilType, function.NewArgErrorf(2, "the default value must have the same type as the map elements") + } + } + return ty.ElementType(), nil + default: + return cty.NilType, function.NewArgErrorf(0, "lookup() requires a map as the first argument") + } + }, + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + var defaultVal cty.Value + defaultValueSet := false + + if len(args) == 3 { + // intentionally leave default value marked + defaultVal = args[2] + defaultValueSet = true + } + + // keep track of marks from the collection and key + var markses []cty.ValueMarks + + // unmark collection, retain marks to reapply later + mapVar, mapMarks := args[0].Unmark() + markses = append(markses, mapMarks) + + // include marks on the key 
in the result + keyVal, keyMarks := args[1].Unmark() + if len(keyMarks) > 0 { + markses = append(markses, keyMarks) + } + lookupKey := keyVal.AsString() + + if !mapVar.IsKnown() { + return cty.UnknownVal(retType).WithMarks(markses...), nil + } + + if mapVar.Type().IsObjectType() { + if mapVar.Type().HasAttribute(lookupKey) { + return mapVar.GetAttr(lookupKey).WithMarks(markses...), nil + } + } else if mapVar.HasIndex(cty.StringVal(lookupKey)) == cty.True { + return mapVar.Index(cty.StringVal(lookupKey)).WithMarks(markses...), nil + } + + if defaultValueSet { + defaultVal, err = convert.Convert(defaultVal, retType) + if err != nil { + return cty.NilVal, err + } + return defaultVal.WithMarks(markses...), nil + } + + return cty.UnknownVal(cty.DynamicPseudoType).WithMarks(markses...), fmt.Errorf( + "lookup failed to find '%s'", lookupKey) + }, +}) + +// MatchkeysFunc constructs a function that constructs a new list by taking a +// subset of elements from one list whose indexes match the corresponding +// indexes of values in another list. 
+var MatchkeysFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "values", + Type: cty.List(cty.DynamicPseudoType), + }, + { + Name: "keys", + Type: cty.List(cty.DynamicPseudoType), + }, + { + Name: "searchset", + Type: cty.List(cty.DynamicPseudoType), + }, + }, + Type: func(args []cty.Value) (cty.Type, error) { + ty, _ := convert.UnifyUnsafe([]cty.Type{args[1].Type(), args[2].Type()}) + if ty == cty.NilType { + return cty.NilType, errors.New("keys and searchset must be of the same type") + } + + // the return type is based on args[0] (values) + return args[0].Type(), nil + }, + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + if !args[0].IsKnown() { + return cty.UnknownVal(cty.List(retType.ElementType())), nil + } + + if args[0].LengthInt() != args[1].LengthInt() { + return cty.ListValEmpty(retType.ElementType()), errors.New("length of keys and values should be equal") + } + + output := make([]cty.Value, 0) + values := args[0] + + // Keys and searchset must be the same type. + // We can skip error checking here because we've already verified that + // they can be unified in the Type function + ty, _ := convert.UnifyUnsafe([]cty.Type{args[1].Type(), args[2].Type()}) + keys, _ := convert.Convert(args[1], ty) + searchset, _ := convert.Convert(args[2], ty) + + // if searchset is empty, return an empty list. 
+ if searchset.LengthInt() == 0 { + return cty.ListValEmpty(retType.ElementType()), nil + } + + if !values.IsWhollyKnown() || !keys.IsWhollyKnown() { + return cty.UnknownVal(retType), nil + } + + i := 0 + for it := keys.ElementIterator(); it.Next(); { + _, key := it.Element() + for iter := searchset.ElementIterator(); iter.Next(); { + _, search := iter.Element() + eq, err := stdlib.Equal(key, search) + if err != nil { + return cty.NilVal, err + } + if !eq.IsKnown() { + return cty.ListValEmpty(retType.ElementType()), nil + } + if eq.True() { + v := values.Index(cty.NumberIntVal(int64(i))) + output = append(output, v) + break + } + } + i++ + } + + // if we haven't matched any key, then output is an empty list. + if len(output) == 0 { + return cty.ListValEmpty(retType.ElementType()), nil + } + return cty.ListVal(output), nil + }, +}) + +// OneFunc returns either the first element of a one-element list, or null +// if given a zero-element list. +var OneFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "list", + Type: cty.DynamicPseudoType, + }, + }, + Type: func(args []cty.Value) (cty.Type, error) { + ty := args[0].Type() + switch { + case ty.IsListType() || ty.IsSetType(): + return ty.ElementType(), nil + case ty.IsTupleType(): + etys := ty.TupleElementTypes() + switch len(etys) { + case 0: + // No specific type information, so we'll ultimately return + // a null value of unknown type. + return cty.DynamicPseudoType, nil + case 1: + return etys[0], nil + } + } + return cty.NilType, function.NewArgErrorf(0, "must be a list, set, or tuple value with either zero or one elements") + }, + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + val := args[0] + ty := val.Type() + + // Our parameter spec above doesn't set AllowUnknown or AllowNull, + // so we can assume our top-level collection is both known and non-null + // in here. 
+
+		switch {
+		case ty.IsListType() || ty.IsSetType():
+			lenVal := val.Length()
+			if !lenVal.IsKnown() {
+				return cty.UnknownVal(retType), nil
+			}
+			var l int
+			err := gocty.FromCtyValue(lenVal, &l)
+			if err != nil {
+				// It would be very strange to get here, because that would
+				// suggest that the length is either not a number or isn't
+				// an integer, which would suggest a bug in cty.
+				return cty.NilVal, fmt.Errorf("invalid collection length: %s", err)
+			}
+			switch l {
+			case 0:
+				return cty.NullVal(retType), nil
+			case 1:
+				var ret cty.Value
+				// We'll use an iterator here because that works for both lists
+				// and sets, whereas indexing directly would only work for lists.
+				// Since we've just checked the length, we should only actually
+				// run this loop body once.
+				for it := val.ElementIterator(); it.Next(); {
+					_, ret = it.Element()
+				}
+				return ret, nil
+			}
+		case ty.IsTupleType():
+			etys := ty.TupleElementTypes()
+			switch len(etys) {
+			case 0:
+				return cty.NullVal(retType), nil
+			case 1:
+				ret := val.Index(cty.NumberIntVal(0))
+				return ret, nil
+			}
+		}
+		return cty.NilVal, function.NewArgErrorf(0, "must be a list, set, or tuple value with either zero or one elements")
+	},
+})
+
+// SumFunc constructs a function that returns the sum of all
+// numbers provided in a list
+var SumFunc = function.New(&function.Spec{
+	Params: []function.Parameter{
+		{
+			Name: "list",
+			Type: cty.DynamicPseudoType,
+		},
+	},
+	Type: function.StaticReturnType(cty.Number),
+	Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
+
+		if !args[0].CanIterateElements() {
+			return cty.NilVal, function.NewArgErrorf(0, "cannot sum noniterable")
+		}
+
+		if args[0].LengthInt() == 0 { // Easy path
+			return cty.NilVal, function.NewArgErrorf(0, "cannot sum an empty list")
+		}
+
+		arg := args[0].AsValueSlice()
+		ty := args[0].Type()
+
+		if !ty.IsListType() && !ty.IsSetType() && !ty.IsTupleType() {
+			return cty.NilVal, function.NewArgErrorf(0, "argument must be list, set, or tuple. Received %s", ty.FriendlyName()) // NewArgErrorf is printf-style; no redundant Sprintf (go vet: non-constant format string)
+		}
+
+		if !args[0].IsWhollyKnown() {
+			return cty.UnknownVal(cty.Number), nil
+		}
+
+		// big.Float.Add can panic if the input values are opposing infinities,
+		// so we must catch that here in order to remain within
+		// the cty Function abstraction.
+		defer func() {
+			if r := recover(); r != nil {
+				if _, ok := r.(big.ErrNaN); ok {
+					ret = cty.NilVal
+					err = fmt.Errorf("can't compute sum of opposing infinities")
+				} else {
+					// not a panic we recognize
+					panic(r)
+				}
+			}
+		}()
+
+		s := arg[0]
+		if s.IsNull() {
+			return cty.NilVal, function.NewArgErrorf(0, "argument must be list, set, or tuple of number values")
+		}
+		for _, v := range arg[1:] {
+			if v.IsNull() {
+				return cty.NilVal, function.NewArgErrorf(0, "argument must be list, set, or tuple of number values")
+			}
+			v, err = convert.Convert(v, cty.Number)
+			if err != nil {
+				return cty.NilVal, function.NewArgErrorf(0, "argument must be list, set, or tuple of number values")
+			}
+			s = s.Add(v)
+		}
+
+		return s, nil
+	},
+})
+
+// TransposeFunc constructs a function that takes a map of lists of strings and
+// swaps the keys and values to produce a new map of lists of strings.
+var TransposeFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "values", + Type: cty.Map(cty.List(cty.String)), + }, + }, + Type: function.StaticReturnType(cty.Map(cty.List(cty.String))), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + inputMap := args[0] + if !inputMap.IsWhollyKnown() { + return cty.UnknownVal(retType), nil + } + + outputMap := make(map[string]cty.Value) + tmpMap := make(map[string][]string) + + for it := inputMap.ElementIterator(); it.Next(); { + inKey, inVal := it.Element() + for iter := inVal.ElementIterator(); iter.Next(); { + _, val := iter.Element() + if !val.Type().Equals(cty.String) { + return cty.MapValEmpty(cty.List(cty.String)), errors.New("input must be a map of lists of strings") + } + + outKey := val.AsString() + if _, ok := tmpMap[outKey]; !ok { + tmpMap[outKey] = make([]string, 0) + } + outVal := tmpMap[outKey] + outVal = append(outVal, inKey.AsString()) + sort.Strings(outVal) + tmpMap[outKey] = outVal + } + } + + for outKey, outVal := range tmpMap { + values := make([]cty.Value, 0) + for _, v := range outVal { + values = append(values, cty.StringVal(v)) + } + outputMap[outKey] = cty.ListVal(values) + } + + if len(outputMap) == 0 { + return cty.MapValEmpty(cty.List(cty.String)), nil + } + + return cty.MapVal(outputMap), nil + }, +}) + +// ListFunc constructs a function that takes an arbitrary number of arguments +// and returns a list containing those values in the same order. +// +// This function is deprecated in Terraform v0.12 +var ListFunc = function.New(&function.Spec{ + Params: []function.Parameter{}, + VarParam: &function.Parameter{ + Name: "vals", + Type: cty.DynamicPseudoType, + AllowUnknown: true, + AllowDynamicType: true, + AllowNull: true, + }, + Type: func(args []cty.Value) (ret cty.Type, err error) { + return cty.DynamicPseudoType, fmt.Errorf("the \"list\" function was deprecated in Terraform v0.12 and is no longer available; use tolist([ ... 
]) syntax to write a literal list") + }, + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + return cty.DynamicVal, fmt.Errorf("the \"list\" function was deprecated in Terraform v0.12 and is no longer available; use tolist([ ... ]) syntax to write a literal list") + }, +}) + +// MapFunc constructs a function that takes an even number of arguments and +// returns a map whose elements are constructed from consecutive pairs of arguments. +// +// This function is deprecated in Terraform v0.12 +var MapFunc = function.New(&function.Spec{ + Params: []function.Parameter{}, + VarParam: &function.Parameter{ + Name: "vals", + Type: cty.DynamicPseudoType, + AllowUnknown: true, + AllowDynamicType: true, + AllowNull: true, + }, + Type: func(args []cty.Value) (ret cty.Type, err error) { + return cty.DynamicPseudoType, fmt.Errorf("the \"map\" function was deprecated in Terraform v0.12 and is no longer available; use tomap({ ... }) syntax to write a literal map") + }, + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + return cty.DynamicVal, fmt.Errorf("the \"map\" function was deprecated in Terraform v0.12 and is no longer available; use tomap({ ... }) syntax to write a literal map") + }, +}) + +// Length returns the number of elements in the given collection or number of +// Unicode characters in the given string. +func Length(collection cty.Value) (cty.Value, error) { + return LengthFunc.Call([]cty.Value{collection}) +} + +// AllTrue returns true if all elements of the list are true. If the list is empty, +// return true. +func AllTrue(collection cty.Value) (cty.Value, error) { + return AllTrueFunc.Call([]cty.Value{collection}) +} + +// AnyTrue returns true if any element of the list is true. If the list is empty, +// return false. 
+func AnyTrue(collection cty.Value) (cty.Value, error) {
+	return AnyTrueFunc.Call([]cty.Value{collection})
+}
+
+// Coalesce takes any number of arguments and returns the first one that isn't empty.
+func Coalesce(args ...cty.Value) (cty.Value, error) {
+	return CoalesceFunc.Call(args)
+}
+
+// Index finds the element index for a given value in a list.
+func Index(list, value cty.Value) (cty.Value, error) {
+	return IndexFunc.Call([]cty.Value{list, value})
+}
+
+// List takes any number of list arguments and returns a list containing those
+// values in the same order. This function was deprecated in Terraform v0.12
+// and calling it always returns an error.
+func List(args ...cty.Value) (cty.Value, error) {
+	return ListFunc.Call(args)
+}
+
+// Lookup performs a dynamic lookup into a map.
+// There are two required arguments, map and key, plus an optional default,
+// which is a value to return if no key is found in map.
+func Lookup(args ...cty.Value) (cty.Value, error) {
+	return LookupFunc.Call(args)
+}
+
+// Map takes an even number of arguments and returns a map whose elements are constructed
+// from consecutive pairs of arguments.
+func Map(args ...cty.Value) (cty.Value, error) {
+	return MapFunc.Call(args)
+}
+
+// Matchkeys constructs a new list by taking a subset of elements from one list
+// whose indexes match the corresponding indexes of values in another list.
+func Matchkeys(values, keys, searchset cty.Value) (cty.Value, error) {
+	return MatchkeysFunc.Call([]cty.Value{values, keys, searchset})
+}
+
+// One returns either the first element of a one-element list, or null
+// if given a zero-element list.
+func One(list cty.Value) (cty.Value, error) {
+	return OneFunc.Call([]cty.Value{list})
+}
+
+// Sum adds numbers in a list, set, or tuple.
+func Sum(list cty.Value) (cty.Value, error) {
+	return SumFunc.Call([]cty.Value{list})
+}
+
+// Transpose takes a map of lists of strings and swaps the keys and values to
+// produce a new map of lists of strings.
+func Transpose(values cty.Value) (cty.Value, error) { + return TransposeFunc.Call([]cty.Value{values}) +} diff --git a/pkg/scanners/terraform/parser/funcs/conversion.go b/pkg/scanners/terraform/parser/funcs/conversion.go new file mode 100644 index 000000000000..02fb3164a6f0 --- /dev/null +++ b/pkg/scanners/terraform/parser/funcs/conversion.go @@ -0,0 +1,223 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "fmt" + "sort" + "strconv" + "strings" + + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/convert" + "github.com/zclconf/go-cty/cty/function" +) + +// MakeToFunc constructs a "to..." function, like "tostring", which converts +// its argument to a specific type or type kind. +// +// The given type wantTy can be any type constraint that cty's "convert" package +// would accept. In particular, this means that you can pass +// cty.List(cty.DynamicPseudoType) to mean "list of any single type", which +// will then cause cty to attempt to unify all of the element types when given +// a tuple. +func MakeToFunc(wantTy cty.Type) function.Function { + return function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "v", + // We use DynamicPseudoType rather than wantTy here so that + // all values will pass through the function API verbatim and + // we can handle the conversion logic within the Type and + // Impl functions. This allows us to customize the error + // messages to be more appropriate for an explicit type + // conversion, whereas the cty function system produces + // messages aimed at _implicit_ type conversions. 
+ Type: cty.DynamicPseudoType, + AllowNull: true, + AllowMarked: true, + }, + }, + Type: func(args []cty.Value) (cty.Type, error) { + gotTy := args[0].Type() + if gotTy.Equals(wantTy) { + return wantTy, nil + } + conv := convert.GetConversionUnsafe(args[0].Type(), wantTy) + if conv == nil { + // We'll use some specialized errors for some trickier cases, + // but most we can handle in a simple way. + switch { + case gotTy.IsTupleType() && wantTy.IsTupleType(): + return cty.NilType, function.NewArgErrorf(0, "incompatible tuple type for conversion: %s", convert.MismatchMessage(gotTy, wantTy)) + case gotTy.IsObjectType() && wantTy.IsObjectType(): + return cty.NilType, function.NewArgErrorf(0, "incompatible object type for conversion: %s", convert.MismatchMessage(gotTy, wantTy)) + default: + return cty.NilType, function.NewArgErrorf(0, "cannot convert %s to %s", gotTy.FriendlyName(), wantTy.FriendlyNameForConstraint()) + } + } + // If a conversion is available then everything is fine. + return wantTy, nil + }, + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + // We didn't set "AllowUnknown" on our argument, so it is guaranteed + // to be known here but may still be null. + ret, err := convert.Convert(args[0], retType) + if err != nil { + val, _ := args[0].UnmarkDeep() + // Because we used GetConversionUnsafe above, conversion can + // still potentially fail in here. For example, if the user + // asks to convert the string "a" to bool then we'll + // optimistically permit it during type checking but fail here + // once we note that the value isn't either "true" or "false". + gotTy := val.Type() + switch { + case Contains(args[0], MarkedSensitive): + // Generic message so we won't inadvertently disclose + // information about sensitive values. 
+ return cty.NilVal, function.NewArgErrorf(0, "cannot convert this sensitive %s to %s", gotTy.FriendlyName(), wantTy.FriendlyNameForConstraint()) + + case gotTy == cty.String && wantTy == cty.Bool: + what := "string" + if !val.IsNull() { + what = strconv.Quote(val.AsString()) + } + return cty.NilVal, function.NewArgErrorf(0, `cannot convert %s to bool; only the strings "true" or "false" are allowed`, what) + case gotTy == cty.String && wantTy == cty.Number: + what := "string" + if !val.IsNull() { + what = strconv.Quote(val.AsString()) + } + return cty.NilVal, function.NewArgErrorf(0, `cannot convert %s to number; given string must be a decimal representation of a number`, what) + default: + return cty.NilVal, function.NewArgErrorf(0, "cannot convert %s to %s", gotTy.FriendlyName(), wantTy.FriendlyNameForConstraint()) + } + } + return ret, nil + }, + }) +} + +var TypeFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "value", + Type: cty.DynamicPseudoType, + AllowDynamicType: true, + AllowUnknown: true, + AllowNull: true, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + return cty.StringVal(TypeString(args[0].Type())).Mark(MarkedRaw), nil + }, +}) + +// Modified copy of TypeString from go-cty: +// https://github.com/zclconf/go-cty-debug/blob/master/ctydebug/type_string.go +// +// TypeString returns a string representation of a given type that is +// reminiscent of Go syntax calling into the cty package but is mainly +// intended for easy human inspection of values in tests, debug output, etc. +// +// The resulting string will include newlines and indentation in order to +// increase the readability of complex structures. It always ends with a +// newline, so you can print this result directly to your output. 
+func TypeString(ty cty.Type) string { + var b strings.Builder + writeType(ty, &b, 0) + return b.String() +} + +func writeType(ty cty.Type, b *strings.Builder, indent int) { + switch { + case ty == cty.NilType: + b.WriteString("nil") + return + case ty.IsObjectType(): + atys := ty.AttributeTypes() + if len(atys) == 0 { + b.WriteString("object({})") + return + } + attrNames := make([]string, 0, len(atys)) + for name := range atys { + attrNames = append(attrNames, name) + } + sort.Strings(attrNames) + b.WriteString("object({\n") + indent++ + for _, name := range attrNames { + aty := atys[name] + b.WriteString(indentSpaces(indent)) + fmt.Fprintf(b, "%s: ", name) + writeType(aty, b, indent) + b.WriteString(",\n") + } + indent-- + b.WriteString(indentSpaces(indent)) + b.WriteString("})") + case ty.IsTupleType(): + etys := ty.TupleElementTypes() + if len(etys) == 0 { + b.WriteString("tuple([])") + return + } + b.WriteString("tuple([\n") + indent++ + for _, ety := range etys { + b.WriteString(indentSpaces(indent)) + writeType(ety, b, indent) + b.WriteString(",\n") + } + indent-- + b.WriteString(indentSpaces(indent)) + b.WriteString("])") + case ty.IsCollectionType(): + ety := ty.ElementType() + switch { + case ty.IsListType(): + b.WriteString("list(") + case ty.IsMapType(): + b.WriteString("map(") + case ty.IsSetType(): + b.WriteString("set(") + default: + // At the time of writing there are no other collection types, + // but we'll be robust here and just pass through the GoString + // of anything we don't recognize. + b.WriteString(ty.FriendlyName()) + return + } + // Because object and tuple types render split over multiple + // lines, a collection type container around them can end up + // being hard to see when scanning, so we'll generate some extra + // indentation to make a collection of structural type more visually + // distinct from the structural type alone. 
+ complexElem := ety.IsObjectType() || ety.IsTupleType() + if complexElem { + indent++ + b.WriteString("\n") + b.WriteString(indentSpaces(indent)) + } + writeType(ty.ElementType(), b, indent) + if complexElem { + indent-- + b.WriteString(",\n") + b.WriteString(indentSpaces(indent)) + } + b.WriteString(")") + default: + // For any other type we'll just use its GoString and assume it'll + // follow the usual GoString conventions. + b.WriteString(ty.FriendlyName()) + } +} + +func indentSpaces(level int) string { + return strings.Repeat(" ", level) +} + +func Type(input []cty.Value) (cty.Value, error) { + return TypeFunc.Call(input) +} diff --git a/pkg/scanners/terraform/parser/funcs/crypto.go b/pkg/scanners/terraform/parser/funcs/crypto.go new file mode 100644 index 000000000000..424c4c184763 --- /dev/null +++ b/pkg/scanners/terraform/parser/funcs/crypto.go @@ -0,0 +1,335 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "crypto/md5" + "crypto/rsa" + "crypto/sha1" + "crypto/sha256" + "crypto/sha512" + "encoding/asn1" + "encoding/base64" + "encoding/hex" + "fmt" + "hash" + "io" + "io/fs" + "strings" + + uuidv5 "github.com/google/uuid" + uuid "github.com/hashicorp/go-uuid" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/function" + "github.com/zclconf/go-cty/cty/gocty" + "golang.org/x/crypto/bcrypt" + "golang.org/x/crypto/ssh" +) + +var UUIDFunc = function.New(&function.Spec{ + Params: []function.Parameter{}, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + result, err := uuid.GenerateUUID() + if err != nil { + return cty.UnknownVal(cty.String), err + } + return cty.StringVal(result), nil + }, +}) + +var UUIDV5Func = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "namespace", + Type: cty.String, + }, + { + Name: "name", + Type: cty.String, + }, + }, + 
Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + var namespace uuidv5.UUID + switch { + case args[0].AsString() == "dns": + namespace = uuidv5.NameSpaceDNS + case args[0].AsString() == "url": + namespace = uuidv5.NameSpaceURL + case args[0].AsString() == "oid": + namespace = uuidv5.NameSpaceOID + case args[0].AsString() == "x500": + namespace = uuidv5.NameSpaceX500 + default: + if namespace, err = uuidv5.Parse(args[0].AsString()); err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("uuidv5() doesn't support namespace %s (%v)", args[0].AsString(), err) + } + } + val := args[1].AsString() + return cty.StringVal(uuidv5.NewSHA1(namespace, []byte(val)).String()), nil + }, +}) + +// Base64Sha256Func constructs a function that computes the SHA256 hash of a given string +// and encodes it with Base64. +var Base64Sha256Func = makeStringHashFunction(sha256.New, base64.StdEncoding.EncodeToString) + +// MakeFileBase64Sha256Func constructs a function that is like Base64Sha256Func but reads the +// contents of a file rather than hashing a given literal string. +func MakeFileBase64Sha256Func(target fs.FS, baseDir string) function.Function { + return makeFileHashFunction(target, baseDir, sha256.New, base64.StdEncoding.EncodeToString) +} + +// Base64Sha512Func constructs a function that computes the SHA256 hash of a given string +// and encodes it with Base64. +var Base64Sha512Func = makeStringHashFunction(sha512.New, base64.StdEncoding.EncodeToString) + +// MakeFileBase64Sha512Func constructs a function that is like Base64Sha512Func but reads the +// contents of a file rather than hashing a given literal string. +func MakeFileBase64Sha512Func(target fs.FS, baseDir string) function.Function { + return makeFileHashFunction(target, baseDir, sha512.New, base64.StdEncoding.EncodeToString) +} + +// BcryptFunc constructs a function that computes a hash of the given string using the Blowfish cipher. 
+var BcryptFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "str", + Type: cty.String, + }, + }, + VarParam: &function.Parameter{ + Name: "cost", + Type: cty.Number, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + defaultCost := 10 + + if len(args) > 1 { + var val int + if err := gocty.FromCtyValue(args[1], &val); err != nil { + return cty.UnknownVal(cty.String), err + } + defaultCost = val + } + + if len(args) > 2 { + return cty.UnknownVal(cty.String), fmt.Errorf("bcrypt() takes no more than two arguments") + } + + input := args[0].AsString() + out, err := bcrypt.GenerateFromPassword([]byte(input), defaultCost) + if err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("error occurred generating password %s", err.Error()) + } + + return cty.StringVal(string(out)), nil + }, +}) + +// Md5Func constructs a function that computes the MD5 hash of a given string and encodes it with hexadecimal digits. +var Md5Func = makeStringHashFunction(md5.New, hex.EncodeToString) + +// MakeFileMd5Func constructs a function that is like Md5Func but reads the +// contents of a file rather than hashing a given literal string. +func MakeFileMd5Func(target fs.FS, baseDir string) function.Function { + return makeFileHashFunction(target, baseDir, md5.New, hex.EncodeToString) +} + +// RsaDecryptFunc constructs a function that decrypts an RSA-encrypted ciphertext. 
+var RsaDecryptFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "ciphertext", + Type: cty.String, + }, + { + Name: "privatekey", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + s := args[0].AsString() + key := args[1].AsString() + + b, err := base64.StdEncoding.DecodeString(s) + if err != nil { + return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "failed to decode input %q: cipher text must be base64-encoded", s) + } + + rawKey, err := ssh.ParseRawPrivateKey([]byte(key)) + if err != nil { + var errStr string + switch e := err.(type) { + case asn1.SyntaxError: + errStr = strings.ReplaceAll(e.Error(), "asn1: syntax error", "invalid ASN1 data in the given private key") + case asn1.StructuralError: + errStr = strings.ReplaceAll(e.Error(), "asn1: structure error", "invalid ASN1 data in the given private key") + default: + errStr = fmt.Sprintf("invalid private key: %s", e) + } + return cty.UnknownVal(cty.String), function.NewArgErrorf(1, errStr) + } + privateKey, ok := rawKey.(*rsa.PrivateKey) + if !ok { + return cty.UnknownVal(cty.String), function.NewArgErrorf(1, "invalid private key type %t", rawKey) + } + + out, err := rsa.DecryptPKCS1v15(nil, privateKey, b) + if err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("failed to decrypt: %s", err) + } + + return cty.StringVal(string(out)), nil + }, +}) + +// Sha1Func constructs a function that computes the SHA1 hash of a given string +// and encodes it with hexadecimal digits. +var Sha1Func = makeStringHashFunction(sha1.New, hex.EncodeToString) + +// MakeFileSha1Func constructs a function that is like Sha1Func but reads the +// contents of a file rather than hashing a given literal string. 
func MakeFileSha1Func(target fs.FS, baseDir string) function.Function {
	// Delegates to makeFileHashFunction, which resolves the path argument via
	// openFile(target, baseDir, path).
	return makeFileHashFunction(target, baseDir, sha1.New, hex.EncodeToString)
}

// Sha256Func constructs a function that computes the SHA256 hash of a given string
// and encodes it with hexadecimal digits.
var Sha256Func = makeStringHashFunction(sha256.New, hex.EncodeToString)

// MakeFileSha256Func constructs a function that is like Sha256Func but reads the
// contents of a file rather than hashing a given literal string.
func MakeFileSha256Func(target fs.FS, baseDir string) function.Function {
	return makeFileHashFunction(target, baseDir, sha256.New, hex.EncodeToString)
}

// Sha512Func constructs a function that computes the SHA512 hash of a given string
// and encodes it with hexadecimal digits.
var Sha512Func = makeStringHashFunction(sha512.New, hex.EncodeToString)

// MakeFileSha512Func constructs a function that is like Sha512Func but reads the
// contents of a file rather than hashing a given literal string.
+func MakeFileSha512Func(target fs.FS, baseDir string) function.Function { + return makeFileHashFunction(target, baseDir, sha512.New, hex.EncodeToString) +} + +func makeStringHashFunction(hf func() hash.Hash, enc func([]byte) string) function.Function { + return function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "str", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + s := args[0].AsString() + h := hf() + h.Write([]byte(s)) + rv := enc(h.Sum(nil)) + return cty.StringVal(rv), nil + }, + }) +} + +func makeFileHashFunction(target fs.FS, baseDir string, hf func() hash.Hash, enc func([]byte) string) function.Function { + return function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "path", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + path := args[0].AsString() + f, err := openFile(target, baseDir, path) + if err != nil { + return cty.UnknownVal(cty.String), err + } + + h := hf() + _, err = io.Copy(h, f) + if err != nil { + return cty.UnknownVal(cty.String), err + } + rv := enc(h.Sum(nil)) + return cty.StringVal(rv), nil + }, + }) +} + +// UUID generates and returns a Type-4 UUID in the standard hexadecimal string +// format. +// +// This is not a pure function: it will generate a different result for each +// call. It must therefore be registered as an impure function in the function +// table in the "lang" package. +func UUID() (cty.Value, error) { + return UUIDFunc.Call(nil) +} + +// UUIDV5 generates and returns a Type-5 UUID in the standard hexadecimal string +// format. +func UUIDV5(namespace cty.Value, name cty.Value) (cty.Value, error) { + return UUIDV5Func.Call([]cty.Value{namespace, name}) +} + +// Base64Sha256 computes the SHA256 hash of a given string and encodes it with +// Base64. 
//
// The given string is first encoded as UTF-8 and then the SHA256 algorithm is applied
// as defined in RFC 4634. The raw hash is then encoded with Base64 before returning.
// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4.
func Base64Sha256(str cty.Value) (cty.Value, error) {
	return Base64Sha256Func.Call([]cty.Value{str})
}

// Base64Sha512 computes the SHA512 hash of a given string and encodes it with
// Base64.
//
// The given string is first encoded as UTF-8 and then the SHA512 algorithm is applied
// as defined in RFC 4634. The raw hash is then encoded with Base64 before returning.
// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4.
func Base64Sha512(str cty.Value) (cty.Value, error) {
	return Base64Sha512Func.Call([]cty.Value{str})
}

// Bcrypt computes a hash of the given string using the Blowfish cipher,
// returning a string in the Modular Crypt Format
// usually expected in the shadow password file on many Unix systems.
func Bcrypt(str cty.Value, cost ...cty.Value) (cty.Value, error) {
	// The optional cost values are appended after the string argument before
	// delegating to BcryptFunc.
	args := make([]cty.Value, len(cost)+1)
	args[0] = str
	copy(args[1:], cost)
	return BcryptFunc.Call(args)
}

// Md5 computes the MD5 hash of a given string and encodes it with hexadecimal digits.
func Md5(str cty.Value) (cty.Value, error) {
	return Md5Func.Call([]cty.Value{str})
}

// RsaDecrypt decrypts an RSA-encrypted ciphertext, returning the corresponding
// cleartext.
func RsaDecrypt(ciphertext, privatekey cty.Value) (cty.Value, error) {
	return RsaDecryptFunc.Call([]cty.Value{ciphertext, privatekey})
}

// Sha1 computes the SHA1 hash of a given string and encodes it with hexadecimal digits.
func Sha1(str cty.Value) (cty.Value, error) {
	return Sha1Func.Call([]cty.Value{str})
}

// Sha256 computes the SHA256 hash of a given string and encodes it with hexadecimal digits.
+func Sha256(str cty.Value) (cty.Value, error) { + return Sha256Func.Call([]cty.Value{str}) +} + +// Sha512 computes the SHA512 hash of a given string and encodes it with hexadecimal digits. +func Sha512(str cty.Value) (cty.Value, error) { + return Sha512Func.Call([]cty.Value{str}) +} diff --git a/pkg/scanners/terraform/parser/funcs/datetime.go b/pkg/scanners/terraform/parser/funcs/datetime.go new file mode 100644 index 000000000000..253e59eef018 --- /dev/null +++ b/pkg/scanners/terraform/parser/funcs/datetime.go @@ -0,0 +1,71 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "time" + + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/function" +) + +// TimestampFunc constructs a function that returns a string representation of the current date and time. +var TimestampFunc = function.New(&function.Spec{ + Params: []function.Parameter{}, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + return cty.StringVal(time.Now().UTC().Format(time.RFC3339)), nil + }, +}) + +// TimeAddFunc constructs a function that adds a duration to a timestamp, returning a new timestamp. +var TimeAddFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "timestamp", + Type: cty.String, + }, + { + Name: "duration", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + ts, err := time.Parse(time.RFC3339, args[0].AsString()) + if err != nil { + return cty.UnknownVal(cty.String), err + } + duration, err := time.ParseDuration(args[1].AsString()) + if err != nil { + return cty.UnknownVal(cty.String), err + } + + return cty.StringVal(ts.Add(duration).Format(time.RFC3339)), nil + }, +}) + +// Timestamp returns a string representation of the current date and time. 
+// +// In the Terraform language, timestamps are conventionally represented as +// strings using RFC 3339 "Date and Time format" syntax, and so timestamp +// returns a string in this format. +func Timestamp() (cty.Value, error) { + return TimestampFunc.Call([]cty.Value{}) +} + +// TimeAdd adds a duration to a timestamp, returning a new timestamp. +// +// In the Terraform language, timestamps are conventionally represented as +// strings using RFC 3339 "Date and Time format" syntax. Timeadd requires +// the timestamp argument to be a string conforming to this syntax. +// +// `duration` is a string representation of a time difference, consisting of +// sequences of number and unit pairs, like `"1.5h"` or `1h30m`. The accepted +// units are `ns`, `us` (or `µs`), `"ms"`, `"s"`, `"m"`, and `"h"`. The first +// number may be negative to indicate a negative duration, like `"-2h5m"`. +// +// The result is a string, also in RFC 3339 format, representing the result +// of adding the given duration to the given timestamp. +func TimeAdd(timestamp cty.Value, duration cty.Value) (cty.Value, error) { + return TimeAddFunc.Call([]cty.Value{timestamp, duration}) +} diff --git a/pkg/scanners/terraform/parser/funcs/defaults.go b/pkg/scanners/terraform/parser/funcs/defaults.go new file mode 100644 index 000000000000..4467b81e35ce --- /dev/null +++ b/pkg/scanners/terraform/parser/funcs/defaults.go @@ -0,0 +1,288 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "fmt" + + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/convert" + "github.com/zclconf/go-cty/cty/function" +) + +// DefaultsFunc is a helper function for substituting default values in +// place of null values in a given data structure. +// +// See the documentation for function Defaults for more information. 
+var DefaultsFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "input", + Type: cty.DynamicPseudoType, + AllowNull: true, + AllowMarked: true, + }, + { + Name: "defaults", + Type: cty.DynamicPseudoType, + AllowMarked: true, + }, + }, + Type: func(args []cty.Value) (cty.Type, error) { + // The result type is guaranteed to be the same as the input type, + // since all we're doing is replacing null values with non-null + // values of the same type. + retType := args[0].Type() + defaultsType := args[1].Type() + + // This function is aimed at filling in object types or collections + // of object types where some of the attributes might be null, so + // it doesn't make sense to use a primitive type directly with it. + // (The "coalesce" function may be appropriate for such cases.) + if retType.IsPrimitiveType() { + // This error message is a bit of a fib because we can actually + // apply defaults to tuples too, but we expect that to be so + // unusual as to not be worth mentioning here, because mentioning + // it would require using some less-well-known Terraform language + // terminology in the message (tuple types, structural types). + return cty.DynamicPseudoType, function.NewArgErrorf(1, "only object types and collections of object types can have defaults applied") + } + + defaultsPath := make(cty.Path, 0, 4) // some capacity so that most structures won't reallocate + if err := defaultsAssertSuitableFallback(retType, defaultsType, defaultsPath); err != nil { + errMsg := err.Error() + return cty.DynamicPseudoType, function.NewArgErrorf(1, "%s", errMsg) + } + + return retType, nil + }, + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + if args[0].Type().HasDynamicTypes() { + // If the types our input object aren't known yet for some reason + // then we'll defer all of our work here, because our + // interpretation of the defaults depends on the types in + // the input. 
+ return cty.UnknownVal(retType), nil + } + + v := defaultsApply(args[0], args[1]) + return v, nil + }, +}) + +func defaultsApply(input, fallback cty.Value) cty.Value { + wantTy := input.Type() + + umInput, inputMarks := input.Unmark() + umFb, fallbackMarks := fallback.Unmark() + + // If neither are known, we very conservatively return an unknown value + // with the union of marks on both input and default. + if !(umInput.IsKnown() && umFb.IsKnown()) { + return cty.UnknownVal(wantTy).WithMarks(inputMarks).WithMarks(fallbackMarks) + } + + // For the rest of this function we're assuming that the given defaults + // will always be valid, because we expect to have caught any problems + // during the type checking phase. Any inconsistencies that reach here are + // therefore considered to be implementation bugs, and so will panic. + + // Our strategy depends on the kind of type we're working with. + switch { + case wantTy.IsPrimitiveType(): + // For leaf primitive values the rule is relatively simple: use the + // input if it's non-null, or fallback if input is null. + if !umInput.IsNull() { + return input + } + v, err := convert.Convert(umFb, wantTy) + if err != nil { + // Should not happen because we checked in defaultsAssertSuitableFallback + panic(err.Error()) + } + return v.WithMarks(fallbackMarks) + + case wantTy.IsObjectType(): + // For structural types, a null input value must be passed through. We + // do not apply default values for missing optional structural values, + // only their contents. + // + // We also pass through the input if the fallback value is null. This + // can happen if the given defaults do not include a value for this + // attribute. 
+ if umInput.IsNull() || umFb.IsNull() { + return input + } + atys := wantTy.AttributeTypes() + ret := map[string]cty.Value{} + for attr, aty := range atys { + inputSub := umInput.GetAttr(attr) + fallbackSub := cty.NullVal(aty) + if umFb.Type().HasAttribute(attr) { + fallbackSub = umFb.GetAttr(attr) + } + ret[attr] = defaultsApply(inputSub.WithMarks(inputMarks), fallbackSub.WithMarks(fallbackMarks)) + } + return cty.ObjectVal(ret) + + case wantTy.IsTupleType(): + // For structural types, a null input value must be passed through. We + // do not apply default values for missing optional structural values, + // only their contents. + // + // We also pass through the input if the fallback value is null. This + // can happen if the given defaults do not include a value for this + // attribute. + if umInput.IsNull() || umFb.IsNull() { + return input + } + + l := wantTy.Length() + ret := make([]cty.Value, l) + for i := 0; i < l; i++ { + inputSub := umInput.Index(cty.NumberIntVal(int64(i))) + fallbackSub := umFb.Index(cty.NumberIntVal(int64(i))) + ret[i] = defaultsApply(inputSub.WithMarks(inputMarks), fallbackSub.WithMarks(fallbackMarks)) + } + return cty.TupleVal(ret) + + case wantTy.IsCollectionType(): + // For collection types we apply a single fallback value to each + // element of the input collection, because in the situations this + // function is intended for we assume that the number of elements + // is the caller's decision, and so we'll just apply the same defaults + // to all of the elements. 
+ ety := wantTy.ElementType() + switch { + case wantTy.IsMapType(): + newVals := map[string]cty.Value{} + + if !umInput.IsNull() { + for it := umInput.ElementIterator(); it.Next(); { + k, v := it.Element() + newVals[k.AsString()] = defaultsApply(v.WithMarks(inputMarks), fallback.WithMarks(fallbackMarks)) + } + } + + if len(newVals) == 0 { + return cty.MapValEmpty(ety) + } + return cty.MapVal(newVals) + case wantTy.IsListType(), wantTy.IsSetType(): + var newVals []cty.Value + + if !umInput.IsNull() { + for it := umInput.ElementIterator(); it.Next(); { + _, v := it.Element() + newV := defaultsApply(v.WithMarks(inputMarks), fallback.WithMarks(fallbackMarks)) + newVals = append(newVals, newV) + } + } + + if len(newVals) == 0 { + if wantTy.IsSetType() { + return cty.SetValEmpty(ety) + } + return cty.ListValEmpty(ety) + } + if wantTy.IsSetType() { + return cty.SetVal(newVals) + } + return cty.ListVal(newVals) + default: + // There are no other collection types, so this should not happen + panic(fmt.Sprintf("invalid collection type %#v", wantTy)) + } + default: + // We should've caught anything else in defaultsAssertSuitableFallback, + // so this should not happen. + panic(fmt.Sprintf("invalid target type %#v", wantTy)) + } +} + +func defaultsAssertSuitableFallback(wantTy, fallbackTy cty.Type, fallbackPath cty.Path) error { + // If the type we want is a collection type then we need to keep peeling + // away collection type wrappers until we find the non-collection-type + // that's underneath, which is what the fallback will actually be applied + // to. + inCollection := false + for wantTy.IsCollectionType() { + wantTy = wantTy.ElementType() + inCollection = true + } + + switch { + case wantTy.IsPrimitiveType(): + // The fallback is valid if it's equal to or convertible to what we want. 
+ if fallbackTy.Equals(wantTy) { + return nil + } + conversion := convert.GetConversion(fallbackTy, wantTy) + if conversion == nil { + msg := convert.MismatchMessage(fallbackTy, wantTy) + return fallbackPath.NewErrorf("invalid default value for %s: %s", wantTy.FriendlyName(), msg) + } + return nil + case wantTy.IsObjectType(): + if !fallbackTy.IsObjectType() { + if inCollection { + return fallbackPath.NewErrorf("the default value for a collection of an object type must itself be an object type, not %s", fallbackTy.FriendlyName()) + } + return fallbackPath.NewErrorf("the default value for an object type must itself be an object type, not %s", fallbackTy.FriendlyName()) + } + for attr, wantAty := range wantTy.AttributeTypes() { + if !fallbackTy.HasAttribute(attr) { + continue // it's always okay to not have a default value + } + fallbackSubpath := fallbackPath.GetAttr(attr) + fallbackSubTy := fallbackTy.AttributeType(attr) + err := defaultsAssertSuitableFallback(wantAty, fallbackSubTy, fallbackSubpath) + if err != nil { + return err + } + } + for attr := range fallbackTy.AttributeTypes() { + if !wantTy.HasAttribute(attr) { + fallbackSubpath := fallbackPath.GetAttr(attr) + return fallbackSubpath.NewErrorf("target type does not expect an attribute named %q", attr) + } + } + return nil + case wantTy.IsTupleType(): + if !fallbackTy.IsTupleType() { + if inCollection { + return fallbackPath.NewErrorf("the default value for a collection of a tuple type must itself be a tuple type, not %s", fallbackTy.FriendlyName()) + } + return fallbackPath.NewErrorf("the default value for a tuple type must itself be a tuple type, not %s", fallbackTy.FriendlyName()) + } + wantEtys := wantTy.TupleElementTypes() + fallbackEtys := fallbackTy.TupleElementTypes() + if got, want := len(wantEtys), len(fallbackEtys); got != want { + return fallbackPath.NewErrorf("the default value for a tuple type of length %d must also have length %d, not %d", want, want, got) + } + for i := 0; i < len(wantEtys); 
i++ { + fallbackSubpath := fallbackPath.IndexInt(i) + wantSubTy := wantEtys[i] + fallbackSubTy := fallbackEtys[i] + err := defaultsAssertSuitableFallback(wantSubTy, fallbackSubTy, fallbackSubpath) + if err != nil { + return err + } + } + return nil + default: + // No other types are supported right now. + return fallbackPath.NewErrorf("cannot apply defaults to %s", wantTy.FriendlyName()) + } +} + +// Defaults is a helper function for substituting default values in +// place of null values in a given data structure. +// +// This is primarily intended for use with a module input variable that +// has an object type constraint (or a collection thereof) that has optional +// attributes, so that the receiver of a value that omits those attributes +// can insert non-null default values in place of the null values caused by +// omitting the attributes. +func Defaults(input, defaults cty.Value) (cty.Value, error) { + return DefaultsFunc.Call([]cty.Value{input, defaults}) +} diff --git a/pkg/scanners/terraform/parser/funcs/encoding.go b/pkg/scanners/terraform/parser/funcs/encoding.go new file mode 100644 index 000000000000..f74a508fb7ed --- /dev/null +++ b/pkg/scanners/terraform/parser/funcs/encoding.go @@ -0,0 +1,254 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "bytes" + "compress/gzip" + "encoding/base64" + "fmt" + "log" + "net/url" + "unicode/utf8" + + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/function" + "golang.org/x/text/encoding/ianaindex" +) + +// Base64DecodeFunc constructs a function that decodes a string containing a base64 sequence. 
+var Base64DecodeFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "str", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + s := args[0].AsString() + sDec, err := base64.StdEncoding.DecodeString(s) + if err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("failed to decode base64 data '%s'", s) + } + if !utf8.Valid([]byte(sDec)) { + log.Printf("[DEBUG] the result of decoding the provided string is not valid UTF-8: %s", sDec) + return cty.UnknownVal(cty.String), fmt.Errorf("the result of decoding the provided string is not valid UTF-8") + } + return cty.StringVal(string(sDec)), nil + }, +}) + +// Base64EncodeFunc constructs a function that encodes a string to a base64 sequence. +var Base64EncodeFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "str", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + return cty.StringVal(base64.StdEncoding.EncodeToString([]byte(args[0].AsString()))), nil + }, +}) + +// TextEncodeBase64Func constructs a function that encodes a string to a target encoding and then to a base64 sequence. 
+var TextEncodeBase64Func = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "string", + Type: cty.String, + }, + { + Name: "encoding", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + encoding, err := ianaindex.IANA.Encoding(args[1].AsString()) + if err != nil || encoding == nil { + return cty.UnknownVal(cty.String), function.NewArgErrorf(1, "%q is not a supported IANA encoding name or alias in this Terraform version", args[1].AsString()) + } + + encName, err := ianaindex.IANA.Name(encoding) + if err != nil { // would be weird, since we just read this encoding out + encName = args[1].AsString() + } + + encoder := encoding.NewEncoder() + encodedInput, err := encoder.Bytes([]byte(args[0].AsString())) + if err != nil { + // The string representations of "err" disclose implementation + // details of the underlying library, and the main error we might + // like to return a special message for is unexported as + // golang.org/x/text/encoding/internal.RepertoireError, so this + // is just a generic error message for now. + // + // We also don't include the string itself in the message because + // it can typically be very large, contain newline characters, + // etc. + return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "the given string contains characters that cannot be represented in %s", encName) + } + + return cty.StringVal(base64.StdEncoding.EncodeToString(encodedInput)), nil + }, +}) + +// TextDecodeBase64Func constructs a function that decodes a base64 sequence to a target encoding. 
+var TextDecodeBase64Func = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "source", + Type: cty.String, + }, + { + Name: "encoding", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + encoding, err := ianaindex.IANA.Encoding(args[1].AsString()) + if err != nil || encoding == nil { + return cty.UnknownVal(cty.String), function.NewArgErrorf(1, "%q is not a supported IANA encoding name or alias in this Terraform version", args[1].AsString()) + } + + encName, err := ianaindex.IANA.Name(encoding) + if err != nil { // would be weird, since we just read this encoding out + encName = args[1].AsString() + } + + s := args[0].AsString() + sDec, err := base64.StdEncoding.DecodeString(s) + if err != nil { + switch err := err.(type) { + case base64.CorruptInputError: + return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "the given value is has an invalid base64 symbol at offset %d", int(err)) + default: + return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "invalid source string: %T", err) + } + + } + + decoder := encoding.NewDecoder() + decoded, err := decoder.Bytes(sDec) + if err != nil || bytes.ContainsRune(decoded, '�') { + return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "the given string contains symbols that are not defined for %s", encName) + } + + return cty.StringVal(string(decoded)), nil + }, +}) + +// Base64GzipFunc constructs a function that compresses a string with gzip and then encodes the result in +// Base64 encoding. 
+var Base64GzipFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "str", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + s := args[0].AsString() + + var b bytes.Buffer + gz := gzip.NewWriter(&b) + if _, err := gz.Write([]byte(s)); err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("failed to write gzip raw data: '%s'", s) + } + if err := gz.Flush(); err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("failed to flush gzip writer: '%s'", s) + } + if err := gz.Close(); err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("failed to close gzip writer: '%s'", s) + } + return cty.StringVal(base64.StdEncoding.EncodeToString(b.Bytes())), nil + }, +}) + +// URLEncodeFunc constructs a function that applies URL encoding to a given string. +var URLEncodeFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "str", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + return cty.StringVal(url.QueryEscape(args[0].AsString())), nil + }, +}) + +// Base64Decode decodes a string containing a base64 sequence. +// +// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. +// +// Strings in the Terraform language are sequences of unicode characters rather +// than bytes, so this function will also interpret the resulting bytes as +// UTF-8. If the bytes after Base64 decoding are _not_ valid UTF-8, this function +// produces an error. +func Base64Decode(str cty.Value) (cty.Value, error) { + return Base64DecodeFunc.Call([]cty.Value{str}) +} + +// Base64Encode applies Base64 encoding to a string. +// +// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. 
+// +// Strings in the Terraform language are sequences of unicode characters rather +// than bytes, so this function will first encode the characters from the string +// as UTF-8, and then apply Base64 encoding to the result. +func Base64Encode(str cty.Value) (cty.Value, error) { + return Base64EncodeFunc.Call([]cty.Value{str}) +} + +// Base64Gzip compresses a string with gzip and then encodes the result in +// Base64 encoding. +// +// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. +// +// Strings in the Terraform language are sequences of unicode characters rather +// than bytes, so this function will first encode the characters from the string +// as UTF-8, then apply gzip compression, and then finally apply Base64 encoding. +func Base64Gzip(str cty.Value) (cty.Value, error) { + return Base64GzipFunc.Call([]cty.Value{str}) +} + +// URLEncode applies URL encoding to a given string. +// +// This function identifies characters in the given string that would have a +// special meaning when included as a query string argument in a URL and +// escapes them using RFC 3986 "percent encoding". +// +// If the given string contains non-ASCII characters, these are first encoded as +// UTF-8 and then percent encoding is applied separately to each UTF-8 byte. +func URLEncode(str cty.Value) (cty.Value, error) { + return URLEncodeFunc.Call([]cty.Value{str}) +} + +// TextEncodeBase64 applies Base64 encoding to a string that was encoded before with a target encoding. +// +// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. +// +// First step is to apply the target IANA encoding (e.g. UTF-16LE). +// Strings in the Terraform language are sequences of unicode characters rather +// than bytes, so this function will first encode the characters from the string +// as UTF-8, and then apply Base64 encoding to the result. 
+func TextEncodeBase64(str, enc cty.Value) (cty.Value, error) { + return TextEncodeBase64Func.Call([]cty.Value{str, enc}) +} + +// TextDecodeBase64 decodes a string containing a base64 sequence whereas a specific encoding of the string is expected. +// +// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. +// +// Strings in the Terraform language are sequences of unicode characters rather +// than bytes, so this function will also interpret the resulting bytes as +// the target encoding. +func TextDecodeBase64(str, enc cty.Value) (cty.Value, error) { + return TextDecodeBase64Func.Call([]cty.Value{str, enc}) +} diff --git a/pkg/scanners/terraform/parser/funcs/filesystem.go b/pkg/scanners/terraform/parser/funcs/filesystem.go new file mode 100644 index 000000000000..910e17f325c6 --- /dev/null +++ b/pkg/scanners/terraform/parser/funcs/filesystem.go @@ -0,0 +1,467 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "encoding/base64" + "fmt" + "io" + "io/fs" + "os" + "path/filepath" + "unicode/utf8" + + "github.com/bmatcuk/doublestar/v4" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclsyntax" + "github.com/mitchellh/go-homedir" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/function" +) + +// MakeFileFunc constructs a function that takes a file path and returns the +// contents of that file, either directly as a string (where valid UTF-8 is +// required) or as a string containing base64 bytes. 
+func MakeFileFunc(target fs.FS, baseDir string, encBase64 bool) function.Function { + return function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "path", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + path := args[0].AsString() + src, err := readFileBytes(target, baseDir, path) + if err != nil { + err = function.NewArgError(0, err) + return cty.UnknownVal(cty.String), err + } + + switch { + case encBase64: + enc := base64.StdEncoding.EncodeToString(src) + return cty.StringVal(enc), nil + default: + if !utf8.Valid(src) { + return cty.UnknownVal(cty.String), fmt.Errorf("contents of %s are not valid UTF-8; use the filebase64 function to obtain the Base64 encoded contents or the other file functions (e.g. filemd5, filesha256) to obtain file hashing results instead", path) + } + return cty.StringVal(string(src)), nil + } + }, + }) +} + +// MakeTemplateFileFunc constructs a function that takes a file path and +// an arbitrary object of named values and attempts to render the referenced +// file as a template using HCL template syntax. +// +// The template itself may recursively call other functions so a callback +// must be provided to get access to those functions. The template cannot, +// however, access any variables defined in the scope: it is restricted only to +// those variables provided in the second function argument, to ensure that all +// dependencies on other graph nodes can be seen before executing this function. +// +// As a special exception, a referenced template file may not recursively call +// the templatefile function, since that would risk the same file being +// included into itself indefinitely. 
func MakeTemplateFileFunc(target fs.FS, baseDir string, funcsCb func() map[string]function.Function) function.Function {

	params := []function.Parameter{
		{
			Name: "path",
			Type: cty.String,
		},
		{
			Name: "vars",
			Type: cty.DynamicPseudoType,
		},
	}

	// loadTmpl reads the referenced file and parses it as an HCL template.
	loadTmpl := func(fn string) (hcl.Expression, error) {
		// We re-use File here to ensure the same filename interpretation
		// as it does, along with its other safety checks.
		tmplVal, err := File(target, baseDir, cty.StringVal(fn))
		if err != nil {
			return nil, err
		}

		expr, diags := hclsyntax.ParseTemplate([]byte(tmplVal.AsString()), fn, hcl.Pos{Line: 1, Column: 1})
		if diags.HasErrors() {
			return nil, diags
		}

		return expr, nil
	}

	// renderTmpl evaluates a parsed template against the given vars value,
	// exposing the caller-provided functions (minus templatefile itself).
	renderTmpl := func(expr hcl.Expression, varsVal cty.Value) (cty.Value, error) {
		if varsTy := varsVal.Type(); !(varsTy.IsMapType() || varsTy.IsObjectType()) {
			return cty.DynamicVal, function.NewArgErrorf(1, "invalid vars value: must be a map") // or an object, but we don't strongly distinguish these most of the time
		}

		ctx := &hcl.EvalContext{
			Variables: varsVal.AsValueMap(),
		}

		// We require all of the variables to be valid HCL identifiers, because
		// otherwise there would be no way to refer to them in the template
		// anyway. Rejecting this here gives better feedback to the user
		// than a syntax error somewhere in the template itself.
		for n := range ctx.Variables {
			if !hclsyntax.ValidIdentifier(n) {
				// This error message intentionally doesn't describe _all_ of
				// the different permutations that are technically valid as an
				// HCL identifier, but rather focuses on what we might
				// consider to be an "idiomatic" variable name.
				return cty.DynamicVal, function.NewArgErrorf(1, "invalid template variable name %q: must start with a letter, followed by zero or more letters, digits, and underscores", n)
			}
		}

		// We'll pre-check references in the template here so we can give a
		// more specialized error message than HCL would by default, so it's
		// clearer that this problem is coming from a templatefile call.
		for _, traversal := range expr.Variables() {
			root := traversal.RootName()
			if _, ok := ctx.Variables[root]; !ok {
				return cty.DynamicVal, function.NewArgErrorf(1, "vars map does not contain key %q, referenced at %s", root, traversal[0].SourceRange())
			}
		}

		givenFuncs := funcsCb() // this callback indirection is to avoid chicken/egg problems
		funcs := make(map[string]function.Function, len(givenFuncs))
		for name, fn := range givenFuncs {
			if name == "templatefile" {
				// We stub this one out to prevent recursive calls.
				funcs[name] = function.New(&function.Spec{
					Params: params,
					Type: func(args []cty.Value) (cty.Type, error) {
						return cty.NilType, fmt.Errorf("cannot recursively call templatefile from inside templatefile call")
					},
				})
				continue
			}
			funcs[name] = fn
		}
		ctx.Functions = funcs

		val, diags := expr.Value(ctx)
		if diags.HasErrors() {
			return cty.DynamicVal, diags
		}
		return val, nil
	}

	return function.New(&function.Spec{
		Params: params,
		Type: func(args []cty.Value) (cty.Type, error) {
			if !(args[0].IsKnown() && args[1].IsKnown()) {
				return cty.DynamicPseudoType, nil
			}

			// We'll render our template now to see what result type it produces.
			// A template consisting only of a single interpolation can potentially
			// return any type.
			expr, err := loadTmpl(args[0].AsString())
			if err != nil {
				return cty.DynamicPseudoType, err
			}

			// This is safe even if args[1] contains unknowns because the HCL
			// template renderer itself knows how to short-circuit those.
			val, err := renderTmpl(expr, args[1])
			return val.Type(), err
		},
		Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
			expr, err := loadTmpl(args[0].AsString())
			if err != nil {
				return cty.DynamicVal, err
			}
			return renderTmpl(expr, args[1])
		},
	})

}

// MakeFileExistsFunc constructs a function that takes a path
// and determines whether a file exists at that path.
//
// NOTE(review): unlike MakeFileFunc, this always consults the host OS via
// os.Stat and does not take the target fs.FS override — confirm this is
// intentional for virtual-filesystem scans.
func MakeFileExistsFunc(baseDir string) function.Function {
	return function.New(&function.Spec{
		Params: []function.Parameter{
			{
				Name: "path",
				Type: cty.String,
			},
		},
		Type: function.StaticReturnType(cty.Bool),
		Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
			path := args[0].AsString()
			path, err := homedir.Expand(path)
			if err != nil {
				return cty.UnknownVal(cty.Bool), fmt.Errorf("failed to expand ~: %s", err)
			}

			if !filepath.IsAbs(path) {
				path = filepath.Join(baseDir, path)
			}

			// Ensure that the path is canonical for the host OS
			path = filepath.Clean(path)

			fi, err := os.Stat(path)
			if err != nil {
				if os.IsNotExist(err) {
					return cty.False, nil
				}
				return cty.UnknownVal(cty.Bool), fmt.Errorf("failed to stat %s", path)
			}

			if fi.Mode().IsRegular() {
				return cty.True, nil
			}

			// Directories, symlinks, devices etc. are an error rather than "false".
			return cty.False, fmt.Errorf("%s is not a regular file, but %q",
				path, fi.Mode().String())
		},
	})
}

// MakeFileSetFunc constructs a function that takes a glob pattern
// and enumerates a file set from that pattern.
//
// Only regular files are returned; matches are made relative to the given
// path and use forward-slash separators regardless of host OS.
// NOTE(review): this also reads the host OS (os.DirFS/os.Stat) rather than
// the target fs.FS used by the file-content functions — confirm intended.
func MakeFileSetFunc(baseDir string) function.Function {
	return function.New(&function.Spec{
		Params: []function.Parameter{
			{
				Name: "path",
				Type: cty.String,
			},
			{
				Name: "pattern",
				Type: cty.String,
			},
		},
		Type: function.StaticReturnType(cty.Set(cty.String)),
		Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
			path := args[0].AsString()
			pattern := args[1].AsString()

			if !filepath.IsAbs(path) {
				path = filepath.Join(baseDir, path)
			}

			// Join the path to the glob pattern, while ensuring the full
			// pattern is canonical for the host OS. The joined path is
			// automatically cleaned during this operation.
			pattern = filepath.Join(path, pattern)

			matches, err := doublestar.Glob(os.DirFS(path), pattern)
			if err != nil {
				return cty.UnknownVal(cty.Set(cty.String)), fmt.Errorf("failed to glob pattern (%s): %s", pattern, err)
			}

			var matchVals []cty.Value
			for _, match := range matches {
				fi, err := os.Stat(match)

				if err != nil {
					return cty.UnknownVal(cty.Set(cty.String)), fmt.Errorf("failed to stat (%s): %s", match, err)
				}

				if !fi.Mode().IsRegular() {
					continue
				}

				// Remove the path and file separator from matches.
				match, err = filepath.Rel(path, match)

				if err != nil {
					return cty.UnknownVal(cty.Set(cty.String)), fmt.Errorf("failed to trim path of match (%s): %s", match, err)
				}

				// Replace any remaining file separators with forward slash (/)
				// separators for cross-system compatibility.
				match = filepath.ToSlash(match)

				matchVals = append(matchVals, cty.StringVal(match))
			}

			if len(matchVals) == 0 {
				return cty.SetValEmpty(cty.String), nil
			}

			return cty.SetVal(matchVals), nil
		},
	})
}

// BasenameFunc constructs a function that takes a string containing a filesystem path
// and removes all except the last portion from it.
var BasenameFunc = function.New(&function.Spec{
	Params: []function.Parameter{
		{
			Name: "path",
			Type: cty.String,
		},
	},
	Type: function.StaticReturnType(cty.String),
	Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
		return cty.StringVal(filepath.Base(args[0].AsString())), nil
	},
})

// DirnameFunc constructs a function that takes a string containing a filesystem path
// and removes the last portion from it.
var DirnameFunc = function.New(&function.Spec{
	Params: []function.Parameter{
		{
			Name: "path",
			Type: cty.String,
		},
	},
	Type: function.StaticReturnType(cty.String),
	Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
		return cty.StringVal(filepath.Dir(args[0].AsString())), nil
	},
})

// AbsPathFunc constructs a function that converts a filesystem path to an absolute path.
// The result always uses forward-slash separators (filepath.ToSlash).
var AbsPathFunc = function.New(&function.Spec{
	Params: []function.Parameter{
		{
			Name: "path",
			Type: cty.String,
		},
	},
	Type: function.StaticReturnType(cty.String),
	Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
		absPath, err := filepath.Abs(args[0].AsString())
		return cty.StringVal(filepath.ToSlash(absPath)), err
	},
})

// PathExpandFunc constructs a function that expands a leading ~ character to the current user's home directory.
var PathExpandFunc = function.New(&function.Spec{
	Params: []function.Parameter{
		{
			Name: "path",
			Type: cty.String,
		},
	},
	Type: function.StaticReturnType(cty.String),
	Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {

		homePath, err := homedir.Expand(args[0].AsString())
		return cty.StringVal(homePath), err
	},
})

// openFile resolves path (expanding ~, joining relative paths onto baseDir,
// and cleaning the result) and opens it from target when a fs.FS is given,
// falling back to the host OS otherwise.
func openFile(target fs.FS, baseDir, path string) (fs.File, error) {
	path, err := homedir.Expand(path)
	if err != nil {
		return nil, fmt.Errorf("failed to expand ~: %s", err)
	}

	if !filepath.IsAbs(path) {
		path = filepath.Join(baseDir, path)
	}

	// Ensure that the path is canonical for the host OS
	path = filepath.Clean(path)

	if target != nil {
		return target.Open(path)
	}
	return os.Open(path)
}

// readFileBytes opens the file via openFile and returns its full contents.
// A missing file yields a Terraform-specific hint rather than a bare error.
func readFileBytes(target fs.FS, baseDir, path string) ([]byte, error) {
	f, err := openFile(target, baseDir, path)
	if err != nil {
		if os.IsNotExist(err) {
			// An extra Terraform-specific hint for this situation
			return nil, fmt.Errorf("no file exists at %s; this function works only with files that are distributed as part of the configuration source code, so if this file will be created by a resource in this configuration you must instead obtain this result from an attribute of that resource", path)
		}
		return nil, err
	}

	src, err := io.ReadAll(f)
	if err != nil {
		return nil, fmt.Errorf("failed to read %s", path)
	}

	return src, nil
}

// File reads the contents of the file at the given path.
//
// The file must contain valid UTF-8 bytes, or this function will return an error.
//
// The underlying function implementation works relative to a particular base
// directory, so this wrapper takes a base directory string and uses it to
// construct the underlying function before calling it.
func File(target fs.FS, baseDir string, path cty.Value) (cty.Value, error) {
	fn := MakeFileFunc(target, baseDir, false)
	return fn.Call([]cty.Value{path})
}

// FileExists determines whether a file exists at the given path.
//
// The underlying function implementation works relative to a particular base
// directory, so this wrapper takes a base directory string and uses it to
// construct the underlying function before calling it.
func FileExists(baseDir string, path cty.Value) (cty.Value, error) {
	fn := MakeFileExistsFunc(baseDir)
	return fn.Call([]cty.Value{path})
}

// FileSet enumerates a set of files given a glob pattern
//
// The underlying function implementation works relative to a particular base
// directory, so this wrapper takes a base directory string and uses it to
// construct the underlying function before calling it.
func FileSet(baseDir string, path, pattern cty.Value) (cty.Value, error) {
	fn := MakeFileSetFunc(baseDir)
	return fn.Call([]cty.Value{path, pattern})
}

// FileBase64 reads the contents of the file at the given path.
//
// The bytes from the file are encoded as base64 before returning.
//
// The underlying function implementation works relative to a particular base
// directory, so this wrapper takes a base directory string and uses it to
// construct the underlying function before calling it.
func FileBase64(target fs.FS, baseDir string, path cty.Value) (cty.Value, error) {
	fn := MakeFileFunc(target, baseDir, true)
	return fn.Call([]cty.Value{path})
}

// Basename takes a string containing a filesystem path and removes all except the last portion from it.
//
// The underlying function implementation works only with the path string and does not access the filesystem itself.
// It is therefore unable to take into account filesystem features such as symlinks.
//
// If the path is empty then the result is ".", representing the current working directory.
func Basename(path cty.Value) (cty.Value, error) {
	return BasenameFunc.Call([]cty.Value{path})
}

// Dirname takes a string containing a filesystem path and removes the last portion from it.
//
// The underlying function implementation works only with the path string and does not access the filesystem itself.
// It is therefore unable to take into account filesystem features such as symlinks.
//
// If the path is empty then the result is ".", representing the current working directory.
func Dirname(path cty.Value) (cty.Value, error) {
	return DirnameFunc.Call([]cty.Value{path})
}

// Pathexpand takes a string that might begin with a `~` segment, and if so it replaces that segment with
// the current user's home directory path.
//
// The underlying function implementation works only with the path string and does not access the filesystem itself.
// It is therefore unable to take into account filesystem features such as symlinks.
//
// If the leading segment in the path is not `~` then the given path is returned unmodified.
func Pathexpand(path cty.Value) (cty.Value, error) {
	return PathExpandFunc.Call([]cty.Value{path})
}

// ---- new file: pkg/scanners/terraform/parser/funcs/marks.go ----

// Copied from github.com/hashicorp/terraform/internal/lang/marks
package funcs

import (
	"github.com/zclconf/go-cty/cty"
	"golang.org/x/text/cases"
	"golang.org/x/text/language"
)

// valueMarks allow creating strictly typed values for use as cty.Value marks.
// The variable name for new values should be the title-cased format of the
// value to better match the GoString output for debugging.
type valueMark string

func (m valueMark) GoString() string {
	return "marks." + cases.Title(language.English).String(string(m))
}

// Has returns true if and only if the cty.Value has the given mark.
func Has(val cty.Value, mark valueMark) bool {
	return val.HasMark(mark)
}

// Contains returns true if the cty.Value or any value within it contains
// the given mark.
func Contains(val cty.Value, mark valueMark) bool {
	ret := false
	// Walk the whole value; stop descending as soon as the mark is found.
	_ = cty.Walk(val, func(_ cty.Path, v cty.Value) (bool, error) {
		if v.HasMark(mark) {
			ret = true
			return false, nil
		}
		return true, nil
	})
	return ret
}

// MarkedSensitive indicates that this value is marked as sensitive in the context of
// Terraform.
var MarkedSensitive = valueMark("sensitive")

// MarkedRaw is used to indicate to the repl that the value should be written without
// any formatting.
var MarkedRaw = valueMark("raw")

// ---- new file: pkg/scanners/terraform/parser/funcs/number.go ----

// Copied from github.com/hashicorp/terraform/internal/lang/funcs
package funcs

import (
	"math"
	"math/big"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function"
	"github.com/zclconf/go-cty/cty/gocty"
)

// LogFunc constructs a function that returns the logarithm of a given number in a given base.
var LogFunc = function.New(&function.Spec{
	Params: []function.Parameter{
		{
			Name: "num",
			Type: cty.Number,
		},
		{
			Name: "base",
			Type: cty.Number,
		},
	},
	Type: function.StaticReturnType(cty.Number),
	Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
		var num float64
		if err := gocty.FromCtyValue(args[0], &num); err != nil {
			return cty.UnknownVal(cty.String), err
		}

		var base float64
		if err := gocty.FromCtyValue(args[1], &base); err != nil {
			return cty.UnknownVal(cty.String), err
		}

		// log_base(num) computed via the change-of-base identity.
		return cty.NumberFloatVal(math.Log(num) / math.Log(base)), nil
	},
})

// PowFunc constructs a function that raises a given number to a given power.
var PowFunc = function.New(&function.Spec{
	Params: []function.Parameter{
		{
			Name: "num",
			Type: cty.Number,
		},
		{
			Name: "power",
			Type: cty.Number,
		},
	},
	Type: function.StaticReturnType(cty.Number),
	Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
		var num float64
		if err := gocty.FromCtyValue(args[0], &num); err != nil {
			return cty.UnknownVal(cty.String), err
		}

		var power float64
		if err := gocty.FromCtyValue(args[1], &power); err != nil {
			return cty.UnknownVal(cty.String), err
		}

		return cty.NumberFloatVal(math.Pow(num, power)), nil
	},
})

// SignumFunc constructs a function that determines the sign of a number,
// returning -1 for negative numbers, +1 for positive numbers and 0 for zero.
var SignumFunc = function.New(&function.Spec{
	Params: []function.Parameter{
		{
			Name: "num",
			Type: cty.Number,
		},
	},
	Type: function.StaticReturnType(cty.Number),
	Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
		var num int
		if err := gocty.FromCtyValue(args[0], &num); err != nil {
			return cty.UnknownVal(cty.String), err
		}
		switch {
		case num < 0:
			return cty.NumberIntVal(-1), nil
		case num > 0:
			return cty.NumberIntVal(+1), nil
		default:
			return cty.NumberIntVal(0), nil
		}
	},
})

// ParseIntFunc constructs a function that parses a string argument and returns an integer of the specified base.
var ParseIntFunc = function.New(&function.Spec{
	Params: []function.Parameter{
		{
			Name: "number",
			Type: cty.DynamicPseudoType,
		},
		{
			Name: "base",
			Type: cty.Number,
		},
	},

	Type: func(args []cty.Value) (cty.Type, error) {
		// The first argument is dynamically typed so we can produce a
		// tailored error message here rather than a generic conversion error.
		if !args[0].Type().Equals(cty.String) {
			return cty.Number, function.NewArgErrorf(0, "first argument must be a string, not %s", args[0].Type().FriendlyName())
		}
		return cty.Number, nil
	},

	Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
		var numstr string
		var base int
		var err error

		if err = gocty.FromCtyValue(args[0], &numstr); err != nil {
			return cty.UnknownVal(cty.String), function.NewArgError(0, err)
		}

		if err = gocty.FromCtyValue(args[1], &base); err != nil {
			return cty.UnknownVal(cty.Number), function.NewArgError(1, err)
		}

		// big.Int.SetString supports bases 2..62.
		if base < 2 || base > 62 {
			return cty.UnknownVal(cty.Number), function.NewArgErrorf(
				1,
				"base must be a whole number between 2 and 62 inclusive",
			)
		}

		num, ok := (&big.Int{}).SetString(numstr, base)
		if !ok {
			return cty.UnknownVal(cty.Number), function.NewArgErrorf(
				0,
				"cannot parse %q as a base %d integer",
				numstr,
				base,
			)
		}

		parsedNum := cty.NumberVal((&big.Float{}).SetInt(num))

		return parsedNum, nil
	},
})

// Log returns the logarithm of a given number in a given base.
func Log(num, base cty.Value) (cty.Value, error) {
	return LogFunc.Call([]cty.Value{num, base})
}

// Pow raises a given number to a given power.
func Pow(num, power cty.Value) (cty.Value, error) {
	return PowFunc.Call([]cty.Value{num, power})
}

// Signum determines the sign of a number, returning a number between -1 and
// 1 to represent the sign.
func Signum(num cty.Value) (cty.Value, error) {
	return SignumFunc.Call([]cty.Value{num})
}

// ParseInt parses a string argument and returns an integer of the specified base.
func ParseInt(num cty.Value, base cty.Value) (cty.Value, error) {
	return ParseIntFunc.Call([]cty.Value{num, base})
}

// ---- new file: pkg/scanners/terraform/parser/funcs/sensitive.go ----

// Copied from github.com/hashicorp/terraform/internal/lang/funcs
package funcs

import (
	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function"
)

// SensitiveFunc returns a value identical to its argument except that
// Terraform will consider it to be sensitive.
var SensitiveFunc = function.New(&function.Spec{
	Params: []function.Parameter{
		{
			Name:             "value",
			Type:             cty.DynamicPseudoType,
			AllowUnknown:     true,
			AllowNull:        true,
			AllowMarked:      true,
			AllowDynamicType: true,
		},
	},
	Type: func(args []cty.Value) (cty.Type, error) {
		// This function only affects the value's marks, so the result
		// type is always the same as the argument type.
		return args[0].Type(), nil
	},
	Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
		// Drop any existing marks before applying the sensitive mark.
		val, _ := args[0].Unmark()
		return val.Mark(MarkedSensitive), nil
	},
})

// NonsensitiveFunc takes a sensitive value and returns the same value without
// the sensitive marking, effectively exposing the value.
var NonsensitiveFunc = function.New(&function.Spec{
	Params: []function.Parameter{
		{
			Name:             "value",
			Type:             cty.DynamicPseudoType,
			AllowUnknown:     true,
			AllowNull:        true,
			AllowMarked:      true,
			AllowDynamicType: true,
		},
	},
	Type: func(args []cty.Value) (cty.Type, error) {
		// This function only affects the value's marks, so the result
		// type is always the same as the argument type.
		return args[0].Type(), nil
	},
	Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
		// Calling nonsensitive on an already-nonsensitive known value is
		// treated as a user error (unknowns are allowed through).
		if args[0].IsKnown() && !args[0].HasMark(MarkedSensitive) {
			return cty.DynamicVal, function.NewArgErrorf(0, "the given value is not sensitive, so this call is redundant")
		}
		v, m := args[0].Unmark()
		delete(m, MarkedSensitive) // remove the sensitive marking
		return v.WithMarks(m), nil
	},
})

// Sensitive marks the given value as sensitive.
func Sensitive(v cty.Value) (cty.Value, error) {
	return SensitiveFunc.Call([]cty.Value{v})
}

// Nonsensitive removes the sensitive mark from the given value.
func Nonsensitive(v cty.Value) (cty.Value, error) {
	return NonsensitiveFunc.Call([]cty.Value{v})
}

// ---- new file: pkg/scanners/terraform/parser/funcs/string.go ----

// Copied from github.com/hashicorp/terraform/internal/lang/funcs
package funcs

import (
	"regexp"
	"strings"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function"
)

// ReplaceFunc constructs a function that searches a given string for another
// given substring, and replaces each occurrence with a given replacement string.
var ReplaceFunc = function.New(&function.Spec{
	Params: []function.Parameter{
		{
			Name: "str",
			Type: cty.String,
		},
		{
			Name: "substr",
			Type: cty.String,
		},
		{
			Name: "replace",
			Type: cty.String,
		},
	},
	Type: function.StaticReturnType(cty.String),
	Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
		str := args[0].AsString()
		substr := args[1].AsString()
		replace := args[2].AsString()

		// We search/replace using a regexp if the string is surrounded
		// in forward slashes.
		if len(substr) > 1 && substr[0] == '/' && substr[len(substr)-1] == '/' {
			re, err := regexp.Compile(substr[1 : len(substr)-1])
			if err != nil {
				return cty.UnknownVal(cty.String), err
			}

			return cty.StringVal(re.ReplaceAllString(str, replace)), nil
		}

		return cty.StringVal(strings.Replace(str, substr, replace, -1)), nil
	},
})

// Replace searches a given string for another given substring,
// and replaces all occurrences with a given replacement string.
func Replace(str, substr, replace cty.Value) (cty.Value, error) {
	return ReplaceFunc.Call([]cty.Value{str, substr, replace})
}

// ---- new file: pkg/scanners/terraform/parser/functions.go ----

package parser

import (
	"io/fs"

	"github.com/aquasecurity/trivy/pkg/scanners/terraform/parser/funcs"
	"github.com/hashicorp/hcl/v2/ext/tryfunc"
	ctyyaml "github.com/zclconf/go-cty-yaml"
	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function"
	"github.com/zclconf/go-cty/cty/function/stdlib"
)

// Functions returns the set of functions that should be used when evaluating
// expressions in the receiving scope.
// target/baseDir feed the filesystem-backed functions (file, fileset, ...);
// everything else is a pure mapping from Terraform function name to
// implementation, mixing cty's stdlib with the local funcs package.
// NOTE(review): "log", "pow", "signum" and "parseint" are mapped to the cty
// stdlib versions even though local funcs equivalents exist in this package —
// confirm which implementation is intended.
func Functions(target fs.FS, baseDir string) map[string]function.Function {
	return map[string]function.Function{
		"abs":              stdlib.AbsoluteFunc,
		"abspath":          funcs.AbsPathFunc,
		"basename":         funcs.BasenameFunc,
		"base64decode":     funcs.Base64DecodeFunc,
		"base64encode":     funcs.Base64EncodeFunc,
		"base64gzip":       funcs.Base64GzipFunc,
		"base64sha256":     funcs.Base64Sha256Func,
		"base64sha512":     funcs.Base64Sha512Func,
		"bcrypt":           funcs.BcryptFunc,
		"can":              tryfunc.CanFunc,
		"ceil":             stdlib.CeilFunc,
		"chomp":            stdlib.ChompFunc,
		"cidrhost":         funcs.CidrHostFunc,
		"cidrnetmask":      funcs.CidrNetmaskFunc,
		"cidrsubnet":       funcs.CidrSubnetFunc,
		"cidrsubnets":      funcs.CidrSubnetsFunc,
		"coalesce":         funcs.CoalesceFunc,
		"coalescelist":     stdlib.CoalesceListFunc,
		"compact":          stdlib.CompactFunc,
		"concat":           stdlib.ConcatFunc,
		"contains":         stdlib.ContainsFunc,
		"csvdecode":        stdlib.CSVDecodeFunc,
		"dirname":          funcs.DirnameFunc,
		"distinct":         stdlib.DistinctFunc,
		"element":          stdlib.ElementFunc,
		"chunklist":        stdlib.ChunklistFunc,
		"file":             funcs.MakeFileFunc(target, baseDir, false),
		"fileexists":       funcs.MakeFileExistsFunc(baseDir),
		"fileset":          funcs.MakeFileSetFunc(baseDir),
		"filebase64":       funcs.MakeFileFunc(target, baseDir, true),
		"filebase64sha256": funcs.MakeFileBase64Sha256Func(target, baseDir),
		"filebase64sha512": funcs.MakeFileBase64Sha512Func(target, baseDir),
		"filemd5":          funcs.MakeFileMd5Func(target, baseDir),
		"filesha1":         funcs.MakeFileSha1Func(target, baseDir),
		"filesha256":       funcs.MakeFileSha256Func(target, baseDir),
		"filesha512":       funcs.MakeFileSha512Func(target, baseDir),
		"flatten":          stdlib.FlattenFunc,
		"floor":            stdlib.FloorFunc,
		"format":           stdlib.FormatFunc,
		"formatdate":       stdlib.FormatDateFunc,
		"formatlist":       stdlib.FormatListFunc,
		"indent":           stdlib.IndentFunc,
		"index":            funcs.IndexFunc, // stdlib.IndexFunc is not compatible
		"join":             stdlib.JoinFunc,
		"jsondecode":       stdlib.JSONDecodeFunc,
		"jsonencode":       stdlib.JSONEncodeFunc,
		"keys":             stdlib.KeysFunc,
		"length":           funcs.LengthFunc,
		"list":             funcs.ListFunc,
		"log":              stdlib.LogFunc,
		"lookup":           funcs.LookupFunc,
		"lower":            stdlib.LowerFunc,
		"map":              funcs.MapFunc,
		"matchkeys":        funcs.MatchkeysFunc,
		"max":              stdlib.MaxFunc,
		"md5":              funcs.Md5Func,
		"merge":            stdlib.MergeFunc,
		"min":              stdlib.MinFunc,
		"parseint":         stdlib.ParseIntFunc,
		"pathexpand":       funcs.PathExpandFunc,
		"pow":              stdlib.PowFunc,
		"range":            stdlib.RangeFunc,
		"regex":            stdlib.RegexFunc,
		"regexall":         stdlib.RegexAllFunc,
		"replace":          funcs.ReplaceFunc,
		"reverse":          stdlib.ReverseListFunc,
		"rsadecrypt":       funcs.RsaDecryptFunc,
		"setintersection":  stdlib.SetIntersectionFunc,
		"setproduct":       stdlib.SetProductFunc,
		"setsubtract":      stdlib.SetSubtractFunc,
		"setunion":         stdlib.SetUnionFunc,
		"sha1":             funcs.Sha1Func,
		"sha256":           funcs.Sha256Func,
		"sha512":           funcs.Sha512Func,
		"signum":           stdlib.SignumFunc,
		"slice":            stdlib.SliceFunc,
		"sort":             stdlib.SortFunc,
		"split":            stdlib.SplitFunc,
		"strrev":           stdlib.ReverseFunc,
		"substr":           stdlib.SubstrFunc,
		"timestamp":        funcs.TimestampFunc,
		"timeadd":          stdlib.TimeAddFunc,
		"title":            stdlib.TitleFunc,
		"tostring":         funcs.MakeToFunc(cty.String),
		"tonumber":         funcs.MakeToFunc(cty.Number),
		"tobool":           funcs.MakeToFunc(cty.Bool),
		"toset":            funcs.MakeToFunc(cty.Set(cty.DynamicPseudoType)),
		"tolist":           funcs.MakeToFunc(cty.List(cty.DynamicPseudoType)),
		"tomap":            funcs.MakeToFunc(cty.Map(cty.DynamicPseudoType)),
		"transpose":        funcs.TransposeFunc,
		"trim":             stdlib.TrimFunc,
		"trimprefix":       stdlib.TrimPrefixFunc,
		"trimspace":        stdlib.TrimSpaceFunc,
		"trimsuffix":       stdlib.TrimSuffixFunc,
		"try":              tryfunc.TryFunc,
		"upper":            stdlib.UpperFunc,
		"urlencode":        funcs.URLEncodeFunc,
		"uuid":             funcs.UUIDFunc,
		"uuidv5":           funcs.UUIDV5Func,
		"values":           stdlib.ValuesFunc,
		"yamldecode":       ctyyaml.YAMLDecodeFunc,
		"yamlencode":       ctyyaml.YAMLEncodeFunc,
		"zipmap":           stdlib.ZipmapFunc,
	}

}
// ---- new file: pkg/scanners/terraform/parser/load_blocks.go ----

package parser

import (
	"fmt"
	"regexp"
	"strings"
	"time"

	"github.com/aquasecurity/defsec/pkg/terraform"
	"github.com/aquasecurity/defsec/pkg/types"
	"github.com/hashicorp/hcl/v2"
)

// loadBlocksFromFile extracts the top-level HCL blocks from a parsed source
// file along with any tfsec/trivy ignore comments found in its raw bytes.
func loadBlocksFromFile(file sourceFile, moduleSource string) (hcl.Blocks, []terraform.Ignore, error) {
	ignores := parseIgnores(file.file.Bytes, file.path, moduleSource)
	contents, diagnostics := file.file.Body.Content(terraform.Schema)
	if diagnostics != nil && diagnostics.HasErrors() {
		return nil, nil, diagnostics
	}
	if contents == nil {
		return nil, nil, nil
	}
	return contents.Blocks, ignores, nil
}

// parseIgnores scans raw file bytes line-by-line for ignore comments, then
// collapses runs of consecutive block-level ignores so a stack of ignore
// comments above one block all point at the same (lowest) range.
func parseIgnores(data []byte, path string, moduleSource string) []terraform.Ignore {
	var ignores []terraform.Ignore
	for i, line := range strings.Split(string(data), "\n") {
		line = strings.TrimSpace(line)
		lineIgnores := parseIgnoresFromLine(line)
		for _, lineIgnore := range lineIgnores {
			// Ranges are 1-based single lines (i is the 0-based line index).
			lineIgnore.Range = types.NewRange(path, i+1, i+1, moduleSource, nil)
			ignores = append(ignores, lineIgnore)
		}
	}
	// Chain adjacent block ignores: an ignore directly above another adopts
	// the lower one's range so it ultimately targets the following block.
	for a, ignoreA := range ignores {
		if !ignoreA.Block {
			continue
		}
		for _, ignoreB := range ignores {
			if !ignoreB.Block {
				continue
			}
			if ignoreA.Range.GetStartLine()+1 == ignoreB.Range.GetStartLine() {
				ignoreA.Range = ignoreB.Range
				ignores[a] = ignoreA
			}
		}
	}
	return ignores

}

// Comment leaders (//, #, /*) immediately followed by a tfsec: or trivy: tag.
var commentPattern = regexp.MustCompile(`^\s*([/]+|/\*|#)+\s*tfsec:`)
var trivyCommentPattern = regexp.MustCompile(`^\s*([/]+|/\*|#)+\s*trivy:`)

// parseIgnoresFromLine extracts every ignore directive on a single source
// line. A directive in the first token position (i == 0) applies to the
// following block; otherwise it applies to the line it sits on.
func parseIgnoresFromLine(input string) []terraform.Ignore {

	var ignores []terraform.Ignore

	// Normalize a leading comment marker away so the first token starts
	// directly with "tfsec:"/"trivy:".
	input = commentPattern.ReplaceAllString(input, "tfsec:")
	input = trivyCommentPattern.ReplaceAllString(input, "trivy:")

	bits := strings.Split(strings.TrimSpace(input), " ")
	for i, bit := range bits {
		bit := strings.TrimSpace(bit)
		bit = strings.TrimPrefix(bit, "#")
		bit = strings.TrimPrefix(bit, "//")
		bit = strings.TrimPrefix(bit, "/*")

		if strings.HasPrefix(bit, "tfsec:") || strings.HasPrefix(bit, "trivy:") {
			ignore, err := parseIgnoreFromComment(bit)
			if err != nil {
				// Malformed directives are silently skipped (best-effort).
				continue
			}
			ignore.Block = i == 0
			ignores = append(ignores, *ignore)
		}
	}

	return ignores
}

// parseIgnoreFromComment parses one "tfsec:..."/"trivy:..." directive into an
// Ignore, understanding colon-separated key:value pairs such as
// ignore:<rule>[params], exp:<yyyy-mm-dd> and ws:<workspace>.
func parseIgnoreFromComment(input string) (*terraform.Ignore, error) {
	var ignore terraform.Ignore
	if !strings.HasPrefix(input, "tfsec:") && !strings.HasPrefix(input, "trivy:") {
		return nil, fmt.Errorf("invalid ignore")
	}

	// Both "tfsec:" and "trivy:" are 6 bytes long.
	input = input[6:]

	segments := strings.Split(input, ":")

	for i := 0; i < len(segments)-1; i += 2 {
		key := segments[i]
		val := segments[i+1]
		switch key {
		case "ignore":
			ignore.RuleID, ignore.Params = parseIDWithParams(val)
		case "exp":
			parsed, err := time.Parse("2006-01-02", val)
			if err != nil {
				return &ignore, err
			}
			ignore.Expiry = &parsed
		case "ws":
			ignore.Workspace = val
		}
	}

	return &ignore, nil
}

// parseIDWithParams splits "rule[key1=val1,key2=val2]" into the rule ID and
// its parameter map; input without brackets is returned as-is with an empty map.
func parseIDWithParams(input string) (string, map[string]string) {
	params := make(map[string]string)
	if !strings.Contains(input, "[") {
		return input, params
	}
	parts := strings.Split(input, "[")
	id := parts[0]
	paramStr := strings.TrimSuffix(parts[1], "]")
	for _, pair := range strings.Split(paramStr, ",") {
		parts := strings.Split(pair, "=")
		if len(parts) != 2 {
			// Malformed pairs are ignored rather than erroring.
			continue
		}
		params[parts[0]] = parts[1]
	}
	return id, params
}

// ---- new file: pkg/scanners/terraform/parser/load_blocks_test.go ----

package parser

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestParsingDoubleComment verifies that a doubled comment leader ("##") is
// still recognized and that a leading directive is treated as a block ignore.
func TestParsingDoubleComment(t *testing.T) {
	ignores := parseIgnoresFromLine("## tfsec:ignore:abc")
	assert.Equal(t, 1, len(ignores))
	assert.Truef(t, ignores[0].Block, "Expected ignore to be a block")
}

// ---- new file: pkg/scanners/terraform/parser/load_module.go ----

package parser

import (
	"context"
	"errors"
	"fmt"
	"io/fs"
	"path/filepath"
	"strings"

	"github.com/aquasecurity/defsec/pkg/terraform"
	"github.com/aquasecurity/trivy/pkg/scanners/terraform/parser/resolvers"

	"github.com/zclconf/go-cty/cty"
)

// moduleLoadError wraps a module-resolution failure with the module source
// that failed, so duplicate failures for one source can be de-duplicated.
type moduleLoadError struct {
	source string
	err    error
}

func (m *moduleLoadError) Error() string {
	return fmt.Sprintf("failed to load module '%s': %s", m.source, m.err)
}

// ModuleDefinition ties a resolved module's name, path and filesystem to the
// block that declared it and the parser that will process it.
type ModuleDefinition struct {
	Name       string
	Path       string
	FileSystem fs.FS
	Definition *terraform.Block
	Parser     *Parser
	External   bool
}

// LoadModules reads all module blocks and loads the underlying modules, adding blocks to e.moduleBlocks
func (e *evaluator) loadModules(ctx context.Context) []*ModuleDefinition {

	blocks := e.blocks

	var moduleDefinitions []*ModuleDefinition

	expanded := e.expandBlocks(blocks.OfType("module"))

	var loadErrors []*moduleLoadError

	for _, moduleBlock := range expanded {
		if moduleBlock.Label() == "" {
			continue
		}
		moduleDefinition, err := e.loadModule(ctx, moduleBlock)
		if err != nil {
			var loadErr *moduleLoadError
			if errors.As(err, &loadErr) {
				// Only report each failing module source once.
				var found bool
				for _, fm := range loadErrors {
					if fm.source == loadErr.source {
						found = true
						break
					}
				}
				if !found {
					loadErrors = append(loadErrors, loadErr)
				}
				continue
			}
			// NOTE(review): the '%s' placeholder is filled with err, not a
			// module name — confirm the intended log message shape.
			e.debug.Log("Failed to load module '%s'. Maybe try 'terraform init'?", err)
			continue
		}
		e.debug.Log("Loaded module '%s' from '%s'.", moduleDefinition.Name, moduleDefinition.Path)
		moduleDefinitions = append(moduleDefinitions, moduleDefinition)
	}

	return moduleDefinitions
}

// takes in a module "x" {} block and loads resources etc. into e.moduleBlocks - additionally returns variables to add to ["module.x.*"] variables
func (e *evaluator) loadModule(ctx context.Context, b *terraform.Block) (*ModuleDefinition, error) {

	metadata := b.GetMetadata()

	if b.Label() == "" {
		return nil, fmt.Errorf("module without label at %s", metadata.Range())
	}

	// The module's source is taken from its "source" attribute; only string
	// values are accepted.
	var source string
	attrs := b.Attributes()
	for _, attr := range attrs {
		if attr.Name() == "source" {
			sourceVal := attr.Value()
			if sourceVal.Type() == cty.String {
				source = sourceVal.AsString()
			}
		}
	}
	if source == "" {
		return nil, fmt.Errorf("could not read module source attribute at %s", metadata.Range().String())
	}

	// Prefer a module already installed by 'terraform init'.
	if def, err := e.loadModuleFromTerraformCache(ctx, b, source); err == nil {
		e.debug.Log("found module '%s' in .terraform/modules", source)
		return def, nil
	}

	// we don't have the module installed via 'terraform init' so we need to grab it...
	return e.loadExternalModule(ctx, b, source)
}

func (e *evaluator) loadModuleFromTerraformCache(ctx context.Context, b *terraform.Block, source string) (*ModuleDefinition, error) {
	var modulePath string
	if e.moduleMetadata != nil {
		// if we have module metadata we can parse all the modules as they'll be cached locally!
+ name := b.ModuleName() + for _, module := range e.moduleMetadata.Modules { + if module.Key == name { + modulePath = filepath.Clean(filepath.Join(e.projectRootPath, module.Dir)) + break + } + } + } + if modulePath == "" { + return nil, fmt.Errorf("failed to load module from .terraform/modules") + } + if strings.HasPrefix(source, ".") { + source = "" + } + + if prefix, relativeDir, ok := strings.Cut(source, "//"); ok && !strings.HasSuffix(prefix, ":") && strings.Count(prefix, "/") == 2 { + if !strings.HasSuffix(modulePath, relativeDir) { + modulePath = fmt.Sprintf("%s/%s", modulePath, relativeDir) + } + } + + e.debug.Log("Module '%s' resolved to path '%s' in filesystem '%s' using modules.json", b.FullName(), modulePath, e.filesystem) + moduleParser := e.parentParser.newModuleParser(e.filesystem, source, modulePath, b.Label(), b) + if err := moduleParser.ParseFS(ctx, modulePath); err != nil { + return nil, err + } + return &ModuleDefinition{ + Name: b.Label(), + Path: modulePath, + Definition: b, + Parser: moduleParser, + FileSystem: e.filesystem, + }, nil +} + +func (e *evaluator) loadExternalModule(ctx context.Context, b *terraform.Block, source string) (*ModuleDefinition, error) { + + e.debug.Log("locating non-initialised module '%s'...", source) + + version := b.GetAttribute("version").AsStringValueOrDefault("", b).Value() + opt := resolvers.Options{ + Source: source, + OriginalSource: source, + Version: version, + OriginalVersion: version, + WorkingDir: e.projectRootPath, + Name: b.FullName(), + ModulePath: e.modulePath, + DebugLogger: e.debug.Extend("resolver"), + AllowDownloads: e.allowDownloads, + SkipCache: e.skipCachedModules, + } + + filesystem, prefix, path, err := resolveModule(ctx, e.filesystem, opt) + if err != nil { + return nil, err + } + prefix = filepath.Join(e.parentParser.moduleSource, prefix) + e.debug.Log("Module '%s' resolved to path '%s' in filesystem '%s' with prefix '%s'", b.FullName(), path, filesystem, prefix) + moduleParser := 
e.parentParser.newModuleParser(filesystem, prefix, path, b.Label(), b) + if err := moduleParser.ParseFS(ctx, path); err != nil { + return nil, err + } + return &ModuleDefinition{ + Name: b.Label(), + Path: path, + Definition: b, + Parser: moduleParser, + FileSystem: filesystem, + External: true, + }, nil +} diff --git a/pkg/scanners/terraform/parser/load_module_metadata.go b/pkg/scanners/terraform/parser/load_module_metadata.go new file mode 100644 index 000000000000..9d06402a76fc --- /dev/null +++ b/pkg/scanners/terraform/parser/load_module_metadata.go @@ -0,0 +1,33 @@ +package parser + +import ( + "encoding/json" + "io/fs" + "path/filepath" +) + +type modulesMetadata struct { + Modules []struct { + Key string `json:"Key"` + Source string `json:"Source"` + Version string `json:"Version"` + Dir string `json:"Dir"` + } `json:"Modules"` +} + +func loadModuleMetadata(target fs.FS, fullPath string) (*modulesMetadata, string, error) { + metadataPath := filepath.Join(fullPath, ".terraform/modules/modules.json") + + f, err := target.Open(metadataPath) + if err != nil { + return nil, metadataPath, err + } + defer func() { _ = f.Close() }() + + var metadata modulesMetadata + if err := json.NewDecoder(f).Decode(&metadata); err != nil { + return nil, metadataPath, err + } + + return &metadata, metadataPath, nil +} diff --git a/pkg/scanners/terraform/parser/load_vars.go b/pkg/scanners/terraform/parser/load_vars.go new file mode 100644 index 000000000000..58f67ce93910 --- /dev/null +++ b/pkg/scanners/terraform/parser/load_vars.go @@ -0,0 +1,83 @@ +package parser + +import ( + "fmt" + "io/fs" + "os" + "path/filepath" + "strings" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclsyntax" + hcljson "github.com/hashicorp/hcl/v2/json" + "github.com/zclconf/go-cty/cty" +) + +func loadTFVars(srcFS fs.FS, filenames []string) (map[string]cty.Value, error) { + combinedVars := make(map[string]cty.Value) + + for _, env := range os.Environ() 
{ + split := strings.Split(env, "=") + key := split[0] + if !strings.HasPrefix(key, "TF_VAR_") { + continue + } + key = strings.TrimPrefix(key, "TF_VAR_") + var val string + if len(split) > 1 { + val = split[1] + } + combinedVars[key] = cty.StringVal(val) + } + + for _, filename := range filenames { + vars, err := loadTFVarsFile(srcFS, filename) + if err != nil { + return nil, fmt.Errorf("failed to load tfvars from %s: %w", filename, err) + } + for k, v := range vars { + combinedVars[k] = v + } + } + + return combinedVars, nil +} + +func loadTFVarsFile(srcFS fs.FS, filename string) (map[string]cty.Value, error) { + inputVars := make(map[string]cty.Value) + if filename == "" { + return inputVars, nil + } + + src, err := fs.ReadFile(srcFS, filepath.ToSlash(filename)) + if err != nil { + return nil, err + } + + var attrs hcl.Attributes + if strings.HasSuffix(filename, ".json") { + variableFile, err := hcljson.Parse(src, filename) + if err != nil { + return nil, err + } + attrs, err = variableFile.Body.JustAttributes() + if err != nil { + return nil, err + } + } else { + variableFile, err := hclsyntax.ParseConfig(src, filename, hcl.Pos{Line: 1, Column: 1}) + if err != nil { + return nil, err + } + attrs, err = variableFile.Body.JustAttributes() + if err != nil { + return nil, err + } + } + + for _, attr := range attrs { + inputVars[attr.Name], _ = attr.Expr.Value(&hcl.EvalContext{}) + } + + return inputVars, nil +} diff --git a/pkg/scanners/terraform/parser/load_vars_test.go b/pkg/scanners/terraform/parser/load_vars_test.go new file mode 100644 index 000000000000..28d0dcbada67 --- /dev/null +++ b/pkg/scanners/terraform/parser/load_vars_test.go @@ -0,0 +1,46 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/trivy-iac/test/testutil" + + "github.com/zclconf/go-cty/cty" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_TFVarsFile(t *testing.T) { + t.Run("tfvars file", 
func(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "test.tfvars": `instance_type = "t2.large"`, + }) + + vars, err := loadTFVars(fs, []string{"test.tfvars"}) + require.NoError(t, err) + assert.Equal(t, "t2.large", vars["instance_type"].AsString()) + }) + + t.Run("tfvars json file", func(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "test.tfvars.json": `{ + "variable": { + "foo": { + "default": "bar" + }, + "baz": "qux" + }, + "foo2": true, + "foo3": 3 +}`, + }) + + vars, err := loadTFVars(fs, []string{"test.tfvars.json"}) + require.NoError(t, err) + assert.Equal(t, "bar", vars["variable"].GetAttr("foo").GetAttr("default").AsString()) + assert.Equal(t, "qux", vars["variable"].GetAttr("baz").AsString()) + assert.Equal(t, true, vars["foo2"].True()) + assert.Equal(t, true, vars["foo3"].Equals(cty.NumberIntVal(3)).True()) + }) +} diff --git a/pkg/scanners/terraform/parser/module_retrieval.go b/pkg/scanners/terraform/parser/module_retrieval.go new file mode 100644 index 000000000000..66127715f513 --- /dev/null +++ b/pkg/scanners/terraform/parser/module_retrieval.go @@ -0,0 +1,33 @@ +package parser + +import ( + "context" + "fmt" + "io/fs" + + "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser/resolvers" +) + +type ModuleResolver interface { + Resolve(context.Context, fs.FS, resolvers.Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) +} + +var defaultResolvers = []ModuleResolver{ + resolvers.Cache, + resolvers.Local, + resolvers.Remote, + resolvers.Registry, +} + +func resolveModule(ctx context.Context, current fs.FS, opt resolvers.Options) (filesystem fs.FS, sourcePrefix string, downloadPath string, err error) { + opt.Debug("Resolving module '%s' with source: '%s'...", opt.Name, opt.Source) + for _, resolver := range defaultResolvers { + if filesystem, prefix, path, applies, err := resolver.Resolve(ctx, current, opt); err != nil { + return nil, "", "", err + } else if applies 
{ + opt.Debug("Module path is %s", path) + return filesystem, prefix, path, nil + } + } + return nil, "", "", fmt.Errorf("failed to resolve module '%s' with source: %s", opt.Name, opt.Source) +} diff --git a/pkg/scanners/terraform/parser/option.go b/pkg/scanners/terraform/parser/option.go new file mode 100644 index 000000000000..a37e20da1888 --- /dev/null +++ b/pkg/scanners/terraform/parser/option.go @@ -0,0 +1,67 @@ +package parser + +import ( + "io/fs" + + "github.com/aquasecurity/defsec/pkg/scanners/options" +) + +type ConfigurableTerraformParser interface { + options.ConfigurableParser + SetTFVarsPaths(...string) + SetStopOnHCLError(bool) + SetWorkspaceName(string) + SetAllowDownloads(bool) + SetSkipCachedModules(bool) + SetConfigsFS(fsys fs.FS) +} + +type Option func(p ConfigurableTerraformParser) + +func OptionWithTFVarsPaths(paths ...string) options.ParserOption { + return func(p options.ConfigurableParser) { + if tf, ok := p.(ConfigurableTerraformParser); ok { + tf.SetTFVarsPaths(paths...) 
+ } + } +} + +func OptionStopOnHCLError(stop bool) options.ParserOption { + return func(p options.ConfigurableParser) { + if tf, ok := p.(ConfigurableTerraformParser); ok { + tf.SetStopOnHCLError(stop) + } + } +} + +func OptionWithWorkspaceName(workspaceName string) options.ParserOption { + return func(p options.ConfigurableParser) { + if tf, ok := p.(ConfigurableTerraformParser); ok { + tf.SetWorkspaceName(workspaceName) + } + } +} + +func OptionWithDownloads(allowed bool) options.ParserOption { + return func(p options.ConfigurableParser) { + if tf, ok := p.(ConfigurableTerraformParser); ok { + tf.SetAllowDownloads(allowed) + } + } +} + +func OptionWithSkipCachedModules(b bool) options.ParserOption { + return func(p options.ConfigurableParser) { + if tf, ok := p.(ConfigurableTerraformParser); ok { + tf.SetSkipCachedModules(b) + } + } +} + +func OptionWithConfigsFS(fsys fs.FS) options.ParserOption { + return func(s options.ConfigurableParser) { + if p, ok := s.(ConfigurableTerraformParser); ok { + p.SetConfigsFS(fsys) + } + } +} diff --git a/pkg/scanners/terraform/parser/parser.go b/pkg/scanners/terraform/parser/parser.go new file mode 100644 index 000000000000..2521fb5dbcb6 --- /dev/null +++ b/pkg/scanners/terraform/parser/parser.go @@ -0,0 +1,349 @@ +package parser + +import ( + "context" + "io" + "io/fs" + "os" + "path/filepath" + "sort" + "strings" + "time" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/terraform" + tfcontext "github.com/aquasecurity/defsec/pkg/terraform/context" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclparse" + "github.com/zclconf/go-cty/cty" + + "github.com/aquasecurity/trivy/pkg/extrafs" +) + +type sourceFile struct { + file *hcl.File + path string +} + +type Metrics struct { + Timings struct { + DiskIODuration time.Duration + ParseDuration time.Duration + } + 
Counts struct { + Blocks int + Modules int + ModuleDownloads int + Files int + } +} + +var _ ConfigurableTerraformParser = (*Parser)(nil) + +// Parser is a tool for parsing terraform templates at a given file system location +type Parser struct { + projectRoot string + moduleName string + modulePath string + moduleSource string + moduleFS fs.FS + moduleBlock *terraform.Block + files []sourceFile + tfvarsPaths []string + stopOnHCLError bool + workspaceName string + underlying *hclparse.Parser + children []*Parser + metrics Metrics + options []options.ParserOption + debug debug.Logger + allowDownloads bool + skipCachedModules bool + fsMap map[string]fs.FS + skipRequired bool + configsFS fs.FS +} + +func (p *Parser) SetDebugWriter(writer io.Writer) { + p.debug = debug.New(writer, "terraform", "parser", "<"+p.moduleName+">") +} + +func (p *Parser) SetTFVarsPaths(s ...string) { + p.tfvarsPaths = s +} + +func (p *Parser) SetStopOnHCLError(b bool) { + p.stopOnHCLError = b +} + +func (p *Parser) SetWorkspaceName(s string) { + p.workspaceName = s +} + +func (p *Parser) SetAllowDownloads(b bool) { + p.allowDownloads = b +} + +func (p *Parser) SetSkipCachedModules(b bool) { + p.skipCachedModules = b +} + +func (p *Parser) SetSkipRequiredCheck(b bool) { + p.skipRequired = b +} + +func (p *Parser) SetConfigsFS(fsys fs.FS) { + p.configsFS = fsys +} + +// New creates a new Parser +func New(moduleFS fs.FS, moduleSource string, opts ...options.ParserOption) *Parser { + p := &Parser{ + workspaceName: "default", + underlying: hclparse.NewParser(), + options: opts, + moduleName: "root", + allowDownloads: true, + moduleFS: moduleFS, + moduleSource: moduleSource, + configsFS: moduleFS, + } + + for _, option := range opts { + option(p) + } + + return p +} + +func (p *Parser) newModuleParser(moduleFS fs.FS, moduleSource, modulePath, moduleName string, moduleBlock *terraform.Block) *Parser { + mp := New(moduleFS, moduleSource) + mp.modulePath = modulePath + mp.moduleBlock = moduleBlock + 
mp.moduleName = moduleName + mp.projectRoot = p.projectRoot + p.children = append(p.children, mp) + for _, option := range p.options { + option(mp) + } + return mp +} + +func (p *Parser) Metrics() Metrics { + total := p.metrics + for _, child := range p.children { + metrics := child.Metrics() + total.Counts.Files += metrics.Counts.Files + total.Counts.Blocks += metrics.Counts.Blocks + total.Timings.ParseDuration += metrics.Timings.ParseDuration + total.Timings.DiskIODuration += metrics.Timings.DiskIODuration + // NOTE: we don't add module count - this has already propagated to the top level + } + return total +} + +func (p *Parser) ParseFile(_ context.Context, fullPath string) error { + diskStart := time.Now() + + isJSON := strings.HasSuffix(fullPath, ".tf.json") + isHCL := strings.HasSuffix(fullPath, ".tf") + if !isJSON && !isHCL { + return nil + } + + p.debug.Log("Parsing '%s'...", fullPath) + f, err := p.moduleFS.Open(filepath.ToSlash(fullPath)) + if err != nil { + return err + } + defer func() { _ = f.Close() }() + + data, err := io.ReadAll(f) + if err != nil { + return err + } + p.metrics.Timings.DiskIODuration += time.Since(diskStart) + if dir := filepath.Dir(fullPath); p.projectRoot == "" { + p.debug.Log("Setting project/module root to '%s'", dir) + p.projectRoot = dir + p.modulePath = dir + } + + start := time.Now() + var file *hcl.File + var diag hcl.Diagnostics + + if isHCL { + file, diag = p.underlying.ParseHCL(data, fullPath) + } else { + file, diag = p.underlying.ParseJSON(data, fullPath) + } + if diag != nil && diag.HasErrors() { + return diag + } + p.files = append(p.files, sourceFile{ + file: file, + path: fullPath, + }) + p.metrics.Counts.Files++ + p.metrics.Timings.ParseDuration += time.Since(start) + p.debug.Log("Added file %s.", fullPath) + return nil +} + +// ParseFS parses a root module, where it exists at the root of the provided filesystem +func (p *Parser) ParseFS(ctx context.Context, dir string) error { + + dir = filepath.Clean(dir) + + if 
p.projectRoot == "" { + p.debug.Log("Setting project/module root to '%s'", dir) + p.projectRoot = dir + p.modulePath = dir + } + + slashed := filepath.ToSlash(dir) + p.debug.Log("Parsing FS from '%s'", slashed) + fileInfos, err := fs.ReadDir(p.moduleFS, slashed) + if err != nil { + return err + } + + var paths []string + for _, info := range fileInfos { + realPath := filepath.Join(dir, info.Name()) + if info.Type()&os.ModeSymlink != 0 { + extra, ok := p.moduleFS.(extrafs.FS) + if !ok { + // we can't handle symlinks in this fs type for now + p.debug.Log("Cannot resolve symlink '%s' in '%s' for this fs type", info.Name(), dir) + continue + } + realPath, err = extra.ResolveSymlink(info.Name(), dir) + if err != nil { + p.debug.Log("Failed to resolve symlink '%s' in '%s': %s", info.Name(), dir, err) + continue + } + info, err := extra.Stat(realPath) + if err != nil { + p.debug.Log("Failed to stat resolved symlink '%s': %s", realPath, err) + continue + } + if info.IsDir() { + continue + } + p.debug.Log("Resolved symlink '%s' in '%s' to '%s'", info.Name(), dir, realPath) + } else if info.IsDir() { + continue + } + paths = append(paths, realPath) + } + sort.Strings(paths) + for _, path := range paths { + if err := p.ParseFile(ctx, path); err != nil { + if p.stopOnHCLError { + return err + } + p.debug.Log("error parsing '%s': %s", path, err) + continue + } + } + + return nil +} + +func (p *Parser) EvaluateAll(ctx context.Context) (terraform.Modules, cty.Value, error) { + + p.debug.Log("Evaluating module...") + + if len(p.files) == 0 { + p.debug.Log("No files found, nothing to do.") + return nil, cty.NilVal, nil + } + + blocks, ignores, err := p.readBlocks(p.files) + if err != nil { + return nil, cty.NilVal, err + } + p.debug.Log("Read %d block(s) and %d ignore(s) for module '%s' (%d file[s])...", len(blocks), len(ignores), p.moduleName, len(p.files)) + + p.metrics.Counts.Blocks = len(blocks) + + var inputVars map[string]cty.Value + if p.moduleBlock != nil { + inputVars = 
p.moduleBlock.Values().AsValueMap() + p.debug.Log("Added %d input variables from module definition.", len(inputVars)) + } else { + inputVars, err = loadTFVars(p.configsFS, p.tfvarsPaths) + if err != nil { + return nil, cty.NilVal, err + } + p.debug.Log("Added %d variables from tfvars.", len(inputVars)) + } + + modulesMetadata, metadataPath, err := loadModuleMetadata(p.moduleFS, p.projectRoot) + if err != nil { + p.debug.Log("Error loading module metadata: %s.", err) + } else { + p.debug.Log("Loaded module metadata for %d module(s) from '%s'.", len(modulesMetadata.Modules), metadataPath) + } + + workingDir, err := os.Getwd() + if err != nil { + return nil, cty.NilVal, err + } + p.debug.Log("Working directory for module evaluation is '%s'", workingDir) + evaluator := newEvaluator( + p.moduleFS, + p, + p.projectRoot, + p.modulePath, + workingDir, + p.moduleName, + blocks, + inputVars, + modulesMetadata, + p.workspaceName, + ignores, + p.debug.Extend("evaluator"), + p.allowDownloads, + p.skipCachedModules, + ) + modules, fsMap, parseDuration := evaluator.EvaluateAll(ctx) + p.metrics.Counts.Modules = len(modules) + p.metrics.Timings.ParseDuration = parseDuration + p.debug.Log("Finished parsing module '%s'.", p.moduleName) + p.fsMap = fsMap + return modules, evaluator.exportOutputs(), nil +} + +func (p *Parser) GetFilesystemMap() map[string]fs.FS { + if p.fsMap == nil { + return make(map[string]fs.FS) + } + return p.fsMap +} + +func (p *Parser) readBlocks(files []sourceFile) (terraform.Blocks, terraform.Ignores, error) { + var blocks terraform.Blocks + var ignores terraform.Ignores + moduleCtx := tfcontext.NewContext(&hcl.EvalContext{}, nil) + for _, file := range files { + fileBlocks, fileIgnores, err := loadBlocksFromFile(file, p.moduleSource) + if err != nil { + if p.stopOnHCLError { + return nil, nil, err + } + p.debug.Log("Encountered HCL parse error: %s", err) + continue + } + for _, fileBlock := range fileBlocks { + blocks = append(blocks, 
terraform.NewBlock(fileBlock, moduleCtx, p.moduleBlock, nil, p.moduleSource, p.moduleFS)) + } + ignores = append(ignores, fileIgnores...) + } + + sortBlocksByHierarchy(blocks) + return blocks, ignores, nil +} diff --git a/pkg/scanners/terraform/parser/parser_integration_test.go b/pkg/scanners/terraform/parser/parser_integration_test.go new file mode 100644 index 000000000000..ba7dd82c7bd7 --- /dev/null +++ b/pkg/scanners/terraform/parser/parser_integration_test.go @@ -0,0 +1,51 @@ +package parser + +import ( + "context" + "testing" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/require" +) + +func Test_DefaultRegistry(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test in short mode") + } + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` +module "registry" { + source = "terraform-aws-modules/vpc/aws" +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true), OptionWithSkipCachedModules(true)) + if err := parser.ParseFS(context.TODO(), "code"); err != nil { + t.Fatal(err) + } + modules, _, err := parser.EvaluateAll(context.TODO()) + require.NoError(t, err) + require.Len(t, modules, 2) +} + +func Test_SpecificRegistry(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test in short mode") + } + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` +module "registry" { + source = "registry.terraform.io/terraform-aws-modules/vpc/aws" +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true), OptionWithSkipCachedModules(true)) + if err := parser.ParseFS(context.TODO(), "code"); err != nil { + t.Fatal(err) + } + modules, _, err := parser.EvaluateAll(context.TODO()) + require.NoError(t, err) + require.Len(t, modules, 2) +} diff --git a/pkg/scanners/terraform/parser/parser_test.go b/pkg/scanners/terraform/parser/parser_test.go new file mode 100644 index 000000000000..21ee4ffd381a --- /dev/null +++ 
b/pkg/scanners/terraform/parser/parser_test.go @@ -0,0 +1,1141 @@ +package parser + +import ( + "context" + "os" + "sort" + "testing" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/zclconf/go-cty/cty" +) + +func Test_BasicParsing(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "test.tf": ` + +locals { + proxy = var.cats_mother +} + +variable "cats_mother" { + default = "boots" +} + +provider "cats" { + +} + +moved { + +} + +import { + to = cats_cat.mittens + id = "mittens" +} + +resource "cats_cat" "mittens" { + name = "mittens" + special = true +} + +resource "cats_kitten" "the-great-destroyer" { + name = "the great destroyer" + parent = cats_cat.mittens.name +} + +data "cats_cat" "the-cats-mother" { + name = local.proxy +} + +check "cats_mittens_is_special" { + data "cats_cat" "mittens" { + name = "mittens" + } + + assert { + condition = data.cats_cat.mittens.special == true + error_message = "${data.cats_cat.mittens.name} must be special" + } +} + +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + modules, _, err := parser.EvaluateAll(context.TODO()) + require.NoError(t, err) + + blocks := modules[0].GetBlocks() + + // variable + variables := blocks.OfType("variable") + require.Len(t, variables, 1) + assert.Equal(t, "variable", variables[0].Type()) + require.Len(t, variables[0].Labels(), 1) + assert.Equal(t, "cats_mother", variables[0].TypeLabel()) + defaultVal := variables[0].GetAttribute("default") + require.NotNil(t, defaultVal) + assert.Equal(t, cty.String, defaultVal.Value().Type()) + assert.Equal(t, "boots", defaultVal.Value().AsString()) + + // provider + providerBlocks := blocks.OfType("provider") + require.Len(t, providerBlocks, 1) + assert.Equal(t, "provider", 
providerBlocks[0].Type()) + require.Len(t, providerBlocks[0].Labels(), 1) + assert.Equal(t, "cats", providerBlocks[0].TypeLabel()) + + // resources + resourceBlocks := blocks.OfType("resource") + + sort.Slice(resourceBlocks, func(i, j int) bool { + return resourceBlocks[i].TypeLabel() < resourceBlocks[j].TypeLabel() + }) + + require.Len(t, resourceBlocks, 2) + require.Len(t, resourceBlocks[0].Labels(), 2) + + assert.Equal(t, "resource", resourceBlocks[0].Type()) + assert.Equal(t, "cats_cat", resourceBlocks[0].TypeLabel()) + assert.Equal(t, "mittens", resourceBlocks[0].NameLabel()) + + assert.Equal(t, "mittens", resourceBlocks[0].GetAttribute("name").Value().AsString()) + assert.True(t, resourceBlocks[0].GetAttribute("special").Value().True()) + + assert.Equal(t, "resource", resourceBlocks[1].Type()) + assert.Equal(t, "cats_kitten", resourceBlocks[1].TypeLabel()) + assert.Equal(t, "the great destroyer", resourceBlocks[1].GetAttribute("name").Value().AsString()) + assert.Equal(t, "mittens", resourceBlocks[1].GetAttribute("parent").Value().AsString()) + + // import + importBlocks := blocks.OfType("import") + + assert.Equal(t, "import", importBlocks[0].Type()) + require.NotNil(t, importBlocks[0].GetAttribute("to")) + assert.Equal(t, "mittens", importBlocks[0].GetAttribute("id").Value().AsString()) + + // data + dataBlocks := blocks.OfType("data") + require.Len(t, dataBlocks, 1) + require.Len(t, dataBlocks[0].Labels(), 2) + + assert.Equal(t, "data", dataBlocks[0].Type()) + assert.Equal(t, "cats_cat", dataBlocks[0].TypeLabel()) + assert.Equal(t, "the-cats-mother", dataBlocks[0].NameLabel()) + + assert.Equal(t, "boots", dataBlocks[0].GetAttribute("name").Value().AsString()) + + // check + checkBlocks := blocks.OfType("check") + require.Len(t, checkBlocks, 1) + require.Len(t, checkBlocks[0].Labels(), 1) + + assert.Equal(t, "check", checkBlocks[0].Type()) + assert.Equal(t, "cats_mittens_is_special", checkBlocks[0].TypeLabel()) + + require.NotNil(t, 
checkBlocks[0].GetBlock("data")) + require.NotNil(t, checkBlocks[0].GetBlock("assert")) +} + +func Test_Modules(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` +module "my-mod" { + source = "../module" + input = "ok" +} + +output "result" { + value = module.my-mod.mod_result +} +`, + "module/module.tf": ` +variable "input" { + default = "?" +} + +output "mod_result" { + value = var.input +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true), options.ParserWithDebug(os.Stderr)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + + require.Len(t, modules, 2) + rootModule := modules[0] + childModule := modules[1] + + moduleBlocks := rootModule.GetBlocks().OfType("module") + require.Len(t, moduleBlocks, 1) + + assert.Equal(t, "module", moduleBlocks[0].Type()) + assert.Equal(t, "module.my-mod", moduleBlocks[0].FullName()) + inputAttr := moduleBlocks[0].GetAttribute("input") + require.NotNil(t, inputAttr) + require.Equal(t, cty.String, inputAttr.Value().Type()) + assert.Equal(t, "ok", inputAttr.Value().AsString()) + + rootOutputs := rootModule.GetBlocks().OfType("output") + require.Len(t, rootOutputs, 1) + assert.Equal(t, "output.result", rootOutputs[0].FullName()) + valAttr := rootOutputs[0].GetAttribute("value") + require.NotNil(t, valAttr) + require.Equal(t, cty.String, valAttr.Type()) + assert.Equal(t, "ok", valAttr.Value().AsString()) + + childOutputs := childModule.GetBlocks().OfType("output") + require.Len(t, childOutputs, 1) + assert.Equal(t, "module.my-mod.output.mod_result", childOutputs[0].FullName()) + childValAttr := childOutputs[0].GetAttribute("value") + require.NotNil(t, childValAttr) + require.Equal(t, cty.String, childValAttr.Type()) + assert.Equal(t, "ok", childValAttr.Value().AsString()) + +} + +func Test_NestedParentModule(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` 
+module "my-mod" { + source = "../." + input = "ok" +} + +output "result" { + value = module.my-mod.mod_result +} +`, + "root.tf": ` +variable "input" { + default = "?" +} + +output "mod_result" { + value = var.input +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + require.Len(t, modules, 2) + rootModule := modules[0] + childModule := modules[1] + + moduleBlocks := rootModule.GetBlocks().OfType("module") + require.Len(t, moduleBlocks, 1) + + assert.Equal(t, "module", moduleBlocks[0].Type()) + assert.Equal(t, "module.my-mod", moduleBlocks[0].FullName()) + inputAttr := moduleBlocks[0].GetAttribute("input") + require.NotNil(t, inputAttr) + require.Equal(t, cty.String, inputAttr.Value().Type()) + assert.Equal(t, "ok", inputAttr.Value().AsString()) + + rootOutputs := rootModule.GetBlocks().OfType("output") + require.Len(t, rootOutputs, 1) + assert.Equal(t, "output.result", rootOutputs[0].FullName()) + valAttr := rootOutputs[0].GetAttribute("value") + require.NotNil(t, valAttr) + require.Equal(t, cty.String, valAttr.Type()) + assert.Equal(t, "ok", valAttr.Value().AsString()) + + childOutputs := childModule.GetBlocks().OfType("output") + require.Len(t, childOutputs, 1) + assert.Equal(t, "module.my-mod.output.mod_result", childOutputs[0].FullName()) + childValAttr := childOutputs[0].GetAttribute("value") + require.NotNil(t, childValAttr) + require.Equal(t, cty.String, childValAttr.Type()) + assert.Equal(t, "ok", childValAttr.Value().AsString()) +} + +func Test_UndefinedModuleOutputReference(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` +resource "something" "blah" { + value = module.x.y +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + modules, _, err := parser.EvaluateAll(context.TODO()) + 
assert.NoError(t, err) + require.Len(t, modules, 1) + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("something") + require.Len(t, blocks, 1) + block := blocks[0] + + attr := block.GetAttribute("value") + require.NotNil(t, attr) + + assert.Equal(t, false, attr.IsResolvable()) +} + +func Test_UndefinedModuleOutputReferenceInSlice(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` +resource "something" "blah" { + value = ["first", module.x.y, "last"] +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + require.Len(t, modules, 1) + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("something") + require.Len(t, blocks, 1) + block := blocks[0] + + attr := block.GetAttribute("value") + require.NotNil(t, attr) + + assert.Equal(t, true, attr.IsResolvable()) + + values := attr.AsStringValueSliceOrEmpty() + require.Len(t, values, 3) + + assert.Equal(t, "first", values[0].Value()) + assert.Equal(t, true, values[0].GetMetadata().IsResolvable()) + + assert.Equal(t, false, values[1].GetMetadata().IsResolvable()) + + assert.Equal(t, "last", values[2].Value()) + assert.Equal(t, true, values[2].GetMetadata().IsResolvable()) +} + +func Test_TemplatedSliceValue(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` + +variable "x" { + default = "hello" +} + +resource "something" "blah" { + value = ["first", "${var.x}-${var.x}", "last"] +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + require.Len(t, modules, 1) + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("something") + require.Len(t, blocks, 1) + block := blocks[0] + + attr := 
block.GetAttribute("value") + require.NotNil(t, attr) + + assert.Equal(t, true, attr.IsResolvable()) + + values := attr.AsStringValueSliceOrEmpty() + require.Len(t, values, 3) + + assert.Equal(t, "first", values[0].Value()) + assert.Equal(t, true, values[0].GetMetadata().IsResolvable()) + + assert.Equal(t, "hello-hello", values[1].Value()) + assert.Equal(t, true, values[1].GetMetadata().IsResolvable()) + + assert.Equal(t, "last", values[2].Value()) + assert.Equal(t, true, values[2].GetMetadata().IsResolvable()) +} + +func Test_SliceOfVars(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` + +variable "x" { + default = "1" +} + +variable "y" { + default = "2" +} + +resource "something" "blah" { + value = [var.x, var.y] +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + require.Len(t, modules, 1) + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("something") + require.Len(t, blocks, 1) + block := blocks[0] + + attr := block.GetAttribute("value") + require.NotNil(t, attr) + + assert.Equal(t, true, attr.IsResolvable()) + + values := attr.AsStringValueSliceOrEmpty() + require.Len(t, values, 2) + + assert.Equal(t, "1", values[0].Value()) + assert.Equal(t, true, values[0].GetMetadata().IsResolvable()) + + assert.Equal(t, "2", values[1].Value()) + assert.Equal(t, true, values[1].GetMetadata().IsResolvable()) +} + +func Test_VarSlice(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` + +variable "x" { + default = ["a", "b", "c"] +} + +resource "something" "blah" { + value = var.x +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + require.Len(t, modules, 1) + rootModule := 
modules[0] + + blocks := rootModule.GetResourcesByType("something") + require.Len(t, blocks, 1) + block := blocks[0] + + attr := block.GetAttribute("value") + require.NotNil(t, attr) + + assert.Equal(t, true, attr.IsResolvable()) + + values := attr.AsStringValueSliceOrEmpty() + require.Len(t, values, 3) + + assert.Equal(t, "a", values[0].Value()) + assert.Equal(t, true, values[0].GetMetadata().IsResolvable()) + + assert.Equal(t, "b", values[1].Value()) + assert.Equal(t, true, values[1].GetMetadata().IsResolvable()) + + assert.Equal(t, "c", values[2].Value()) + assert.Equal(t, true, values[2].GetMetadata().IsResolvable()) +} + +func Test_LocalSliceNested(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` + +variable "x" { + default = "a" +} + +locals { + y = [var.x, "b", "c"] +} + +resource "something" "blah" { + value = local.y +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + require.Len(t, modules, 1) + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("something") + require.Len(t, blocks, 1) + block := blocks[0] + + attr := block.GetAttribute("value") + require.NotNil(t, attr) + + assert.Equal(t, true, attr.IsResolvable()) + + values := attr.AsStringValueSliceOrEmpty() + require.Len(t, values, 3) + + assert.Equal(t, "a", values[0].Value()) + assert.Equal(t, true, values[0].GetMetadata().IsResolvable()) + + assert.Equal(t, "b", values[1].Value()) + assert.Equal(t, true, values[1].GetMetadata().IsResolvable()) + + assert.Equal(t, "c", values[2].Value()) + assert.Equal(t, true, values[2].GetMetadata().IsResolvable()) +} + +func Test_FunctionCall(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` + +variable "x" { + default = ["a", "b"] +} + +resource "something" "blah" { + value = concat(var.x, ["c"]) +} +`, + }) + + 
parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + modules, _, err := parser.EvaluateAll(context.TODO()) + require.NoError(t, err) + + require.Len(t, modules, 1) + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("something") + require.Len(t, blocks, 1) + block := blocks[0] + + attr := block.GetAttribute("value") + require.NotNil(t, attr) + + assert.Equal(t, true, attr.IsResolvable()) + + values := attr.AsStringValueSliceOrEmpty() + require.Len(t, values, 3) + + assert.Equal(t, "a", values[0].Value()) + assert.Equal(t, true, values[0].GetMetadata().IsResolvable()) + + assert.Equal(t, "b", values[1].Value()) + assert.Equal(t, true, values[1].GetMetadata().IsResolvable()) + + assert.Equal(t, "c", values[2].Value()) + assert.Equal(t, true, values[2].GetMetadata().IsResolvable()) +} + +func Test_NullDefaultValueForVar(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "test.tf": ` +variable "bucket_name" { + type = string + default = null +} + +resource "aws_s3_bucket" "default" { + bucket = var.bucket_name != null ? 
var.bucket_name : "default" +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + modules, _, err := parser.EvaluateAll(context.TODO()) + require.NoError(t, err) + require.Len(t, modules, 1) + + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("aws_s3_bucket") + require.Len(t, blocks, 1) + block := blocks[0] + + attr := block.GetAttribute("bucket") + require.NotNil(t, attr) + assert.Equal(t, "default", attr.Value().AsString()) +} + +func Test_MultipleInstancesOfSameResource(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "test.tf": ` + +resource "aws_kms_key" "key1" { + description = "Key #1" + enable_key_rotation = true +} + +resource "aws_kms_key" "key2" { + description = "Key #2" + enable_key_rotation = true +} + +resource "aws_s3_bucket" "this" { + bucket = "test" + } + + +resource "aws_s3_bucket_server_side_encryption_configuration" "this1" { + bucket = aws_s3_bucket.this.id + + rule { + apply_server_side_encryption_by_default { + kms_master_key_id = aws_kms_key.key1.arn + sse_algorithm = "aws:kms" + } + } +} + +resource "aws_s3_bucket_server_side_encryption_configuration" "this2" { + bucket = aws_s3_bucket.this.id + + rule { + apply_server_side_encryption_by_default { + kms_master_key_id = aws_kms_key.key2.arn + sse_algorithm = "aws:kms" + } + } +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + assert.Len(t, modules, 1) + + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("aws_s3_bucket_server_side_encryption_configuration") + assert.Len(t, blocks, 2) + + for _, block := range blocks { + attr, parent := block.GetNestedAttribute("rule.apply_server_side_encryption_by_default.kms_master_key_id") + assert.Equal(t, "apply_server_side_encryption_by_default", parent.Type()) + 
assert.NotNil(t, attr) + assert.NotEmpty(t, attr.Value().AsString()) + } +} + +func Test_IfConfigFsIsNotSet_ThenUseModuleFsForVars(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": ` +variable "bucket_name" { + type = string +} +resource "aws_s3_bucket" "main" { + bucket = var.bucket_name +} +`, + "main.tfvars": `bucket_name = "test_bucket"`, + }) + parser := New(fs, "", + OptionStopOnHCLError(true), + OptionWithTFVarsPaths("main.tfvars"), + ) + + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + assert.Len(t, modules, 1) + + rootModule := modules[0] + blocks := rootModule.GetResourcesByType("aws_s3_bucket") + require.Len(t, blocks, 1) + + block := blocks[0] + + assert.Equal(t, "test_bucket", block.GetAttribute("bucket").AsStringValueOrDefault("", block).Value()) +} + +func Test_ForEachRefToLocals(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": ` +locals { + buckets = toset([ + "foo", + "bar", + ]) +} + +resource "aws_s3_bucket" "this" { + for_each = local.buckets + bucket = each.key +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + assert.Len(t, modules, 1) + + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("aws_s3_bucket") + assert.Len(t, blocks, 2) + + for _, block := range blocks { + attr := block.GetAttribute("bucket") + require.NotNil(t, attr) + assert.Contains(t, []string{"foo", "bar"}, attr.AsStringValueOrDefault("", block).Value()) + } +} + +func Test_ForEachRefToVariableWithDefault(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": ` +variable "buckets" { + type = set(string) + default = ["foo", "bar"] +} + +resource "aws_s3_bucket" "this" { + for_each = var.buckets + bucket = each.key +} +`, + }) + + parser 
:= New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + assert.Len(t, modules, 1) + + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("aws_s3_bucket") + assert.Len(t, blocks, 2) + + for _, block := range blocks { + attr := block.GetAttribute("bucket") + require.NotNil(t, attr) + assert.Contains(t, []string{"foo", "bar"}, attr.AsStringValueOrDefault("", block).Value()) + } +} + +func Test_ForEachRefToVariableFromFile(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": ` +variable "policy_rules" { + type = object({ + secure_tags = optional(map(object({ + session_matcher = optional(string) + priority = number + enabled = optional(bool, true) + })), {}) + }) +} + +resource "google_network_security_gateway_security_policy_rule" "secure_tag_rules" { + for_each = var.policy_rules.secure_tags + provider = google-beta + project = "test" + name = each.key + enabled = each.value.enabled + priority = each.value.priority + session_matcher = each.value.session_matcher +} +`, + "main.tfvars": ` +policy_rules = { + secure_tags = { + secure-tag-1 = { + session_matcher = "host() != 'google.com'" + priority = 1001 + } + } +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true), OptionWithTFVarsPaths("main.tfvars")) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + assert.Len(t, modules, 1) + + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("google_network_security_gateway_security_policy_rule") + assert.Len(t, blocks, 1) + + block := blocks[0] + + assert.Equal(t, "secure-tag-1", block.GetAttribute("name").AsStringValueOrDefault("", block).Value()) + assert.Equal(t, true, block.GetAttribute("enabled").AsBoolValueOrDefault(false, block).Value()) + assert.Equal(t, "host() != 'google.com'", 
block.GetAttribute("session_matcher").AsStringValueOrDefault("", block).Value()) + assert.Equal(t, 1001, block.GetAttribute("priority").AsIntValueOrDefault(0, block).Value()) +} + +func Test_ForEachRefersToMapThatContainsSameStringValues(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": `locals { + buckets = { + bucket1 = "test1" + bucket2 = "test1" + } +} + +resource "aws_s3_bucket" "this" { + for_each = local.buckets + bucket = each.key +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + assert.Len(t, modules, 1) + + bucketBlocks := modules.GetResourcesByType("aws_s3_bucket") + assert.Len(t, bucketBlocks, 2) + + var labels []string + + for _, b := range bucketBlocks { + labels = append(labels, b.Label()) + } + + expectedLabels := []string{ + `aws_s3_bucket.this["bucket1"]`, + `aws_s3_bucket.this["bucket2"]`, + } + assert.Equal(t, expectedLabels, labels) +} + +func TestDataSourceWithCountMetaArgument(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": ` +data "http" "example" { + count = 2 +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + assert.Len(t, modules, 1) + + rootModule := modules[0] + + httpDataSources := rootModule.GetDatasByType("http") + assert.Len(t, httpDataSources, 2) + + var labels []string + for _, b := range httpDataSources { + labels = append(labels, b.Label()) + } + + expectedLabels := []string{ + `http.example[0]`, + `http.example[1]`, + } + assert.Equal(t, expectedLabels, labels) +} + +func TestDataSourceWithForEachMetaArgument(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": ` +locals { + ports = ["80", "8080"] +} +data "http" "example" { + for_each = 
toset(local.ports)
+  url = "localhost:${each.key}"
+}
+`,
+	})
+
+	parser := New(fs, "", OptionStopOnHCLError(true))
+	require.NoError(t, parser.ParseFS(context.TODO(), "."))
+
+	modules, _, err := parser.EvaluateAll(context.TODO())
+	assert.NoError(t, err)
+	assert.Len(t, modules, 1)
+
+	rootModule := modules[0]
+
+	httpDataSources := rootModule.GetDatasByType("http")
+	assert.Len(t, httpDataSources, 2)
+}
+
+// TestForEach verifies how many resource instances the evaluator expands for
+// different for_each argument types: plain lists expand to none, sets and maps
+// expand one instance per element/key.
+func TestForEach(t *testing.T) {
+
+	tests := []struct {
+		name          string
+		source        string
+		expectedCount int
+	}{
+		{
+			name: "arg is list of strings",
+			source: `locals {
+  buckets = ["bucket1", "bucket2"]
+}
+
+resource "aws_s3_bucket" "this" {
+  for_each = local.buckets
+  bucket   = each.key
+}`,
+			expectedCount: 0,
+		},
+		{
+			name: "arg is empty set",
+			source: `locals {
+  buckets = toset([])
+}
+
+resource "aws_s3_bucket" "this" {
+  for_each = local.buckets
+  bucket   = each.key
+}`,
+			expectedCount: 0,
+		},
+		{
+			name: "arg is set of strings",
+			source: `locals {
+  buckets = ["bucket1", "bucket2"]
+}
+
+resource "aws_s3_bucket" "this" {
+  for_each = toset(local.buckets)
+  bucket   = each.key
+}`,
+			expectedCount: 2,
+		},
+		{
+			name: "arg is map",
+			source: `locals {
+  buckets = {
+    1 = {}
+    2 = {}
+  }
+}
+
+resource "aws_s3_bucket" "this" {
+  for_each = local.buckets
+  bucket   = each.key
+}`,
+			expectedCount: 2,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			fs := testutil.CreateFS(t, map[string]string{
+				"main.tf": tt.source,
+			})
+			parser := New(fs, "", OptionStopOnHCLError(true))
+			require.NoError(t, parser.ParseFS(context.TODO(), "."))
+
+			modules, _, err := parser.EvaluateAll(context.TODO())
+			assert.NoError(t, err)
+			assert.Len(t, modules, 1)
+
+			bucketBlocks := modules.GetResourcesByType("aws_s3_bucket")
+			assert.Len(t, bucketBlocks, tt.expectedCount)
+		})
+	}
+}
+
+func TestForEachRefToResource(t *testing.T) {
+	fs := testutil.CreateFS(t, map[string]string{
+		"main.tf": `
+	locals {
+		vpcs = {
+			"test1" = {
+				cidr_block =
"192.168.0.0/28" + } + "test2" = { + cidr_block = "192.168.1.0/28" + } + } +} + +resource "aws_vpc" "example" { + for_each = local.vpcs + cidr_block = each.value.cidr_block +} + +resource "aws_internet_gateway" "example" { + for_each = aws_vpc.example + vpc_id = each.key +} +`, + }) + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + require.NoError(t, err) + require.Len(t, modules, 1) + + blocks := modules.GetResourcesByType("aws_internet_gateway") + require.Len(t, blocks, 2) + + var vpcIds []string + for _, b := range blocks { + vpcIds = append(vpcIds, b.GetAttribute("vpc_id").Value().AsString()) + } + + expectedVpcIds := []string{"test1", "test2"} + assert.Equal(t, expectedVpcIds, vpcIds) +} + +func TestArnAttributeOfBucketIsCorrect(t *testing.T) { + + t.Run("the bucket doesn't have a name", func(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": `resource "aws_s3_bucket" "this" {}`, + }) + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + require.NoError(t, err) + require.Len(t, modules, 1) + + blocks := modules.GetResourcesByType("aws_s3_bucket") + assert.Len(t, blocks, 1) + + bucket := blocks[0] + + values := bucket.Values() + arnVal := values.GetAttr("arn") + assert.True(t, arnVal.Type().Equals(cty.String)) + + id := values.GetAttr("id").AsString() + + arn := arnVal.AsString() + assert.Equal(t, "arn:aws:s3:::"+id, arn) + }) + + t.Run("the bucket has a name", func(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": `resource "aws_s3_bucket" "this" { + bucket = "test" +} + +resource "aws_iam_role" "this" { + name = "test_role" + assume_role_policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = "sts:AssumeRole" + Effect = "Allow" + Sid = "" + Principal = { 
+ Service = "s3.amazonaws.com" + } + }, + ] + }) +} + +resource "aws_iam_role_policy" "this" { + name = "test_policy" + role = aws_iam_role.this.id + policy = data.aws_iam_policy_document.this.json +} + +data "aws_iam_policy_document" "this" { + statement { + effect = "Allow" + actions = [ + "s3:GetObject" + ] + resources = ["${aws_s3_bucket.this.arn}/*"] + } +}`, + }) + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + require.NoError(t, err) + require.Len(t, modules, 1) + + blocks := modules[0].GetDatasByType("aws_iam_policy_document") + assert.Len(t, blocks, 1) + + policyDoc := blocks[0] + + statement := policyDoc.GetBlock("statement") + resources := statement.GetAttribute("resources").AsStringValueSliceOrEmpty() + + assert.Len(t, resources, 1) + assert.True(t, resources[0].EqualTo("arn:aws:s3:::test/*")) + }) +} + +func TestForEachWithObjectsOfDifferentTypes(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": `module "backups" { + bucket_name = each.key + client = each.value.client + path_writers = each.value.path_writers + + for_each = { + "bucket1" = { + client = "client1" + path_writers = ["writer1"] // tuple with string + }, + "bucket2" = { + client = "client2" + path_writers = [] // empty tuple + } + } +} +`, + }) + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + assert.Len(t, modules, 1) +} diff --git a/pkg/scanners/terraform/parser/resolvers/cache.go b/pkg/scanners/terraform/parser/resolvers/cache.go new file mode 100644 index 000000000000..1314d538a60a --- /dev/null +++ b/pkg/scanners/terraform/parser/resolvers/cache.go @@ -0,0 +1,62 @@ +package resolvers + +import ( + "context" + "crypto/md5" // nolint + "fmt" + "io/fs" + "os" + "path/filepath" +) + +type cacheResolver 
struct{} + +var Cache = &cacheResolver{} + +const tempDirName = ".aqua" + +func locateCacheFS() (fs.FS, error) { + dir, err := locateCacheDir() + if err != nil { + return nil, err + } + return os.DirFS(dir), nil +} + +func locateCacheDir() (string, error) { + cacheDir := filepath.Join(os.TempDir(), tempDirName, "cache") + if err := os.MkdirAll(cacheDir, 0o755); err != nil { + return "", err + } + if !isWritable(cacheDir) { + return "", fmt.Errorf("cache directory is not writable") + } + return cacheDir, nil +} + +func (r *cacheResolver) Resolve(_ context.Context, _ fs.FS, opt Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) { + if opt.SkipCache { + opt.Debug("Cache is disabled.") + return nil, "", "", false, nil + } + cacheFS, err := locateCacheFS() + if err != nil { + opt.Debug("No cache filesystem is available on this machine.") + return nil, "", "", false, nil + } + key := cacheKey(opt.Source, opt.Version, opt.RelativePath) + opt.Debug("Trying to resolve: %s", key) + if info, err := fs.Stat(cacheFS, filepath.ToSlash(key)); err == nil && info.IsDir() { + opt.Debug("Module '%s' resolving via cache...", opt.Name) + cacheDir, err := locateCacheDir() + if err != nil { + return nil, "", "", true, err + } + return os.DirFS(filepath.Join(cacheDir, key)), opt.OriginalSource, ".", true, nil + } + return nil, "", "", false, nil +} + +func cacheKey(source, version, relativePath string) string { + return fmt.Sprintf("%x", md5.Sum([]byte(fmt.Sprintf("%s:%s:%s", source, version, relativePath)))) // nolint +} diff --git a/pkg/scanners/terraform/parser/resolvers/local.go b/pkg/scanners/terraform/parser/resolvers/local.go new file mode 100644 index 000000000000..94d92099b6c3 --- /dev/null +++ b/pkg/scanners/terraform/parser/resolvers/local.go @@ -0,0 +1,26 @@ +package resolvers + +import ( + "context" + "io/fs" + "path/filepath" +) + +type localResolver struct{} + +var Local = &localResolver{} + +func (r *localResolver) Resolve(_ 
context.Context, target fs.FS, opt Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) { + if !opt.hasPrefix(".", "..") { + return nil, "", "", false, nil + } + joined := filepath.Clean(filepath.Join(opt.ModulePath, opt.Source)) + if _, err := fs.Stat(target, filepath.ToSlash(joined)); err == nil { + opt.Debug("Module '%s' resolved locally to %s", opt.Name, joined) + return target, "", joined, true, nil + } + + clean := filepath.Clean(opt.Source) + opt.Debug("Module '%s' resolved locally to %s", opt.Name, clean) + return target, "", clean, true, nil +} diff --git a/pkg/scanners/terraform/parser/resolvers/options.go b/pkg/scanners/terraform/parser/resolvers/options.go new file mode 100644 index 000000000000..61f720e8cc9e --- /dev/null +++ b/pkg/scanners/terraform/parser/resolvers/options.go @@ -0,0 +1,28 @@ +package resolvers + +import ( + "strings" + + "github.com/aquasecurity/defsec/pkg/debug" +) + +type Options struct { + Source, OriginalSource, Version, OriginalVersion, WorkingDir, Name, ModulePath string + DebugLogger debug.Logger + AllowDownloads bool + SkipCache bool + RelativePath string +} + +func (o *Options) hasPrefix(prefixes ...string) bool { + for _, prefix := range prefixes { + if strings.HasPrefix(o.Source, prefix) { + return true + } + } + return false +} + +func (o *Options) Debug(format string, args ...interface{}) { + o.DebugLogger.Log(format, args...) 
+} diff --git a/pkg/scanners/terraform/parser/resolvers/registry.go b/pkg/scanners/terraform/parser/resolvers/registry.go new file mode 100644 index 000000000000..5623e9064e06 --- /dev/null +++ b/pkg/scanners/terraform/parser/resolvers/registry.go @@ -0,0 +1,165 @@ +package resolvers + +import ( + "context" + "encoding/json" + "fmt" + "io/fs" + "net/http" + "os" + "sort" + "strings" + "time" + + "github.com/Masterminds/semver" +) + +type registryResolver struct { + client *http.Client +} + +var Registry = ®istryResolver{ + client: &http.Client{ + // give it a maximum 5 seconds to resolve the module + Timeout: time.Second * 5, + }, +} + +type moduleVersions struct { + Modules []struct { + Versions []struct { + Version string `json:"version"` + } `json:"versions"` + } `json:"modules"` +} + +const registryHostname = "registry.terraform.io" + +// nolint +func (r *registryResolver) Resolve(ctx context.Context, target fs.FS, opt Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) { + + if !opt.AllowDownloads { + return + } + + inputVersion := opt.Version + source, relativePath, _ := strings.Cut(opt.Source, "//") + parts := strings.Split(source, "/") + if len(parts) < 3 || len(parts) > 4 { + return + } + + hostname := registryHostname + var token string + if len(parts) == 4 { + hostname = parts[0] + parts = parts[1:] + + envVar := fmt.Sprintf("TF_TOKEN_%s", strings.ReplaceAll(hostname, ".", "_")) + token = os.Getenv(envVar) + if token != "" { + opt.Debug("Found a token for the registry at %s", hostname) + } else { + opt.Debug("No token was found for the registry at %s", hostname) + } + } + + moduleName := strings.Join(parts, "/") + + if opt.Version != "" { + versionUrl := fmt.Sprintf("https://%s/v1/modules/%s/versions", hostname, moduleName) + opt.Debug("Requesting module versions from registry using '%s'...", versionUrl) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, versionUrl, nil) + if err != nil { + return 
nil, "", "", true, err + } + if token != "" { + req.Header.Set("Authorization", "Bearer "+token) + } + resp, err := r.client.Do(req) + if err != nil { + return nil, "", "", true, err + } + defer func() { _ = resp.Body.Close() }() + if resp.StatusCode != http.StatusOK { + return nil, "", "", true, fmt.Errorf("unexpected status code for versions endpoint: %d", resp.StatusCode) + } + var availableVersions moduleVersions + if err := json.NewDecoder(resp.Body).Decode(&availableVersions); err != nil { + return nil, "", "", true, err + } + + opt.Version, err = resolveVersion(inputVersion, availableVersions) + if err != nil { + return nil, "", "", true, err + } + opt.Debug("Found version '%s' for constraint '%s'", opt.Version, inputVersion) + } + + var url string + if opt.Version == "" { + url = fmt.Sprintf("https://%s/v1/modules/%s/download", hostname, moduleName) + } else { + url = fmt.Sprintf("https://%s/v1/modules/%s/%s/download", hostname, moduleName, opt.Version) + } + + opt.Debug("Requesting module source from registry using '%s'...", url) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, "", "", true, err + } + if token != "" { + req.Header.Set("Authorization", "Bearer "+token) + } + if opt.Version != "" { + req.Header.Set("X-Terraform-Version", opt.Version) + } + + resp, err := r.client.Do(req) + if err != nil { + return nil, "", "", true, err + } + defer func() { _ = resp.Body.Close() }() + if resp.StatusCode != http.StatusNoContent { + return nil, "", "", true, fmt.Errorf("unexpected status code: %d", resp.StatusCode) + } + + opt.Source = resp.Header.Get("X-Terraform-Get") + opt.Debug("Module '%s' resolved via registry to new source: '%s'", opt.Name, opt.Source) + opt.RelativePath = relativePath + filesystem, prefix, downloadPath, _, err = Remote.Resolve(ctx, target, opt) + if err != nil { + return nil, "", "", true, err + } + + return filesystem, prefix, downloadPath, true, nil +} + +func 
resolveVersion(input string, versions moduleVersions) (string, error) {
+	if len(versions.Modules) != 1 {
+		return "", fmt.Errorf("1 module expected, found %d", len(versions.Modules))
+	}
+	if len(versions.Modules[0].Versions) == 0 {
+		return "", fmt.Errorf("no available versions for module")
+	}
+	constraints, err := semver.NewConstraint(input)
+	if err != nil {
+		return "", err
+	}
+	var realVersions semver.Collection
+	for _, rawVersion := range versions.Modules[0].Versions {
+		realVersion, err := semver.NewVersion(rawVersion.Version)
+		if err != nil {
+			continue
+		}
+		realVersions = append(realVersions, realVersion)
+	}
+	sort.Sort(sort.Reverse(realVersions))
+	for _, realVersion := range realVersions {
+		if constraints.Check(realVersion) {
+			return realVersion.String(), nil
+		}
+	}
+	return "", fmt.Errorf("no available versions for module constraint '%s'", input)
+}
diff --git a/pkg/scanners/terraform/parser/resolvers/remote.go b/pkg/scanners/terraform/parser/resolvers/remote.go
new file mode 100644
index 000000000000..4c1a96437e65
--- /dev/null
+++ b/pkg/scanners/terraform/parser/resolvers/remote.go
@@ -0,0 +1,92 @@
+package resolvers
+
+import (
+	"context"
+	"fmt"
+	"io/fs"
+	"os"
+	"path/filepath"
+	"sync/atomic"
+
+	"github.com/hashicorp/go-getter"
+)
+
+type remoteResolver struct {
+	count int32
+}
+
+var Remote = &remoteResolver{
+	count: 0,
+}
+
+// incrementCount atomically bumps the download counter for this resolver.
+func (r *remoteResolver) incrementCount(o Options) {
+	o.Debug("Incrementing the download counter")
+	atomic.AddInt32(&r.count, 1) // CAS with a racily-read expected value could drop concurrent increments
+	o.Debug("Download counter is now %d", atomic.LoadInt32(&r.count))
+}
+
+func (r *remoteResolver) GetDownloadCount() int {
+	return int(atomic.LoadInt32(&r.count))
+}
+
+func (r *remoteResolver) Resolve(ctx context.Context, _ fs.FS, opt Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) {
+	if !opt.hasPrefix("github.com/", "bitbucket.org/", "s3:", "git@", "git:", "hg:", "https:", "gcs:") {
+		return nil, "", "", false,
nil + } + + if !opt.AllowDownloads { + return nil, "", "", false, nil + } + + key := cacheKey(opt.OriginalSource, opt.OriginalVersion, opt.RelativePath) + opt.Debug("Storing with cache key %s", key) + + baseCacheDir, err := locateCacheDir() + if err != nil { + return nil, "", "", true, fmt.Errorf("failed to locate cache directory: %w", err) + } + cacheDir := filepath.Join(baseCacheDir, key) + if err := r.download(ctx, opt, cacheDir); err != nil { + return nil, "", "", true, err + } + + r.incrementCount(opt) + opt.Debug("Successfully downloaded %s from %s", opt.Name, opt.Source) + opt.Debug("Module '%s' resolved via remote download.", opt.Name) + return os.DirFS(cacheDir), opt.Source, filepath.Join(".", opt.RelativePath), true, nil +} + +func (r *remoteResolver) download(ctx context.Context, opt Options, dst string) error { + _ = os.RemoveAll(dst) + if err := os.MkdirAll(filepath.Dir(dst), 0o755); err != nil { + return err + } + + var opts []getter.ClientOption + + // Overwrite the file getter so that a file will be copied + getter.Getters["file"] = &getter.FileGetter{Copy: true} + + opt.Debug("Downloading %s...", opt.Source) + + // Build the client + client := &getter.Client{ + Ctx: ctx, + Src: opt.Source, + Dst: dst, + Pwd: opt.WorkingDir, + Getters: getter.Getters, + Mode: getter.ClientModeAny, + Options: opts, + } + + if err := client.Get(); err != nil { + return fmt.Errorf("failed to download: %w", err) + } + + return nil +} + +func (r *remoteResolver) GetSourcePrefix(source string) string { + return source +} diff --git a/pkg/scanners/terraform/parser/resolvers/writable.go b/pkg/scanners/terraform/parser/resolvers/writable.go new file mode 100644 index 000000000000..84f471f779c2 --- /dev/null +++ b/pkg/scanners/terraform/parser/resolvers/writable.go @@ -0,0 +1,36 @@ +//go:build !windows +// +build !windows + +package resolvers + +import ( + "os" + "syscall" +) + +func isWritable(path string) bool { + info, err := os.Stat(path) + if err != nil { + return false 
+ } + + if !info.IsDir() { + return false + } + + // Check if the user bit is enabled in file permission + if info.Mode().Perm()&(1<<(uint(7))) == 0 { + return false + } + + var stat syscall.Stat_t + if err = syscall.Stat(path, &stat); err != nil { + return false + } + + if uint32(os.Geteuid()) != stat.Uid { + return false + } + + return true +} diff --git a/pkg/scanners/terraform/parser/resolvers/writable_windows.go b/pkg/scanners/terraform/parser/resolvers/writable_windows.go new file mode 100644 index 000000000000..69cb3c7169b1 --- /dev/null +++ b/pkg/scanners/terraform/parser/resolvers/writable_windows.go @@ -0,0 +1,24 @@ +package resolvers + +import ( + "os" +) + +func isWritable(path string) bool { + + info, err := os.Stat(path) + if err != nil { + return false + } + + if !info.IsDir() { + return false + } + + // Check if the user bit is enabled in file permission + if info.Mode().Perm()&(1<<(uint(7))) == 0 { + return false + } + + return true +} diff --git a/pkg/scanners/terraform/parser/sort.go b/pkg/scanners/terraform/parser/sort.go new file mode 100644 index 000000000000..d43e86b4e740 --- /dev/null +++ b/pkg/scanners/terraform/parser/sort.go @@ -0,0 +1,58 @@ +package parser + +import ( + "sort" + + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func sortBlocksByHierarchy(blocks terraform.Blocks) { + c := &counter{ + cache: make(map[string]int), + } + sort.Slice(blocks, func(i, j int) bool { + a := blocks[i] + b := blocks[j] + iDepth, jDepth := c.countBlockRecursion(a, blocks, 0), c.countBlockRecursion(b, blocks, 0) + switch { + case iDepth < jDepth: + return true + case iDepth > jDepth: + return false + default: + return blocks[i].FullName() < blocks[j].FullName() + } + }) +} + +type counter struct { + cache map[string]int +} + +func (c *counter) countBlockRecursion(block *terraform.Block, blocks terraform.Blocks, count int) int { + metadata := block.GetMetadata() + if cached, ok := c.cache[metadata.Reference()]; ok { + return cached + } + 
var maxCount int + var hasRecursion bool + for _, attrName := range []string{"for_each", "count"} { + if attr := block.GetAttribute(attrName); attr.IsNotNil() { + hasRecursion = true + for _, other := range blocks { + if attr.ReferencesBlock(other) { + depth := c.countBlockRecursion(other, blocks, count) + if depth > maxCount { + maxCount = depth + } + } + } + } + } + if hasRecursion { + maxCount++ + } + result := maxCount + count + c.cache[metadata.Reference()] = result + return result +} diff --git a/pkg/scanners/terraform/parser/testdata/tfvars/terraform.tfvars b/pkg/scanners/terraform/parser/testdata/tfvars/terraform.tfvars new file mode 100644 index 000000000000..23fee69e2bb1 --- /dev/null +++ b/pkg/scanners/terraform/parser/testdata/tfvars/terraform.tfvars @@ -0,0 +1 @@ +instance_type = "t2.large" \ No newline at end of file diff --git a/pkg/scanners/terraform/parser/testdata/tfvars/terraform.tfvars.json b/pkg/scanners/terraform/parser/testdata/tfvars/terraform.tfvars.json new file mode 100644 index 000000000000..bde0e75763b1 --- /dev/null +++ b/pkg/scanners/terraform/parser/testdata/tfvars/terraform.tfvars.json @@ -0,0 +1,10 @@ +{ + "variable": { + "foo": { + "default": "bar" + }, + "baz": "qux" + }, + "foo2": true, + "foo3": 3 +} \ No newline at end of file diff --git a/pkg/scanners/terraform/scanner.go b/pkg/scanners/terraform/scanner.go new file mode 100644 index 000000000000..9569848b35a4 --- /dev/null +++ b/pkg/scanners/terraform/scanner.go @@ -0,0 +1,379 @@ +package terraform + +import ( + "context" + "io" + "io/fs" + "path/filepath" + "sort" + "strings" + "sync" + "time" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" + "golang.org/x/exp/slices" + + 
"github.com/aquasecurity/defsec/pkg/rego" + "github.com/aquasecurity/trivy/pkg/extrafs" + "github.com/aquasecurity/trivy/pkg/scanners" + "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" + "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser/resolvers" +) + +var _ scanners.FSScanner = (*Scanner)(nil) +var _ options.ConfigurableScanner = (*Scanner)(nil) +var _ ConfigurableTerraformScanner = (*Scanner)(nil) + +type Scanner struct { + sync.Mutex + options []options.ScannerOption + parserOpt []options.ParserOption + executorOpt []executor.Option + dirs map[string]struct{} + forceAllDirs bool + policyDirs []string + policyReaders []io.Reader + regoScanner *rego.Scanner + execLock sync.RWMutex + debug debug.Logger + frameworks []framework.Framework + spec string + loadEmbeddedLibraries bool + loadEmbeddedPolicies bool +} + +func (s *Scanner) SetSpec(spec string) { + s.spec = spec +} + +func (s *Scanner) SetRegoOnly(regoOnly bool) { + s.executorOpt = append(s.executorOpt, executor.OptionWithRegoOnly(regoOnly)) +} + +func (s *Scanner) SetFrameworks(frameworks []framework.Framework) { + s.frameworks = frameworks +} + +func (s *Scanner) SetUseEmbeddedPolicies(b bool) { + s.loadEmbeddedPolicies = b +} + +func (s *Scanner) SetUseEmbeddedLibraries(b bool) { + s.loadEmbeddedLibraries = b +} + +func (s *Scanner) Name() string { + return "Terraform" +} + +func (s *Scanner) SetForceAllDirs(b bool) { + s.forceAllDirs = b +} + +func (s *Scanner) AddParserOptions(options ...options.ParserOption) { + s.parserOpt = append(s.parserOpt, options...) +} + +func (s *Scanner) AddExecutorOptions(options ...executor.Option) { + s.executorOpt = append(s.executorOpt, options...) 
+} + +func (s *Scanner) SetPolicyReaders(readers []io.Reader) { + s.policyReaders = readers +} + +func (s *Scanner) SetSkipRequiredCheck(skip bool) { + s.parserOpt = append(s.parserOpt, options.ParserWithSkipRequiredCheck(skip)) +} + +func (s *Scanner) SetDebugWriter(writer io.Writer) { + s.parserOpt = append(s.parserOpt, options.ParserWithDebug(writer)) + s.executorOpt = append(s.executorOpt, executor.OptionWithDebugWriter(writer)) + s.debug = debug.New(writer, "terraform", "scanner") +} + +func (s *Scanner) SetTraceWriter(_ io.Writer) { +} + +func (s *Scanner) SetPerResultTracingEnabled(_ bool) { +} + +func (s *Scanner) SetPolicyDirs(dirs ...string) { + s.policyDirs = dirs +} + +func (s *Scanner) SetDataDirs(_ ...string) {} +func (s *Scanner) SetPolicyNamespaces(_ ...string) {} + +func (s *Scanner) SetPolicyFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} + +func (s *Scanner) SetDataFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} +func (s *Scanner) SetRegoErrorLimit(_ int) {} + +type Metrics struct { + Parser parser.Metrics + Executor executor.Metrics + Timings struct { + Total time.Duration + } +} + +func New(options ...options.ScannerOption) *Scanner { + s := &Scanner{ + dirs: make(map[string]struct{}), + options: options, + } + for _, opt := range options { + opt(s) + } + return s +} + +func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, dir string) (scan.Results, error) { + results, _, err := s.ScanFSWithMetrics(ctx, target, dir) + return results, err +} + +func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { + s.Lock() + defer s.Unlock() + if s.regoScanner != nil { + return s.regoScanner, nil + } + regoScanner := rego.NewScanner(types.SourceCloud, s.options...) 
+ regoScanner.SetParentDebugLogger(s.debug) + + if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, srcFS, s.policyDirs, s.policyReaders); err != nil { + return nil, err + } + s.regoScanner = regoScanner + return regoScanner, nil +} + +// terraformRootModule represents the module to be used as the root module for Terraform deployment. +type terraformRootModule struct { + rootPath string + childs terraform.Modules + fsMap map[string]fs.FS +} + +func excludeNonRootModules(modules []terraformRootModule) []terraformRootModule { + var result []terraformRootModule + var childPaths []string + + for _, module := range modules { + childPaths = append(childPaths, module.childs.ChildModulesPaths()...) + } + + for _, module := range modules { + // if the path of the root module matches the path of the child module, + // then we should not scan it + if !slices.Contains(childPaths, module.rootPath) { + result = append(result, module) + } + } + return result +} + +func (s *Scanner) ScanFSWithMetrics(ctx context.Context, target fs.FS, dir string) (scan.Results, Metrics, error) { + + var metrics Metrics + + s.debug.Log("Scanning [%s] at '%s'...", target, dir) + + // find directories which directly contain tf files (and have no parent containing tf files) + rootDirs := s.findRootModules(target, dir, dir) + sort.Strings(rootDirs) + + if len(rootDirs) == 0 { + s.debug.Log("no root modules found") + return nil, metrics, nil + } + + regoScanner, err := s.initRegoScanner(target) + if err != nil { + return nil, metrics, err + } + + s.execLock.Lock() + s.executorOpt = append(s.executorOpt, executor.OptionWithRegoScanner(regoScanner), executor.OptionWithFrameworks(s.frameworks...)) + s.execLock.Unlock() + + var allResults scan.Results + + // parse all root module directories + var rootModules []terraformRootModule + for _, dir := range rootDirs { + + s.debug.Log("Scanning root module '%s'...", dir) + + p := parser.New(target, "", s.parserOpt...) 
+ + if err := p.ParseFS(ctx, dir); err != nil { + return nil, metrics, err + } + + modules, _, err := p.EvaluateAll(ctx) + if err != nil { + return nil, metrics, err + } + + parserMetrics := p.Metrics() + metrics.Parser.Counts.Blocks += parserMetrics.Counts.Blocks + metrics.Parser.Counts.Modules += parserMetrics.Counts.Modules + metrics.Parser.Counts.Files += parserMetrics.Counts.Files + metrics.Parser.Timings.DiskIODuration += parserMetrics.Timings.DiskIODuration + metrics.Parser.Timings.ParseDuration += parserMetrics.Timings.ParseDuration + + rootModules = append(rootModules, terraformRootModule{ + rootPath: dir, + childs: modules, + fsMap: p.GetFilesystemMap(), + }) + } + + rootModules = excludeNonRootModules(rootModules) + + for _, module := range rootModules { + s.execLock.RLock() + e := executor.New(s.executorOpt...) + s.execLock.RUnlock() + results, execMetrics, err := e.Execute(module.childs) + if err != nil { + return nil, metrics, err + } + + for i, result := range results { + if result.Metadata().Range().GetFS() != nil { + continue + } + key := result.Metadata().Range().GetFSKey() + if key == "" { + continue + } + if filesystem, ok := module.fsMap[key]; ok { + override := scan.Results{ + result, + } + override.SetSourceAndFilesystem(result.Range().GetSourcePrefix(), filesystem, false) + results[i] = override[0] + } + } + + metrics.Executor.Counts.Passed += execMetrics.Counts.Passed + metrics.Executor.Counts.Failed += execMetrics.Counts.Failed + metrics.Executor.Counts.Ignored += execMetrics.Counts.Ignored + metrics.Executor.Counts.Critical += execMetrics.Counts.Critical + metrics.Executor.Counts.High += execMetrics.Counts.High + metrics.Executor.Counts.Medium += execMetrics.Counts.Medium + metrics.Executor.Counts.Low += execMetrics.Counts.Low + metrics.Executor.Timings.Adaptation += execMetrics.Timings.Adaptation + metrics.Executor.Timings.RunningChecks += execMetrics.Timings.RunningChecks + + allResults = append(allResults, results...) 
+ } + + metrics.Parser.Counts.ModuleDownloads = resolvers.Remote.GetDownloadCount() + + metrics.Timings.Total += metrics.Parser.Timings.DiskIODuration + metrics.Timings.Total += metrics.Parser.Timings.ParseDuration + metrics.Timings.Total += metrics.Executor.Timings.Adaptation + metrics.Timings.Total += metrics.Executor.Timings.RunningChecks + + return allResults, metrics, nil +} + +func (s *Scanner) removeNestedDirs(dirs []string) []string { + if s.forceAllDirs { + return dirs + } + var clean []string + for _, dirA := range dirs { + dirOK := true + for _, dirB := range dirs { + if dirA == dirB { + continue + } + if str, err := filepath.Rel(dirB, dirA); err == nil && !strings.HasPrefix(str, "..") { + dirOK = false + break + } + } + if dirOK { + clean = append(clean, dirA) + } + } + return clean +} + +func (s *Scanner) findRootModules(target fs.FS, scanDir string, dirs ...string) []string { + + var roots []string + var others []string + + for _, dir := range dirs { + if s.isRootModule(target, dir) { + roots = append(roots, dir) + if !s.forceAllDirs { + continue + } + } + + // if this isn't a root module, look at directories inside it + files, err := fs.ReadDir(target, filepath.ToSlash(dir)) + if err != nil { + continue + } + for _, file := range files { + realPath := filepath.Join(dir, file.Name()) + if symFS, ok := target.(extrafs.ReadLinkFS); ok { + realPath, err = symFS.ResolveSymlink(realPath, scanDir) + if err != nil { + s.debug.Log("failed to resolve symlink '%s': %s", file.Name(), err) + continue + } + } + if file.IsDir() { + others = append(others, realPath) + } else if statFS, ok := target.(fs.StatFS); ok { + info, err := statFS.Stat(filepath.ToSlash(realPath)) + if err != nil { + continue + } + if info.IsDir() { + others = append(others, realPath) + } + } + } + } + + if (len(roots) == 0 || s.forceAllDirs) && len(others) > 0 { + roots = append(roots, s.findRootModules(target, scanDir, others...)...) 
+ } + + return s.removeNestedDirs(roots) +} + +func (s *Scanner) isRootModule(target fs.FS, dir string) bool { + files, err := fs.ReadDir(target, filepath.ToSlash(dir)) + if err != nil { + s.debug.Log("failed to read dir '%s' from filesystem [%s]: %s", dir, target, err) + return false + } + for _, file := range files { + if strings.HasSuffix(file.Name(), ".tf") || strings.HasSuffix(file.Name(), ".tf.json") { + return true + } + } + return false +} diff --git a/pkg/scanners/terraform/scanner_integration_test.go b/pkg/scanners/terraform/scanner_integration_test.go new file mode 100644 index 000000000000..94dffbb4ceed --- /dev/null +++ b/pkg/scanners/terraform/scanner_integration_test.go @@ -0,0 +1,132 @@ +package terraform + +import ( + "bytes" + "context" + "fmt" + "testing" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_ScanRemoteModule(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test in short mode") + } + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": ` +module "s3_bucket" { + source = "terraform-aws-modules/s3-bucket/aws" + + bucket = "my-s3-bucket" +} +`, + "/rules/bucket_name.rego": ` +# METADATA +# schemas: +# - input: schema.input +# custom: +# avd_id: AVD-AWS-0001 +# input: +# selector: +# - type: cloud +# subtypes: +# - service: s3 +# provider: aws +package defsec.test.aws1 +deny[res] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "" + res := result.new("The name of the bucket must not be empty", bucket) +}`, + }) + + debugLog := bytes.NewBuffer([]byte{}) + + scanner := New( + options.ScannerWithDebug(debugLog), + options.ScannerWithPolicyFilesystem(fs), + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithEmbeddedPolicies(false), + options.ScannerWithEmbeddedLibraries(false), + 
options.ScannerWithRegoOnly(true), + ScannerWithAllDirectories(true), + ScannerWithSkipCachedModules(true), + ) + + results, err := scanner.ScanFS(context.TODO(), fs, ".") + require.NoError(t, err) + + assert.Len(t, results.GetPassed(), 1) + + if t.Failed() { + fmt.Printf("Debug logs:\n%s\n", debugLog.String()) + } +} + +func Test_ScanChildUseRemoteModule(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test in short mode") + } + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": ` +module "this" { + source = "./modules/s3" + bucket = "my-s3-bucket" +} +`, + "modules/s3/main.tf": ` +variable "bucket" { + type = string +} + +module "s3_bucket" { + source = "github.com/terraform-aws-modules/terraform-aws-s3-bucket?ref=v3.15.1" + bucket = var.bucket +} +`, + "rules/bucket_name.rego": ` +# METADATA +# schemas: +# - input: schema.input +# custom: +# avd_id: AVD-AWS-0001 +# input: +# selector: +# - type: cloud +# subtypes: +# - service: s3 +# provider: aws +package defsec.test.aws1 +deny[res] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "" + res := result.new("The name of the bucket must not be empty", bucket) +}`, + }) + + debugLog := bytes.NewBuffer([]byte{}) + + scanner := New( + options.ScannerWithDebug(debugLog), + options.ScannerWithPolicyFilesystem(fs), + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithEmbeddedPolicies(false), + options.ScannerWithEmbeddedLibraries(false), + options.ScannerWithRegoOnly(true), + ScannerWithAllDirectories(true), + ScannerWithSkipCachedModules(true), + ) + + results, err := scanner.ScanFS(context.TODO(), fs, ".") + require.NoError(t, err) + + assert.Len(t, results.GetPassed(), 1) + + if t.Failed() { + fmt.Printf("Debug logs:\n%s\n", debugLog.String()) + } +} diff --git a/pkg/scanners/terraform/scanner_test.go b/pkg/scanners/terraform/scanner_test.go new file mode 100644 index 000000000000..07044f8d10bc --- /dev/null +++ b/pkg/scanners/terraform/scanner_test.go @@ -0,0 
+1,1361 @@ +package terraform + +import ( + "bytes" + "context" + "fmt" + "strconv" + "testing" + + "github.com/aquasecurity/defsec/pkg/providers" + "github.com/aquasecurity/defsec/pkg/rules" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/severity" + "github.com/aquasecurity/defsec/pkg/state" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +var alwaysFailRule = scan.Rule{ + Provider: providers.AWSProvider, + Service: "service", + ShortCode: "abc", + Severity: severity.High, + CustomChecks: scan.CustomChecks{ + Terraform: &scan.TerraformCustomCheck{ + RequiredTypes: []string{}, + RequiredLabels: []string{}, + Check: func(resourceBlock *terraform.Block, _ *terraform.Module) (results scan.Results) { + results.Add("oh no", resourceBlock) + return + }, + }, + }, +} + +const emptyBucketRule = ` +# METADATA +# schemas: +# - input: schema.input +# custom: +# avd_id: AVD-AWS-0001 +# input: +# selector: +# - type: cloud +# subtypes: +# - service: s3 +# provider: aws +package defsec.test.aws1 +deny[res] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "" + res := result.new("The name of the bucket must not be empty", bucket) +} +` + +func scanWithOptions(t *testing.T, code string, opt ...options.ScannerOption) scan.Results { + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": code, + }) + + scanner := New(opt...) 
+ results, _, err := scanner.ScanFSWithMetrics(context.TODO(), fs, "project") + require.NoError(t, err) + return results +} + +func Test_OptionWithAlternativeIDProvider(t *testing.T) { + reg := rules.Register(alwaysFailRule) + defer rules.Deregister(reg) + + options := []options.ScannerOption{ + ScannerWithAlternativeIDProvider(func(s string) []string { + return []string{"something", "altid", "blah"} + }), + } + results := scanWithOptions(t, ` +//tfsec:ignore:altid +resource "something" "else" {} +`, options...) + require.Len(t, results.GetFailed(), 0) + require.Len(t, results.GetIgnored(), 1) + +} + +func Test_TrivyOptionWithAlternativeIDProvider(t *testing.T) { + reg := rules.Register(alwaysFailRule) + defer rules.Deregister(reg) + + options := []options.ScannerOption{ + ScannerWithAlternativeIDProvider(func(s string) []string { + return []string{"something", "altid", "blah"} + }), + } + results := scanWithOptions(t, ` +//trivy:ignore:altid +resource "something" "else" {} +`, options...) + require.Len(t, results.GetFailed(), 0) + require.Len(t, results.GetIgnored(), 1) + +} + +func Test_OptionWithSeverityOverrides(t *testing.T) { + reg := rules.Register(alwaysFailRule) + defer rules.Deregister(reg) + + options := []options.ScannerOption{ + ScannerWithSeverityOverrides(map[string]string{"aws-service-abc": "LOW"}), + } + results := scanWithOptions(t, ` +resource "something" "else" {} +`, options...) + require.Len(t, results.GetFailed(), 1) + assert.Equal(t, severity.Low, results.GetFailed()[0].Severity()) +} + +func Test_OptionWithDebugWriter(t *testing.T) { + reg := rules.Register(alwaysFailRule) + defer rules.Deregister(reg) + + buffer := bytes.NewBuffer([]byte{}) + + scannerOpts := []options.ScannerOption{ + options.ScannerWithDebug(buffer), + } + _ = scanWithOptions(t, ` +resource "something" "else" {} +`, scannerOpts...) 
+ require.Greater(t, buffer.Len(), 0) +} + +func Test_OptionNoIgnores(t *testing.T) { + reg := rules.Register(alwaysFailRule) + defer rules.Deregister(reg) + + scannerOpts := []options.ScannerOption{ + ScannerWithNoIgnores(), + } + results := scanWithOptions(t, ` +//tfsec:ignore:aws-service-abc +resource "something" "else" {} +`, scannerOpts...) + require.Len(t, results.GetFailed(), 1) + require.Len(t, results.GetIgnored(), 0) + +} + +func Test_OptionExcludeRules(t *testing.T) { + reg := rules.Register(alwaysFailRule) + defer rules.Deregister(reg) + + options := []options.ScannerOption{ + ScannerWithExcludedRules([]string{"aws-service-abc"}), + } + results := scanWithOptions(t, ` +resource "something" "else" {} +`, options...) + require.Len(t, results.GetFailed(), 0) + require.Len(t, results.GetIgnored(), 1) + +} + +func Test_OptionIncludeRules(t *testing.T) { + reg := rules.Register(alwaysFailRule) + defer rules.Deregister(reg) + + scannerOpts := []options.ScannerOption{ + ScannerWithIncludedRules([]string{"this-only"}), + } + results := scanWithOptions(t, ` +resource "something" "else" {} +`, scannerOpts...) + require.Len(t, results.GetFailed(), 0) + require.Len(t, results.GetIgnored(), 1) + +} + +func Test_OptionWithMinimumSeverity(t *testing.T) { + reg := rules.Register(alwaysFailRule) + defer rules.Deregister(reg) + + scannerOpts := []options.ScannerOption{ + ScannerWithMinimumSeverity(severity.Critical), + } + results := scanWithOptions(t, ` +resource "something" "else" {} +`, scannerOpts...) 
+ require.Len(t, results.GetFailed(), 0) + require.Len(t, results.GetIgnored(), 1) + +} + +func Test_OptionWithPolicyDirs(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "/code/main.tf": ` +resource "aws_s3_bucket" "my-bucket" { + bucket = "evil" +} +`, + "/rules/test.rego": ` +package defsec.abcdefg + +__rego_metadata__ := { + "id": "TEST123", + "avd_id": "AVD-TEST-0123", + "title": "Buckets should not be evil", + "short_code": "no-evil-buckets", + "severity": "CRITICAL", + "type": "DefSec Security Check", + "description": "You should not allow buckets to be evil", + "recommended_actions": "Use a good bucket instead", + "url": "https://google.com/search?q=is+my+bucket+evil", +} + +__rego_input__ := { + "combine": false, + "selector": [{"type": "defsec", "subtypes": [{"service": "s3", "provider": "aws"}]}], +} + +deny[cause] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "evil" + cause := bucket.name +} +`, + }) + + debugLog := bytes.NewBuffer([]byte{}) + scanner := New( + options.ScannerWithDebug(debugLog), + options.ScannerWithPolicyFilesystem(fs), + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithRegoOnly(true), + ) + + results, err := scanner.ScanFS(context.TODO(), fs, "code") + require.NoError(t, err) + + require.Len(t, results.GetFailed(), 1) + + failure := results.GetFailed()[0] + + assert.Equal(t, "AVD-TEST-0123", failure.Rule().AVDID) + + actualCode, err := failure.GetCode() + require.NoError(t, err) + for i := range actualCode.Lines { + actualCode.Lines[i].Highlighted = "" + } + assert.Equal(t, []scan.Line{ + { + Number: 2, + Content: "resource \"aws_s3_bucket\" \"my-bucket\" {", + IsCause: false, + FirstCause: false, + LastCause: false, + Annotation: "", + }, + { + Number: 3, + Content: "\tbucket = \"evil\"", + IsCause: true, + FirstCause: true, + LastCause: true, + Annotation: "", + }, + { + Number: 4, + Content: "}", + IsCause: false, + FirstCause: false, + LastCause: false, + Annotation: "", + }, + }, 
actualCode.Lines) + + if t.Failed() { + fmt.Printf("Debug logs:\n%s\n", debugLog.String()) + } + +} + +func Test_OptionWithPolicyNamespaces(t *testing.T) { + + tests := []struct { + includedNamespaces []string + policyNamespace string + wantFailure bool + }{ + { + includedNamespaces: nil, + policyNamespace: "blah", + wantFailure: false, + }, + { + includedNamespaces: nil, + policyNamespace: "appshield.something", + wantFailure: true, + }, + { + includedNamespaces: nil, + policyNamespace: "defsec.blah", + wantFailure: true, + }, + { + includedNamespaces: []string{"user"}, + policyNamespace: "users", + wantFailure: false, + }, + { + includedNamespaces: []string{"users"}, + policyNamespace: "something.users", + wantFailure: false, + }, + { + includedNamespaces: []string{"users"}, + policyNamespace: "users", + wantFailure: true, + }, + { + includedNamespaces: []string{"users"}, + policyNamespace: "users.my_rule", + wantFailure: true, + }, + { + includedNamespaces: []string{"a", "users", "b"}, + policyNamespace: "users", + wantFailure: true, + }, + { + includedNamespaces: []string{"user"}, + policyNamespace: "defsec", + wantFailure: true, + }, + } + + for i, test := range tests { + + t.Run(strconv.Itoa(i), func(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "/code/main.tf": ` +resource "aws_s3_bucket" "my-bucket" { + bucket = "evil" +} +`, + "/rules/test.rego": fmt.Sprintf(` +# METADATA +# custom: +# input: +# selector: +# - type: cloud +# subtypes: +# - service: s3 +# provider: aws +package %s + +deny[cause] { +bucket := input.aws.s3.buckets[_] +bucket.name.value == "evil" +cause := bucket.name +} + + `, test.policyNamespace), + }) + + scanner := New( + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithPolicyNamespaces(test.includedNamespaces...), + ) + + results, _, err := scanner.ScanFSWithMetrics(context.TODO(), fs, "code") + require.NoError(t, err) + + var found bool + for _, result := range results.GetFailed() { + if 
result.RegoNamespace() == test.policyNamespace && result.RegoRule() == "deny" { + found = true + break + } + } + assert.Equal(t, test.wantFailure, found) + + }) + } + +} + +func Test_OptionWithStateFunc(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/main.tf": ` +resource "aws_s3_bucket" "my-bucket" { + bucket = "evil" +} +`, + }) + + var actual state.State + + debugLog := bytes.NewBuffer([]byte{}) + scanner := New( + options.ScannerWithDebug(debugLog), + ScannerWithStateFunc(func(s *state.State) { + require.NotNil(t, s) + actual = *s + }), + ) + + _, _, err := scanner.ScanFSWithMetrics(context.TODO(), fs, "code") + require.NoError(t, err) + + assert.Equal(t, 1, len(actual.AWS.S3.Buckets)) + + if t.Failed() { + fmt.Printf("Debug logs:\n%s\n", debugLog.String()) + } + +} + +func Test_OptionWithRegoOnly(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "/code/main.tf": ` +resource "aws_s3_bucket" "my-bucket" { + bucket = "evil" +} +`, + "/rules/test.rego": ` +package defsec.abcdefg + +__rego_metadata__ := { + "id": "TEST123", + "avd_id": "AVD-TEST-0123", + "title": "Buckets should not be evil", + "short_code": "no-evil-buckets", + "severity": "CRITICAL", + "type": "DefSec Security Check", + "description": "You should not allow buckets to be evil", + "recommended_actions": "Use a good bucket instead", + "url": "https://google.com/search?q=is+my+bucket+evil", +} + +__rego_input__ := { + "combine": false, + "selector": [{"type": "defsec", "subtypes": [{"service": "s3", "provider": "aws"}]}], +} + +deny[cause] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "evil" + cause := bucket.name +} +`, + }) + + debugLog := bytes.NewBuffer([]byte{}) + scanner := New( + options.ScannerWithDebug(debugLog), + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithRegoOnly(true), + ) + + results, err := scanner.ScanFS(context.TODO(), fs, "code") + require.NoError(t, err) + + require.Len(t, results.GetFailed(), 1) + 
assert.Equal(t, "AVD-TEST-0123", results[0].Rule().AVDID) + + if t.Failed() { + fmt.Printf("Debug logs:\n%s\n", debugLog.String()) + } +} + +func Test_OptionWithRegoOnly_CodeHighlighting(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "/code/main.tf": ` +resource "aws_s3_bucket" "my-bucket" { + bucket = "evil" +} +`, + "/rules/test.rego": ` +package defsec.abcdefg + +__rego_metadata__ := { + "id": "TEST123", + "avd_id": "AVD-TEST-0123", + "title": "Buckets should not be evil", + "short_code": "no-evil-buckets", + "severity": "CRITICAL", + "type": "DefSec Security Check", + "description": "You should not allow buckets to be evil", + "recommended_actions": "Use a good bucket instead", + "url": "https://google.com/search?q=is+my+bucket+evil", +} + +__rego_input__ := { + "combine": false, + "selector": [{"type": "defsec", "subtypes": [{"service": "s3", "provider": "aws"}]}], +} + +deny[res] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "evil" + res := result.new("oh no", bucket.name) +} +`, + }) + + debugLog := bytes.NewBuffer([]byte{}) + scanner := New( + options.ScannerWithDebug(debugLog), + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithRegoOnly(true), + options.ScannerWithEmbeddedLibraries(true), + ) + + results, err := scanner.ScanFS(context.TODO(), fs, "code") + require.NoError(t, err) + + require.Len(t, results.GetFailed(), 1) + assert.Equal(t, "AVD-TEST-0123", results[0].Rule().AVDID) + assert.NotNil(t, results[0].Metadata().Range().GetFS()) + + if t.Failed() { + fmt.Printf("Debug logs:\n%s\n", debugLog.String()) + } +} + +func Test_OptionWithSkipDownloaded(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "test/main.tf": ` +module "s3-bucket" { + source = "terraform-aws-modules/s3-bucket/aws" + version = "3.14.0" + bucket = mybucket +} +`, + // creating our own rule for the reliability of the test + "/rules/test.rego": ` +package defsec.abcdefg + +__rego_input__ := { + "combine": false, + 
"selector": [{"type": "defsec", "subtypes": [{"service": "s3", "provider": "aws"}]}], +} + +deny[cause] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "mybucket" + cause := bucket.name +}`, + }) + + scanner := New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + results, err := scanner.ScanFS(context.TODO(), fs, "test") + assert.NoError(t, err) + assert.Greater(t, len(results.GetFailed()), 0) + + scanner = New(ScannerWithSkipDownloaded(true)) + results, err = scanner.ScanFS(context.TODO(), fs, "test") + assert.NoError(t, err) + assert.Len(t, results.GetFailed(), 0) + +} + +func Test_IAMPolicyRego(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "/code/main.tf": ` +resource "aws_sqs_queue_policy" "bad_example" { + queue_url = aws_sqs_queue.q.id + + policy = < 0 { + include = r1.LongID() + } else { + exclude = r1.LongID() + } + assert.Equal(t, test.expectedResults, len(results.GetFailed())) + if include != "" { + testutil.AssertRuleFound(t, include, results, "false negative found") + } + if exclude != "" { + testutil.AssertRuleNotFound(t, exclude, results, "false positive found") + } + }) + } +} diff --git a/test/deterministic_test.go b/test/deterministic_test.go new file mode 100644 index 000000000000..4c6a56b471d8 --- /dev/null +++ b/test/deterministic_test.go @@ -0,0 +1,51 @@ +package test + +import ( + "context" + "testing" + + "github.com/aquasecurity/defsec/pkg/rules" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" + "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" +) + +func Test_DeterministicResults(t *testing.T) { + + reg := rules.Register(badRule) + defer rules.Deregister(reg) + + fs := testutil.CreateFS(t, map[string]string{ + "first.tf": ` +resource "problem" "uhoh" { + bad = true + for_each = other.thing +} + `, + 
"second.tf": ` +resource "other" "thing" { + for_each = local.list +} + `, + "third.tf": ` +locals { + list = { + a = 1, + b = 2, + } +} + `, + }) + + for i := 0; i < 100; i++ { + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), ".") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := executor.New().Execute(modules) + require.Len(t, results.GetFailed(), 2) + } +} diff --git a/test/docker_test.go b/test/docker_test.go new file mode 100644 index 000000000000..2c7f3a93d761 --- /dev/null +++ b/test/docker_test.go @@ -0,0 +1,138 @@ +package test + +import ( + "context" + "fmt" + "os" + "testing" + + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/pkg/scanners/dockerfile" +) + +// func addFilesToMemFS(memfs *memoryfs.FS, typePolicy bool, folderName string) error { +// base := filepath.Base(folderName) +// if err := memfs.MkdirAll(base, 0o700); err != nil { +// return err +// } +// err := filepath.Walk(filepath.FromSlash(folderName), +// func(fpath string, info os.FileInfo, err error) error { +// if err != nil { +// return err +// } +// if info.IsDir() { +// return nil +// } +// if typePolicy && !rego.IsRegoFile(info.Name()) { +// return nil +// } +// data, err := os.ReadFile(fpath) +// if err != nil { +// return err +// } +// fileName := getFileName(fpath, info, typePolicy) +// if err := memfs.WriteFile(path.Join(base, fileName), data, 0o644); err != nil { +// return err +// } +// return nil +// }) +// +// if err != nil { +// return err +// } +// return nil +//} + +// TODO: Evaluate usefulness of this test +// func Test_Docker_RegoPoliciesFromDisk(t *testing.T) { +// t.Parallel() +// +// entries, err := os.ReadDir("./testdata/dockerfile") +// 
require.NoError(t, err) +// +// policiesPath, err := filepath.Abs("../rules") +// require.NoError(t, err) +// scanner := dockerfile.NewScanner( +// options.ScannerWithPolicyDirs(filepath.Base(policiesPath)), +// ) +// memfs := memoryfs.New() +// // add policies +// err = addFilesToMemFS(memfs, true, policiesPath) +// require.NoError(t, err) +// +// // add test data +// testDataPath, err := filepath.Abs("./testdata/dockerfile") +// require.NoError(t, err) +// err = addFilesToMemFS(memfs, false, testDataPath) +// require.NoError(t, err) +// +// results, err := scanner.ScanFS(context.TODO(), memfs, filepath.Base(testDataPath)) +// require.NoError(t, err) +// +// for _, entry := range entries { +// if !entry.IsDir() { +// continue +// } +// t.Run(entry.Name(), func(t *testing.T) { +// require.NoError(t, err) +// t.Run(entry.Name(), func(t *testing.T) { +// var matched int +// for _, result := range results { +// if result.Rule().HasID(entry.Name()) && result.Status() == scan.StatusFailed { +// if result.Description() != "Specify at least 1 USER command in Dockerfile with non-root user as argument" { +// assert.Greater(t, result.Range().GetStartLine(), 0) +// assert.Greater(t, result.Range().GetEndLine(), 0) +// } +// if !strings.HasSuffix(result.Range().GetFilename(), entry.Name()) { +// continue +// } +// matched++ +// } +// } +// assert.Equal(t, 1, matched, "Rule should be matched once") +// }) +// +// }) +// } +//} + +func Test_Docker_RegoPoliciesEmbedded(t *testing.T) { + t.Parallel() + + entries, err := os.ReadDir("./testdata/dockerfile") + require.NoError(t, err) + + scanner := dockerfile.NewScanner(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + srcFS := os.DirFS("../") + + results, err := scanner.ScanFS(context.TODO(), srcFS, "test/testdata/dockerfile") + require.NoError(t, err) + + for _, entry := range entries { + if !entry.IsDir() { + continue + } + t.Run(entry.Name(), func(t *testing.T) { + require.NoError(t, err) + 
t.Run(entry.Name(), func(t *testing.T) { + var matched bool + for _, result := range results { + if result.Rule().HasID(entry.Name()) && result.Status() == scan.StatusFailed { + if result.Description() != "Specify at least 1 USER command in Dockerfile with non-root user as argument" { + assert.Greater(t, result.Range().GetStartLine(), 0) + assert.Greater(t, result.Range().GetEndLine(), 0) + } + assert.Equal(t, fmt.Sprintf("test/testdata/dockerfile/%s/Dockerfile.denied", entry.Name()), result.Range().GetFilename()) + matched = true + } + } + assert.True(t, matched) + }) + + }) + } +} diff --git a/test/fs_test.go b/test/fs_test.go new file mode 100644 index 000000000000..2ce5b517027b --- /dev/null +++ b/test/fs_test.go @@ -0,0 +1,24 @@ +package test + +import ( + "context" + "os" + "testing" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/pkg/scanners/terraform" +) + +func Test_OS_FS(t *testing.T) { + s := terraform.New( + options.ScannerWithDebug(os.Stderr), + options.ScannerWithEmbeddedPolicies(true), + options.ScannerWithEmbeddedLibraries(true), + ) + results, err := s.ScanFS(context.TODO(), os.DirFS("tf"), "fail") + require.NoError(t, err) + assert.Greater(t, len(results.GetFailed()), 0) +} diff --git a/test/ignore_test.go b/test/ignore_test.go new file mode 100644 index 000000000000..a9a9c0fcfec7 --- /dev/null +++ b/test/ignore_test.go @@ -0,0 +1,529 @@ +package test + +import ( + "fmt" + "strings" + "testing" + + "github.com/aquasecurity/defsec/pkg/providers" + "github.com/aquasecurity/defsec/pkg/rules" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/severity" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/stretchr/testify/assert" +) + +var exampleRule = scan.Rule{ + Provider: providers.AWSProvider, + Service: 
"service", + ShortCode: "abc123", + AVDID: "AWS-ABC-123", + Aliases: []string{"aws-other-abc123"}, + Severity: severity.High, + CustomChecks: scan.CustomChecks{ + Terraform: &scan.TerraformCustomCheck{ + RequiredLabels: []string{"bad"}, + Check: func(resourceBlock *terraform.Block, _ *terraform.Module) (results scan.Results) { + attr := resourceBlock.GetAttribute("secure") + if attr.IsNil() { + results.Add("example problem", resourceBlock) + } + if attr.IsFalse() { + results.Add("example problem", attr) + } + return + }, + }, + }, +} + +func Test_IgnoreAll(t *testing.T) { + + var testCases = []struct { + name string + inputOptions string + assertLength int + }{ + {name: "IgnoreAll", inputOptions: ` +resource "bad" "my-rule" { + secure = false // tfsec:ignore:* +} +`, assertLength: 0}, + {name: "IgnoreLineAboveTheBlock", inputOptions: ` +// tfsec:ignore:* +resource "bad" "my-rule" { + secure = false +} +`, assertLength: 0}, + {name: "IgnoreLineAboveTheBlockMatchingParamBool", inputOptions: ` +// tfsec:ignore:*[secure=false] +resource "bad" "my-rule" { + secure = false +} +`, assertLength: 0}, + {name: "IgnoreLineAboveTheBlockNotMatchingParamBool", inputOptions: ` +// tfsec:ignore:*[secure=true] +resource "bad" "my-rule" { + secure = false +} +`, assertLength: 1}, + {name: "IgnoreLineAboveTheBlockMatchingParamString", inputOptions: ` +// tfsec:ignore:*[name=myrule] +resource "bad" "my-rule" { + name = "myrule" + secure = false +} +`, assertLength: 0}, + {name: "IgnoreLineAboveTheBlockNotMatchingParamString", inputOptions: ` +// tfsec:ignore:*[name=myrule2] +resource "bad" "my-rule" { + name = "myrule" + secure = false +} +`, assertLength: 1}, + {name: "IgnoreLineAboveTheBlockMatchingParamInt", inputOptions: ` +// tfsec:ignore:*[port=123] +resource "bad" "my-rule" { + secure = false + port = 123 +} +`, assertLength: 0}, + {name: "IgnoreLineAboveTheBlockNotMatchingParamInt", inputOptions: ` +// tfsec:ignore:*[port=456] +resource "bad" "my-rule" { + secure = false + 
port = 123 +} +`, assertLength: 1}, + {name: "IgnoreLineStackedAboveTheBlock", inputOptions: ` +// tfsec:ignore:* +// tfsec:ignore:a +// tfsec:ignore:b +// tfsec:ignore:c +// tfsec:ignore:d +resource "bad" "my-rule" { + secure = false +} +`, assertLength: 0}, + {name: "IgnoreLineStackedAboveTheBlockWithoutMatch", inputOptions: ` +#tfsec:ignore:* + +#tfsec:ignore:x +#tfsec:ignore:a +#tfsec:ignore:b +#tfsec:ignore:c +#tfsec:ignore:d +resource "bad" "my-rule" { + secure = false +} +`, assertLength: 1}, + {name: "IgnoreLineStackedAboveTheBlockWithHashesWithoutSpaces", inputOptions: ` +#tfsec:ignore:* +#tfsec:ignore:a +#tfsec:ignore:b +#tfsec:ignore:c +#tfsec:ignore:d +resource "bad" "my-rule" { + secure = false +} +`, assertLength: 0}, + {name: "IgnoreLineStackedAboveTheBlockWithoutSpaces", inputOptions: ` +//tfsec:ignore:* +//tfsec:ignore:a +//tfsec:ignore:b +//tfsec:ignore:c +//tfsec:ignore:d +resource "bad" "my-rule" { + secure = false +} +`, assertLength: 0}, + {name: "IgnoreLineAboveTheLine", inputOptions: ` +resource "bad" "my-rule" { + # tfsec:ignore:aws-service-abc123 + secure = false +} +`, assertLength: 0}, + {name: "IgnoreWithExpDateIfDateBreachedThenDontIgnore", inputOptions: ` +resource "bad" "my-rule" { + secure = false # tfsec:ignore:aws-service-abc123:exp:2000-01-02 +} +`, assertLength: 1}, + {name: "IgnoreWithExpDateIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +resource "bad" "my-rule" { + secure = false # tfsec:ignore:aws-service-abc123:exp:2221-01-02 +} +`, assertLength: 0}, + {name: "IgnoreWithExpDateIfDateInvalidThenDropTheIgnore", inputOptions: ` +resource "bad" "my-rule" { + secure = false # tfsec:ignore:aws-service-abc123:exp:2221-13-02 +} +`, assertLength: 1}, + {name: "IgnoreAboveResourceBlockWithExpDateIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +#tfsec:ignore:aws-service-abc123:exp:2221-01-02 +resource "bad" "my-rule" { +} +`, assertLength: 0}, + {name: 
"IgnoreAboveResourceBlockWithExpDateAndMultipleIgnoresIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +# tfsec:ignore:aws-service-abc123:exp:2221-01-02 +resource "bad" "my-rule" { + +} +`, assertLength: 0}, + {name: "IgnoreForImpliedIAMResource", inputOptions: ` +terraform { +required_version = "~> 1.1.6" + +required_providers { +aws = { +source = "hashicorp/aws" +version = "~> 3.48" +} +} +} + +# Retrieve an IAM group defined outside of this Terraform config. + +# tfsec:ignore:aws-iam-enforce-mfa +data "aws_iam_group" "externally_defined_group" { +group_name = "group-name" # tfsec:ignore:aws-iam-enforce-mfa +} + +# Create an IAM policy and attach it to the group. + +# tfsec:ignore:aws-iam-enforce-mfa +resource "aws_iam_policy" "test_policy" { +name = "test-policy" # tfsec:ignore:aws-iam-enforce-mfa +policy = data.aws_iam_policy_document.test_policy.json # tfsec:ignore:aws-iam-enforce-mfa +} + +# tfsec:ignore:aws-iam-enforce-mfa +resource "aws_iam_group_policy_attachment" "test_policy_attachment" { +group = data.aws_iam_group.externally_defined_group.group_name # tfsec:ignore:aws-iam-enforce-mfa +policy_arn = aws_iam_policy.test_policy.arn # tfsec:ignore:aws-iam-enforce-mfa +} + +# tfsec:ignore:aws-iam-enforce-mfa +data "aws_iam_policy_document" "test_policy" { +statement { +sid = "PublishToCloudWatch" # tfsec:ignore:aws-iam-enforce-mfa +actions = [ +"cloudwatch:PutMetricData", # tfsec:ignore:aws-iam-enforce-mfa +] +resources = ["*"] # tfsec:ignore:aws-iam-enforce-mfa +} +} +`, assertLength: 0}, + {name: "TrivyIgnoreAll", inputOptions: ` +resource "bad" "my-rule" { + secure = false // trivy:ignore:* +} +`, assertLength: 0}, + {name: "TrivyIgnoreLineAboveTheBlock", inputOptions: ` +// trivy:ignore:* +resource "bad" "my-rule" { + secure = false +} +`, assertLength: 0}, + {name: "TrivyIgnoreLineAboveTheBlockMatchingParamBool", inputOptions: ` +// trivy:ignore:*[secure=false] +resource "bad" "my-rule" { + secure = false +} +`, assertLength: 0}, + {name: 
"TrivyIgnoreLineAboveTheBlockNotMatchingParamBool", inputOptions: ` +// trivy:ignore:*[secure=true] +resource "bad" "my-rule" { + secure = false +} +`, assertLength: 1}, + {name: "TrivyIgnoreLineAboveTheBlockMatchingParamString", inputOptions: ` +// trivy:ignore:*[name=myrule] +resource "bad" "my-rule" { + name = "myrule" + secure = false +} +`, assertLength: 0}, + {name: "TrivyIgnoreLineAboveTheBlockNotMatchingParamString", inputOptions: ` +// trivy:ignore:*[name=myrule2] +resource "bad" "my-rule" { + name = "myrule" + secure = false +} +`, assertLength: 1}, + {name: "TrivyIgnoreLineAboveTheBlockMatchingParamInt", inputOptions: ` +// trivy:ignore:*[port=123] +resource "bad" "my-rule" { + secure = false + port = 123 +} +`, assertLength: 0}, + {name: "TrivyIgnoreLineAboveTheBlockNotMatchingParamInt", inputOptions: ` +// trivy:ignore:*[port=456] +resource "bad" "my-rule" { + secure = false + port = 123 +} +`, assertLength: 1}, + {name: "TrivyIgnoreLineStackedAboveTheBlock", inputOptions: ` +// trivy:ignore:* +// trivy:ignore:a +// trivy:ignore:b +// trivy:ignore:c +// trivy:ignore:d +resource "bad" "my-rule" { + secure = false +} +`, assertLength: 0}, + {name: "TrivyIgnoreLineStackedAboveTheBlockWithoutMatch", inputOptions: ` +#trivy:ignore:* + +#trivy:ignore:x +#trivy:ignore:a +#trivy:ignore:b +#trivy:ignore:c +#trivy:ignore:d +resource "bad" "my-rule" { + secure = false +} +`, assertLength: 1}, + {name: "TrivyIgnoreLineStackedAboveTheBlockWithHashesWithoutSpaces", inputOptions: ` +#trivy:ignore:* +#trivy:ignore:a +#trivy:ignore:b +#trivy:ignore:c +#trivy:ignore:d +resource "bad" "my-rule" { + secure = false +} +`, assertLength: 0}, + {name: "TrivyIgnoreLineStackedAboveTheBlockWithoutSpaces", inputOptions: ` +//trivy:ignore:* +//trivy:ignore:a +//trivy:ignore:b +//trivy:ignore:c +//trivy:ignore:d +resource "bad" "my-rule" { + secure = false +} +`, assertLength: 0}, + {name: "TrivyIgnoreLineAboveTheLine", inputOptions: ` +resource "bad" "my-rule" { + # 
trivy:ignore:aws-service-abc123 + secure = false +} +`, assertLength: 0}, + {name: "TrivyIgnoreWithExpDateIfDateBreachedThenDontIgnore", inputOptions: ` +resource "bad" "my-rule" { + secure = false # trivy:ignore:aws-service-abc123:exp:2000-01-02 +} +`, assertLength: 1}, + {name: "TrivyIgnoreWithExpDateIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +resource "bad" "my-rule" { + secure = false # trivy:ignore:aws-service-abc123:exp:2221-01-02 +} +`, assertLength: 0}, + {name: "TrivyIgnoreWithExpDateIfDateInvalidThenDropTheIgnore", inputOptions: ` +resource "bad" "my-rule" { + secure = false # trivy:ignore:aws-service-abc123:exp:2221-13-02 +} +`, assertLength: 1}, + {name: "TrivyIgnoreAboveResourceBlockWithExpDateIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +#trivy:ignore:aws-service-abc123:exp:2221-01-02 +resource "bad" "my-rule" { +} +`, assertLength: 0}, + {name: "TrivyIgnoreAboveResourceBlockWithExpDateAndMultipleIgnoresIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +# trivy:ignore:aws-service-abc123:exp:2221-01-02 +resource "bad" "my-rule" { + +} +`, assertLength: 0}, + {name: "TrivyIgnoreForImpliedIAMResource", inputOptions: ` +terraform { +required_version = "~> 1.1.6" + +required_providers { +aws = { +source = "hashicorp/aws" +version = "~> 3.48" +} +} +} + +# Retrieve an IAM group defined outside of this Terraform config. + +# trivy:ignore:aws-iam-enforce-mfa +data "aws_iam_group" "externally_defined_group" { +group_name = "group-name" # trivy:ignore:aws-iam-enforce-mfa +} + +# Create an IAM policy and attach it to the group. 
+ +# trivy:ignore:aws-iam-enforce-mfa +resource "aws_iam_policy" "test_policy" { +name = "test-policy" # trivy:ignore:aws-iam-enforce-mfa +policy = data.aws_iam_policy_document.test_policy.json # trivy:ignore:aws-iam-enforce-mfa +} + +# trivy:ignore:aws-iam-enforce-mfa +resource "aws_iam_group_policy_attachment" "test_policy_attachment" { +group = data.aws_iam_group.externally_defined_group.group_name # trivy:ignore:aws-iam-enforce-mfa +policy_arn = aws_iam_policy.test_policy.arn # trivy:ignore:aws-iam-enforce-mfa +} + +# trivy:ignore:aws-iam-enforce-mfa +data "aws_iam_policy_document" "test_policy" { +statement { +sid = "PublishToCloudWatch" # trivy:ignore:aws-iam-enforce-mfa +actions = [ +"cloudwatch:PutMetricData", # trivy:ignore:aws-iam-enforce-mfa +] +resources = ["*"] # trivy:ignore:aws-iam-enforce-mfa +} +} +`, assertLength: 0}} + + reg := rules.Register(exampleRule) + defer rules.Deregister(reg) + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + results := scanHCL(t, tc.inputOptions) + assert.Len(t, results.GetFailed(), tc.assertLength) + }) + } +} + +func Test_IgnoreIgnoreWithExpiryAndWorkspaceAndWorkspaceSupplied(t *testing.T) { + reg := rules.Register(exampleRule) + defer rules.Deregister(reg) + + results := scanHCLWithWorkspace(t, ` +# tfsec:ignore:aws-service-abc123:exp:2221-01-02:ws:testworkspace +resource "bad" "my-rule" { +} +`, "testworkspace") + assert.Len(t, results.GetFailed(), 0) +} + +func Test_IgnoreInline(t *testing.T) { + reg := rules.Register(exampleRule) + defer rules.Deregister(reg) + + results := scanHCL(t, fmt.Sprintf(` + resource "bad" "sample" { + secure = false # tfsec:ignore:%s + } + `, exampleRule.LongID())) + assert.Len(t, results.GetFailed(), 0) +} + +func Test_IgnoreIgnoreWithExpiryAndWorkspaceButWrongWorkspaceSupplied(t *testing.T) { + reg := rules.Register(exampleRule) + defer rules.Deregister(reg) + + results := scanHCLWithWorkspace(t, ` +# 
tfsec:ignore:aws-service-abc123:exp:2221-01-02:ws:otherworkspace +resource "bad" "my-rule" { + +} +`, "testworkspace") + assert.Len(t, results.GetFailed(), 1) +} + +func Test_IgnoreWithAliasCodeStillIgnored(t *testing.T) { + reg := rules.Register(exampleRule) + defer rules.Deregister(reg) + + results := scanHCLWithWorkspace(t, ` +# tfsec:ignore:aws-other-abc123 +resource "bad" "my-rule" { + +} +`, "testworkspace") + assert.Len(t, results.GetFailed(), 0) +} + +func Test_TrivyIgnoreIgnoreWithExpiryAndWorkspaceAndWorkspaceSupplied(t *testing.T) { + reg := rules.Register(exampleRule) + defer rules.Deregister(reg) + + results := scanHCLWithWorkspace(t, ` +# trivy:ignore:aws-service-abc123:exp:2221-01-02:ws:testworkspace +resource "bad" "my-rule" { +} +`, "testworkspace") + assert.Len(t, results.GetFailed(), 0) +} + +func Test_TrivyIgnoreIgnoreWithExpiryAndWorkspaceButWrongWorkspaceSupplied(t *testing.T) { + reg := rules.Register(exampleRule) + defer rules.Deregister(reg) + + results := scanHCLWithWorkspace(t, ` +# trivy:ignore:aws-service-abc123:exp:2221-01-02:ws:otherworkspace +resource "bad" "my-rule" { + +} +`, "testworkspace") + assert.Len(t, results.GetFailed(), 1) +} + +func Test_TrivyIgnoreWithAliasCodeStillIgnored(t *testing.T) { + reg := rules.Register(exampleRule) + defer rules.Deregister(reg) + + results := scanHCLWithWorkspace(t, ` +# trivy:ignore:aws-other-abc123 +resource "bad" "my-rule" { + +} +`, "testworkspace") + assert.Len(t, results.GetFailed(), 0) +} + +func Test_TrivyIgnoreInline(t *testing.T) { + reg := rules.Register(exampleRule) + defer rules.Deregister(reg) + + results := scanHCL(t, fmt.Sprintf(` + resource "bad" "sample" { + secure = false # trivy:ignore:%s + } + `, exampleRule.LongID())) + assert.Len(t, results.GetFailed(), 0) +} + +func Test_IgnoreInlineByAVDID(t *testing.T) { + testCases := []struct { + input string + }{ + { + input: ` + resource "bad" "sample" { + secure = false # tfsec:ignore:%s + } + `, + }, + { + input: ` + resource 
"bad" "sample" { + secure = false # trivy:ignore:%s + } + `, + }, + } + + for _, tc := range testCases { + tc := tc + for _, id := range []string{exampleRule.AVDID, strings.ToLower(exampleRule.AVDID), exampleRule.ShortCode, exampleRule.LongID()} { + id := id + t.Run("", func(t *testing.T) { + reg := rules.Register(exampleRule) + defer rules.Deregister(reg) + results := scanHCL(t, fmt.Sprintf(tc.input, id)) + assert.Len(t, results.GetFailed(), 0) + }) + } + } +} diff --git a/test/json_test.go b/test/json_test.go new file mode 100644 index 000000000000..5f7f31446fa0 --- /dev/null +++ b/test/json_test.go @@ -0,0 +1,104 @@ +package test + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/providers" + "github.com/aquasecurity/defsec/pkg/rules" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/severity" + "github.com/aquasecurity/defsec/pkg/terraform" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +func TestScanningJSON(t *testing.T) { + + var tests = []struct { + name string + source string + shouldFail bool + }{ + { + name: "check results are picked up in tf json configs", + source: ` + { + "provider": { + "aws": { + "profile": null, + "region": "eu-west-1" + } + }, + "resource": { + "bad": { + "thing": { + "type": "ingress", + "cidr_blocks": ["0.0.0.0/0"], + "description": "testing" + } + } + } + }`, + shouldFail: true, + }, + { + name: "check attributes are checked in tf json configs", + source: ` + { + "provider": { + "aws": { + "profile": null, + "region": "eu-west-1" + } + }, + "resource": { + "bad": { + "or_not": { + "secure": true + } + } + } + }`, + shouldFail: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + r1 := scan.Rule{ + Provider: providers.AWSProvider, + Service: "service", + ShortCode: "abc123", + Severity: severity.High, + CustomChecks: scan.CustomChecks{ + Terraform: &scan.TerraformCustomCheck{ + 
RequiredLabels: []string{"bad"}, + Check: func(resourceBlock *terraform.Block, _ *terraform.Module) (results scan.Results) { + if resourceBlock.GetAttribute("secure").IsTrue() { + return + } + results.Add("something", resourceBlock) + return + }, + }, + }, + } + reg := rules.Register(r1) + defer rules.Deregister(reg) + + results := scanJSON(t, test.source) + var include, exclude string + if test.shouldFail { + include = r1.LongID() + } else { + exclude = r1.LongID() + } + if include != "" { + testutil.AssertRuleFound(t, include, results, "false negative found") + } + if exclude != "" { + testutil.AssertRuleNotFound(t, exclude, results, "false positive found") + } + }) + } +} diff --git a/test/kubernetes_test.go b/test/kubernetes_test.go new file mode 100644 index 000000000000..0987f0866d14 --- /dev/null +++ b/test/kubernetes_test.go @@ -0,0 +1,131 @@ +package test + +import ( + "context" + "fmt" + "os" + "strings" + "testing" + + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/pkg/scanners/kubernetes" +) + +func Test_Kubernetes_RegoPoliciesFromDisk(t *testing.T) { + t.Parallel() + + entries, err := os.ReadDir("./testdata/kubernetes") + require.NoError(t, err) + + scanner := kubernetes.NewScanner( + options.ScannerWithPerResultTracing(true), + options.ScannerWithEmbeddedPolicies(true), + options.ScannerWithEmbeddedLibraries(true), + ) + + srcFS := os.DirFS("../") + + results, err := scanner.ScanFS(context.TODO(), srcFS, "test/testdata/kubernetes") + require.NoError(t, err) + + for _, entry := range entries { + if !entry.IsDir() { + continue + } + if entry.Name() == "optional" { + continue + } + t.Run(entry.Name(), func(t *testing.T) { + var matched bool + for _, result := range results { + if result.Rule().HasID(entry.Name()) { + + failCase := 
fmt.Sprintf("test/testdata/kubernetes/%s/denied.yaml", entry.Name()) + passCase := fmt.Sprintf("test/testdata/kubernetes/%s/allowed.yaml", entry.Name()) + + switch result.Range().GetFilename() { + case failCase: + assert.Equal(t, scan.StatusFailed, result.Status(), "Rule should have failed, but didn't.") + assert.Greater(t, result.Range().GetStartLine(), 0, "We should have line numbers for a failure") + assert.Greater(t, result.Range().GetEndLine(), 0, "We should have line numbers for a failure") + matched = true + case passCase: + assert.Equal(t, scan.StatusPassed, result.Status(), "Rule should have passed, but didn't.") + matched = true + default: + if strings.Contains(result.Range().GetFilename(), entry.Name()) { + t.Fatal(result.Range().GetFilename()) + } + continue + } + + if t.Failed() { + fmt.Println("Test failed - rego trace follows:") + for _, trace := range result.Traces() { + fmt.Println(trace) + } + } + } + } + assert.True(t, matched, "Neither a pass or fail result was found for %s - did you add example code for it?", entry.Name()) + }) + } +} + +func Test_Kubernetes_RegoPoliciesEmbedded(t *testing.T) { + t.Parallel() + + entries, err := os.ReadDir("./testdata/kubernetes") + require.NoError(t, err) + + scanner := kubernetes.NewScanner(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true), options.ScannerWithEmbeddedLibraries(true)) + + srcFS := os.DirFS("../") + + results, err := scanner.ScanFS(context.TODO(), srcFS, "test/testdata/kubernetes") + require.NoError(t, err) + + for _, entry := range entries { + if !entry.IsDir() { + continue + } + if entry.Name() == "optional" { + continue + } + t.Run(entry.Name(), func(t *testing.T) { + var matched bool + for _, result := range results { + if result.Rule().HasID(entry.Name()) { + + failCase := fmt.Sprintf("test/testdata/kubernetes/%s/denied.yaml", entry.Name()) + passCase := fmt.Sprintf("test/testdata/kubernetes/%s/allowed.yaml", entry.Name()) + + switch 
result.Range().GetFilename() { + case failCase: + assert.Equal(t, scan.StatusFailed, result.Status(), "Rule should have failed, but didn't.") + assert.Greater(t, result.Range().GetStartLine(), 0, "We should have line numbers for a failure") + assert.Greater(t, result.Range().GetEndLine(), 0, "We should have line numbers for a failure") + matched = true + case passCase: + assert.Equal(t, scan.StatusPassed, result.Status(), "Rule should have passed, but didn't.") + matched = true + default: + continue + } + + if t.Failed() { + fmt.Println("Test failed - rego trace follows:") + for _, trace := range result.Traces() { + fmt.Println(trace) + } + } + } + } + assert.True(t, matched, "Neither a pass or fail result was found for %s - did you add example code for it?", entry.Name()) + }) + } +} diff --git a/test/module_test.go b/test/module_test.go new file mode 100644 index 000000000000..e0d6fdeca05f --- /dev/null +++ b/test/module_test.go @@ -0,0 +1,632 @@ +package test + +import ( + "bytes" + "context" + "fmt" + "os" + "testing" + + "github.com/aquasecurity/defsec/pkg/providers" + "github.com/aquasecurity/defsec/pkg/rules" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/severity" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy-policies/checks/cloud/aws/iam" + "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" + "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" +) + +var badRule = scan.Rule{ + Provider: providers.AWSProvider, + Service: "service", + ShortCode: "abc", + Summary: "A stupid example check for a test.", + Impact: "You will look stupid", + Resolution: "Don't do stupid stuff", + Explanation: "Bad should not be set.", + Severity: 
severity.High, + CustomChecks: scan.CustomChecks{ + Terraform: &scan.TerraformCustomCheck{ + RequiredTypes: []string{"resource"}, + RequiredLabels: []string{"problem"}, + Check: func(resourceBlock *terraform.Block, _ *terraform.Module) (results scan.Results) { + if attr := resourceBlock.GetAttribute("bad"); attr.IsTrue() { + results.Add("bad", attr) + } + return + }, + }, + }, +} + +// IMPORTANT: if this test is failing, you probably need to set the version of go-cty in go.mod to the same version that hcl uses. +func Test_GoCtyCompatibilityIssue(t *testing.T) { + registered := rules.Register(badRule) + defer rules.Deregister(registered) + + fs := testutil.CreateFS(t, map[string]string{ + "/project/main.tf": ` +data "aws_vpc" "default" { + default = true +} + +module "test" { + source = "../modules/problem/" + cidr_block = data.aws_vpc.default.cidr_block +} +`, + "/modules/problem/main.tf": ` +variable "cidr_block" {} + +variable "open" { + default = false +} + +resource "aws_security_group" "this" { + name = "Test" + + ingress { + description = "HTTPs" + from_port = 443 + to_port = 443 + protocol = "tcp" + self = ! var.open + cidr_blocks = var.open ? 
[var.cidr_block] : null + } +} + +resource "problem" "uhoh" { + bad = true +} +`, + }) + + debug := bytes.NewBuffer([]byte{}) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true), options.ParserWithDebug(debug)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") + if t.Failed() { + fmt.Println(debug.String()) + } +} + +func Test_ProblemInModuleInSiblingDir(t *testing.T) { + + registered := rules.Register(badRule) + defer rules.Deregister(registered) + + fs := testutil.CreateFS(t, map[string]string{ + "/project/main.tf": ` +module "something" { + source = "../modules/problem" +} +`, + "modules/problem/main.tf": ` +resource "problem" "uhoh" { + bad = true +} +`}, + ) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := executor.New().Execute(modules) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") + +} + +func Test_ProblemInModuleIgnored(t *testing.T) { + + registered := rules.Register(badRule) + defer rules.Deregister(registered) + + fs := testutil.CreateFS(t, map[string]string{ + "/project/main.tf": ` +#tfsec:ignore:aws-service-abc +module "something" { + source = "../modules/problem" +} +`, + "modules/problem/main.tf": ` +resource "problem" "uhoh" { + bad = true +} +`}, + ) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := executor.New().Execute(modules) + testutil.AssertRuleNotFound(t, badRule.LongID(), results, "") + +} + +func 
Test_ProblemInModuleInSubdirectory(t *testing.T) { + + registered := rules.Register(badRule) + defer rules.Deregister(registered) + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +module "something" { + source = "./modules/problem" +} +`, + "project/modules/problem/main.tf": ` +resource "problem" "uhoh" { + bad = true +} +`}) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := executor.New().Execute(modules) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") + +} + +func Test_ProblemInModuleInParentDir(t *testing.T) { + + registered := rules.Register(badRule) + defer rules.Deregister(registered) + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +module "something" { + source = "../problem" +} +`, + "problem/main.tf": ` +resource "problem" "uhoh" { + bad = true +} +`}) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := executor.New().Execute(modules) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") + +} + +func Test_ProblemInModuleReuse(t *testing.T) { + + registered := rules.Register(badRule) + defer rules.Deregister(registered) + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +module "something_good" { + source = "../modules/problem" + bad = false +} + +module "something_bad" { + source = "../modules/problem" + bad = true +} +`, + "modules/problem/main.tf": ` +variable "bad" { + default = false +} +resource "problem" "uhoh" { + bad = var.bad +} +`}) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := 
p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := executor.New().Execute(modules) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") + +} + +func Test_ProblemInNestedModule(t *testing.T) { + + registered := rules.Register(badRule) + defer rules.Deregister(registered) + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +module "something" { + source = "../modules/a" +} +`, + "modules/a/main.tf": ` + module "something" { + source = "../../modules/b" +} +`, + "modules/b/main.tf": ` +module "something" { + source = "../c" +} +`, + "modules/c/main.tf": ` +resource "problem" "uhoh" { + bad = true +} +`, + }) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true), options.ParserWithDebug(os.Stderr)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := executor.New().Execute(modules) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") + +} + +func Test_ProblemInReusedNestedModule(t *testing.T) { + + registered := rules.Register(badRule) + defer rules.Deregister(registered) + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +module "something" { + source = "../modules/a" + bad = false +} + +module "something-bad" { + source = "../modules/a" + bad = true +} +`, + "modules/a/main.tf": ` +variable "bad" { + default = false +} +module "something" { + source = "../../modules/b" + bad = var.bad +} +`, + "modules/b/main.tf": ` +variable "bad" { + default = false +} +module "something" { + source = "../c" + bad = var.bad +} +`, + "modules/c/main.tf": ` +variable "bad" { + default = false +} +resource "problem" "uhoh" { + bad = var.bad +} +`, + }) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := 
executor.New().Execute(modules) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") +} + +func Test_ProblemInInitialisedModule(t *testing.T) { + + registered := rules.Register(badRule) + defer rules.Deregister(registered) + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +module "something" { + source = "../modules/somewhere" + bad = false +} +`, + "modules/somewhere/main.tf": ` +module "something_nested" { + count = 1 + source = "github.com/some/module.git" + bad = true +} + +variable "bad" { + default = false +} + +`, + "project/.terraform/modules/something.something_nested/main.tf": ` +variable "bad" { + default = false +} +resource "problem" "uhoh" { + bad = var.bad +} +`, + "project/.terraform/modules/modules.json": ` + {"Modules":[ + {"Key":"something","Source":"../modules/somewhere","Version":"2.35.0","Dir":"../modules/somewhere"}, + {"Key":"something.something_nested","Source":"git::https://github.com/some/module.git","Version":"2.35.0","Dir":".terraform/modules/something.something_nested"} + ]} +`, + }) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := executor.New().Execute(modules) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") +} + +func Test_ProblemInReusedInitialisedModule(t *testing.T) { + + registered := rules.Register(badRule) + defer rules.Deregister(registered) + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +module "something" { + source = "/nowhere" + bad = false +} +module "something2" { + source = "/nowhere" + bad = true +} +`, + "project/.terraform/modules/a/main.tf": ` +variable "bad" { + default = false +} +resource "problem" "uhoh" { + bad = var.bad +} +`, + "project/.terraform/modules/modules.json": ` + 
{"Modules":[{"Key":"something","Source":"/nowhere","Version":"2.35.0","Dir":".terraform/modules/a"},{"Key":"something2","Source":"/nowhere","Version":"2.35.0","Dir":".terraform/modules/a"}]} +`, + }) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := executor.New().Execute(modules) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") + +} + +func Test_ProblemInDuplicateModuleNameAndPath(t *testing.T) { + registered := rules.Register(badRule) + defer rules.Deregister(registered) + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +module "something" { + source = "../modules/a" + bad = 0 +} + +module "something-bad" { + source = "../modules/a" + bad = 1 +} +`, + "modules/a/main.tf": ` +variable "bad" { + default = 0 +} +module "something" { + source = "../b" + bad = var.bad +} +`, + "modules/b/main.tf": ` +variable "bad" { + default = 0 +} +module "something" { + source = "../c" + bad = var.bad +} +`, + "modules/c/main.tf": ` +variable "bad" { + default = 0 +} +resource "problem" "uhoh" { + count = var.bad + bad = true +} +`, + }) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := executor.New().Execute(modules) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") + +} + +func Test_Dynamic_Variables(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +resource "something" "this" { + + dynamic "blah" { + for_each = ["a"] + + content { + ok = true + } + } +} + +resource "bad" "thing" { + secure = something.this.blah[0].ok +} +`}) + + r1 := scan.Rule{ + Provider: providers.AWSProvider, + Service: "service", + ShortCode: "abc123", + Severity: 
severity.High, + CustomChecks: scan.CustomChecks{ + Terraform: &scan.TerraformCustomCheck{ + RequiredLabels: []string{"bad"}, + Check: func(resourceBlock *terraform.Block, _ *terraform.Module) (results scan.Results) { + if resourceBlock.GetAttribute("secure").IsTrue() { + return + } + results.Add("example problem", resourceBlock) + return + }, + }, + }, + } + reg := rules.Register(r1) + defer rules.Deregister(reg) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := executor.New().Execute(modules) + testutil.AssertRuleFound(t, r1.LongID(), results, "") +} + +func Test_Dynamic_Variables_FalsePositive(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +resource "something" "else" { + x = 1 + dynamic "blah" { + for_each = toset(["true"]) + + content { + ok = each.value + } + } +} + +resource "bad" "thing" { + secure = something.else.blah.ok +} +`}) + + r1 := scan.Rule{ + Provider: providers.AWSProvider, + Service: "service", + ShortCode: "abc123", + Severity: severity.High, + CustomChecks: scan.CustomChecks{ + Terraform: &scan.TerraformCustomCheck{ + RequiredLabels: []string{"bad"}, + Check: func(resourceBlock *terraform.Block, _ *terraform.Module) (results scan.Results) { + if resourceBlock.GetAttribute("secure").IsTrue() { + return + } + results.Add("example problem", resourceBlock) + return + }, + }, + }, + } + reg := rules.Register(r1) + defer rules.Deregister(reg) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := executor.New().Execute(modules) + testutil.AssertRuleNotFound(t, r1.LongID(), results, "") +} + +func Test_ReferencesPassedToNestedModule(t *testing.T) { + + fs := 
testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` + +resource "aws_iam_group" "developers" { + name = "developers" +} + +module "something" { + source = "../modules/a" + group = aws_iam_group.developers.name +} +`, + "modules/a/main.tf": ` +variable "group" { + type = string +} + +resource aws_iam_group_policy mfa { + group = var.group + policy = data.aws_iam_policy_document.policy.json +} + +data "aws_iam_policy_document" "policy" { + statement { + sid = "main" + effect = "Allow" + + actions = ["s3:*"] + resources = ["*"] + condition { + test = "Bool" + variable = "aws:MultiFactorAuthPresent" + values = ["true"] + } + } +} +`}) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := executor.New().Execute(modules) + testutil.AssertRuleNotFound(t, iam.CheckEnforceGroupMFA.LongID(), results, "") + +} diff --git a/test/performance_test.go b/test/performance_test.go new file mode 100644 index 000000000000..2e8896e744da --- /dev/null +++ b/test/performance_test.go @@ -0,0 +1,59 @@ +package test + +import ( + "context" + "fmt" + "io/fs" + "testing" + + "github.com/aquasecurity/defsec/pkg/rules" + + "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" + "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" +) + +func BenchmarkCalculate(b *testing.B) { + + f, err := createBadBlocks() + if err != nil { + b.Fatal(err) + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + p := parser.New(f, "", parser.OptionStopOnHCLError(true)) + if err := p.ParseFS(context.TODO(), "project"); err != nil { + b.Fatal(err) + } + modules, _, err := p.EvaluateAll(context.TODO()) + if err != nil { + b.Fatal(err) + } + _, _, _ = executor.New().Execute(modules) + } +} + +func createBadBlocks() (fs.FS, error) { + 
+ files := make(map[string]string) + + files["/project/main.tf"] = ` +module "something" { + source = "../modules/problem" +} +` + + for _, rule := range rules.GetRegistered() { + if rule.GetRule().Terraform == nil { + continue + } + for i, bad := range rule.GetRule().Terraform.BadExamples { + filename := fmt.Sprintf("/modules/problem/%s-%d.tf", rule.GetRule().LongID(), i) + files[filename] = bad + } + } + + f := testutil.CreateFS(&testing.T{}, files) + return f, nil +} diff --git a/test/rules_test.go b/test/rules_test.go new file mode 100644 index 000000000000..4772d803e0eb --- /dev/null +++ b/test/rules_test.go @@ -0,0 +1,41 @@ +package test + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/rules" +) + +func TestAVDIDs(t *testing.T) { + existing := make(map[string]struct{}) + for _, rule := range rules.GetRegistered(framework.ALL) { + t.Run(rule.LongID(), func(t *testing.T) { + if rule.GetRule().AVDID == "" { + t.Errorf("Rule has no AVD ID: %#v", rule) + return + } + if _, ok := existing[rule.GetRule().AVDID]; ok { + t.Errorf("Rule detected with duplicate AVD ID: %s", rule.GetRule().AVDID) + } + }) + existing[rule.GetRule().AVDID] = struct{}{} + } +} + +//func TestRulesAgainstExampleCode(t *testing.T) { +// for _, rule := range rules.GetRegistered(framework.ALL) { +// testName := fmt.Sprintf("%s/%s", rule.GetRule().AVDID, rule.LongID()) +// t.Run(testName, func(t *testing.T) { +// rule := rule +// t.Parallel() +// +// t.Run("avd docs", func(t *testing.T) { +// provider := strings.ToLower(rule.GetRule().Provider.ConstName()) +// service := strings.ToLower(strings.ReplaceAll(rule.GetRule().Service, "-", "")) +// _, err := os.Stat(filepath.Join("..", "avd_docs", provider, service, rule.GetRule().AVDID, "docs.md")) +// require.NoError(t, err) +// }) +// }) +// } +//} diff --git a/test/setup_test.go b/test/setup_test.go new file mode 100644 index 000000000000..ec79f26a5398 --- /dev/null +++ 
b/test/setup_test.go @@ -0,0 +1,59 @@ +package test + +import ( + "context" + "testing" + + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy-iac/test/testutil" + tfScanner "github.com/aquasecurity/trivy/pkg/scanners/terraform" + "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" +) + +func createModulesFromSource(t *testing.T, source string, ext string) terraform.Modules { + fs := testutil.CreateFS(t, map[string]string{ + "source" + ext: source, + }) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + if err := p.ParseFS(context.TODO(), "."); err != nil { + t.Fatal(err) + } + modules, _, err := p.EvaluateAll(context.TODO()) + if err != nil { + t.Fatalf("parse error: %s", err) + } + return modules +} + +func scanHCLWithWorkspace(t *testing.T, source string, workspace string) scan.Results { + return scanHCL(t, source, tfScanner.ScannerWithWorkspaceName(workspace)) +} + +func scanHCL(t *testing.T, source string, opts ...options.ScannerOption) scan.Results { + + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": source, + }) + + localScanner := tfScanner.New(append(opts, options.ScannerWithEmbeddedPolicies(false))...) 
+ results, err := localScanner.ScanFS(context.TODO(), fs, ".") + require.NoError(t, err) + return results +} + +func scanJSON(t *testing.T, source string) scan.Results { + + fs := testutil.CreateFS(t, map[string]string{ + "main.tf.json": source, + }) + + s := tfScanner.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + results, _, err := s.ScanFSWithMetrics(context.TODO(), fs, ".") + require.NoError(t, err) + return results +} diff --git a/test/testdata/dockerfile/DS001/Dockerfile.allowed b/test/testdata/dockerfile/DS001/Dockerfile.allowed new file mode 100644 index 000000000000..ee5c6cc930dc --- /dev/null +++ b/test/testdata/dockerfile/DS001/Dockerfile.allowed @@ -0,0 +1,3 @@ +FROM debian:9 +RUN apt-get update && apt-get -y install vim && apt-get clean +USER foo diff --git a/test/testdata/dockerfile/DS001/Dockerfile.denied b/test/testdata/dockerfile/DS001/Dockerfile.denied new file mode 100644 index 000000000000..5e2b193a0d4e --- /dev/null +++ b/test/testdata/dockerfile/DS001/Dockerfile.denied @@ -0,0 +1,3 @@ +FROM debian:latest +RUN apt-get update && apt-get -y install vim && apt-get clean +USER foo diff --git a/test/testdata/dockerfile/DS002/Dockerfile.allowed b/test/testdata/dockerfile/DS002/Dockerfile.allowed new file mode 100644 index 000000000000..8bb3de30ba3b --- /dev/null +++ b/test/testdata/dockerfile/DS002/Dockerfile.allowed @@ -0,0 +1,3 @@ +FROM debian:9 +RUN apt-get update && apt-get -y install vim && apt-get clean +USER foo \ No newline at end of file diff --git a/test/testdata/dockerfile/DS002/Dockerfile.denied b/test/testdata/dockerfile/DS002/Dockerfile.denied new file mode 100644 index 000000000000..9b996cc7b47d --- /dev/null +++ b/test/testdata/dockerfile/DS002/Dockerfile.denied @@ -0,0 +1,2 @@ +FROM debian:9 +RUN apt-get update && apt-get -y install vim && apt-get clean diff --git a/test/testdata/dockerfile/DS004/Dockerfile.allowed b/test/testdata/dockerfile/DS004/Dockerfile.allowed new file mode 100644 
index 000000000000..8af97be727f6 --- /dev/null +++ b/test/testdata/dockerfile/DS004/Dockerfile.allowed @@ -0,0 +1,3 @@ +FROM alpine:3.13 +USER mike +EXPOSE 8080 diff --git a/test/testdata/dockerfile/DS004/Dockerfile.denied b/test/testdata/dockerfile/DS004/Dockerfile.denied new file mode 100644 index 000000000000..91016100d36f --- /dev/null +++ b/test/testdata/dockerfile/DS004/Dockerfile.denied @@ -0,0 +1,3 @@ +FROM alpine:3.13 +USER mike +EXPOSE 22 \ No newline at end of file diff --git a/test/testdata/dockerfile/DS005/Dockerfile.allowed b/test/testdata/dockerfile/DS005/Dockerfile.allowed new file mode 100644 index 000000000000..28d89b4361f6 --- /dev/null +++ b/test/testdata/dockerfile/DS005/Dockerfile.allowed @@ -0,0 +1,3 @@ +FROM alpine:3.13 +USER mike +ADD "/target/resources.tar.gz" "resources" diff --git a/test/testdata/dockerfile/DS005/Dockerfile.denied b/test/testdata/dockerfile/DS005/Dockerfile.denied new file mode 100644 index 000000000000..98c1249f9f34 --- /dev/null +++ b/test/testdata/dockerfile/DS005/Dockerfile.denied @@ -0,0 +1,4 @@ +FROM alpine:3.13 +USER mike +ADD "/target/resources.tar.gz" "resources.jar" +ADD "/target/app.jar" "app.jar" \ No newline at end of file diff --git a/test/testdata/dockerfile/DS006/Dockerfile.allowed b/test/testdata/dockerfile/DS006/Dockerfile.allowed new file mode 100644 index 000000000000..529198acf3b7 --- /dev/null +++ b/test/testdata/dockerfile/DS006/Dockerfile.allowed @@ -0,0 +1,6 @@ +FROM golang:1.7.3 as dep +COPY /binary / + +FROM alpine:3.13 +USER mike +ENTRYPOINT [ "/opt/app/run.sh --port 8080" ] \ No newline at end of file diff --git a/test/testdata/dockerfile/DS006/Dockerfile.denied b/test/testdata/dockerfile/DS006/Dockerfile.denied new file mode 100644 index 000000000000..cdb11213d551 --- /dev/null +++ b/test/testdata/dockerfile/DS006/Dockerfile.denied @@ -0,0 +1,6 @@ +FROM golang:1.7.3 as dep +COPY --from=dep /binary / + +FROM alpine:3.13 +USER mike +ENTRYPOINT [ "/opt/app/run.sh --port 8080" ] \ No newline at 
end of file diff --git a/test/testdata/dockerfile/DS007/Dockerfile.allowed b/test/testdata/dockerfile/DS007/Dockerfile.allowed new file mode 100644 index 000000000000..37b3bb398312 --- /dev/null +++ b/test/testdata/dockerfile/DS007/Dockerfile.allowed @@ -0,0 +1,6 @@ +FROM golang:1.7.3 as dep +COPY /binary / + +FROM alpine:3.13 +USER mike +ENTRYPOINT [ "/opt/app/run.sh --port 8080" ] \ No newline at end of file diff --git a/test/testdata/dockerfile/DS007/Dockerfile.denied b/test/testdata/dockerfile/DS007/Dockerfile.denied new file mode 100644 index 000000000000..228966f1f1ad --- /dev/null +++ b/test/testdata/dockerfile/DS007/Dockerfile.denied @@ -0,0 +1,8 @@ +FROM golang:1.7.3 as dep +COPY dep /binary / +ENTRYPOINT [ "/opt/app/run.sh --port 8080" ] +ENTRYPOINT [ "/opt/app/run.sh --port 8080" ] + +FROM alpine:3.13 +USER mike +ENTRYPOINT [ "/opt/app/run.sh --port 8080" ] \ No newline at end of file diff --git a/test/testdata/dockerfile/DS008/Dockerfile.allowed b/test/testdata/dockerfile/DS008/Dockerfile.allowed new file mode 100644 index 000000000000..f66bb31d8ef7 --- /dev/null +++ b/test/testdata/dockerfile/DS008/Dockerfile.allowed @@ -0,0 +1,3 @@ +FROM alpine:3.13 +USER mike +EXPOSE 65530 8080 diff --git a/test/testdata/dockerfile/DS008/Dockerfile.denied b/test/testdata/dockerfile/DS008/Dockerfile.denied new file mode 100644 index 000000000000..89c465a66d44 --- /dev/null +++ b/test/testdata/dockerfile/DS008/Dockerfile.denied @@ -0,0 +1,3 @@ +FROM alpine:3.13 +USER mike +EXPOSE 65536 8080 diff --git a/test/testdata/dockerfile/DS009/Dockerfile.allowed b/test/testdata/dockerfile/DS009/Dockerfile.allowed new file mode 100644 index 000000000000..1db32e18327e --- /dev/null +++ b/test/testdata/dockerfile/DS009/Dockerfile.allowed @@ -0,0 +1,3 @@ +FROM alpine:3.13 +USER mike +WORKDIR /path/to/workdir diff --git a/test/testdata/dockerfile/DS009/Dockerfile.denied b/test/testdata/dockerfile/DS009/Dockerfile.denied new file mode 100644 index 000000000000..422d65f083b7 --- 
/dev/null +++ b/test/testdata/dockerfile/DS009/Dockerfile.denied @@ -0,0 +1,3 @@ +FROM alpine:3.13 +USER mike +WORKDIR path/to/workdir diff --git a/test/testdata/dockerfile/DS010/Dockerfile.allowed b/test/testdata/dockerfile/DS010/Dockerfile.allowed new file mode 100644 index 000000000000..67232624f130 --- /dev/null +++ b/test/testdata/dockerfile/DS010/Dockerfile.allowed @@ -0,0 +1,3 @@ +FROM alpine:3.13 +RUN pip install --upgrade pip +USER mike diff --git a/test/testdata/dockerfile/DS010/Dockerfile.denied b/test/testdata/dockerfile/DS010/Dockerfile.denied new file mode 100644 index 000000000000..cd63e40e132c --- /dev/null +++ b/test/testdata/dockerfile/DS010/Dockerfile.denied @@ -0,0 +1,3 @@ +FROM alpine:3.13 +RUN sudo pip install --upgrade pip +USER mike diff --git a/test/testdata/dockerfile/DS011/Dockerfile.allowed b/test/testdata/dockerfile/DS011/Dockerfile.allowed new file mode 100644 index 000000000000..c5d7133a7b7e --- /dev/null +++ b/test/testdata/dockerfile/DS011/Dockerfile.allowed @@ -0,0 +1,3 @@ +FROM alpine:3.13 +USER mike +COPY ["package.json", "yarn.lock", "myapp/"] diff --git a/test/testdata/dockerfile/DS011/Dockerfile.denied b/test/testdata/dockerfile/DS011/Dockerfile.denied new file mode 100644 index 000000000000..72df0188ffb9 --- /dev/null +++ b/test/testdata/dockerfile/DS011/Dockerfile.denied @@ -0,0 +1,3 @@ +FROM alpine:3.13 +USER mike +COPY ["package.json", "yarn.lock", "myapp"] diff --git a/test/testdata/dockerfile/DS012/Dockerfile.allowed b/test/testdata/dockerfile/DS012/Dockerfile.allowed new file mode 100644 index 000000000000..a3eeb0f4318a --- /dev/null +++ b/test/testdata/dockerfile/DS012/Dockerfile.allowed @@ -0,0 +1,10 @@ +FROM baseImage:1.1 +RUN test + +FROM debian:jesse2 as build2 +USER mike +RUN stuff + +FROM debian:jesse1 as build1 +USER mike +RUN more_stuff \ No newline at end of file diff --git a/test/testdata/dockerfile/DS012/Dockerfile.denied b/test/testdata/dockerfile/DS012/Dockerfile.denied new file mode 100644 index 
000000000000..86e7882d3ada --- /dev/null +++ b/test/testdata/dockerfile/DS012/Dockerfile.denied @@ -0,0 +1,10 @@ +FROM baseImage:1.1 +RUN test + +FROM debian:jesse2 as build +USER mike +RUN stuff + +FROM debian:jesse1 as build +USER mike +RUN more_stuff \ No newline at end of file diff --git a/test/testdata/dockerfile/DS013/Dockerfile.allowed b/test/testdata/dockerfile/DS013/Dockerfile.allowed new file mode 100644 index 000000000000..c14262268770 --- /dev/null +++ b/test/testdata/dockerfile/DS013/Dockerfile.allowed @@ -0,0 +1,4 @@ +FROM nginx:2.2 +WORKDIR /usr/share/nginx/html +USER mike +CMD cd /usr/share/nginx/html && sed -e s/Docker/\"$AUTHOR\"/ Hello_docker.html > index.html ; nginx -g 'daemon off;' \ No newline at end of file diff --git a/test/testdata/dockerfile/DS013/Dockerfile.denied b/test/testdata/dockerfile/DS013/Dockerfile.denied new file mode 100644 index 000000000000..e5a769aadc86 --- /dev/null +++ b/test/testdata/dockerfile/DS013/Dockerfile.denied @@ -0,0 +1,4 @@ +FROM nginx:2.2 +RUN cd /usr/share/nginx/html +USER mike +CMD cd /usr/share/nginx/html && sed -e s/Docker/\"$AUTHOR\"/ Hello_docker.html > index.html ; nginx -g 'daemon off;' \ No newline at end of file diff --git a/test/testdata/dockerfile/DS014/Dockerfile.allowed b/test/testdata/dockerfile/DS014/Dockerfile.allowed new file mode 100644 index 000000000000..b46d24c9f879 --- /dev/null +++ b/test/testdata/dockerfile/DS014/Dockerfile.allowed @@ -0,0 +1,7 @@ +FROM debian:stable-20210621 +RUN curl http://bing.com +RUN curl http://google.com + +FROM baseimage:1.0 +USER mike +RUN curl http://bing.com diff --git a/test/testdata/dockerfile/DS014/Dockerfile.denied b/test/testdata/dockerfile/DS014/Dockerfile.denied new file mode 100644 index 000000000000..c5ec6eff395a --- /dev/null +++ b/test/testdata/dockerfile/DS014/Dockerfile.denied @@ -0,0 +1,7 @@ +FROM debian:stable-20210621 +RUN wget http://bing.com +RUN curl http://google.com + +FROM baseimage:1.0 +USER mike +RUN curl http://bing.com diff --git 
a/test/testdata/dockerfile/DS015/Dockerfile.allowed b/test/testdata/dockerfile/DS015/Dockerfile.allowed new file mode 100644 index 000000000000..5ab6a65688ef --- /dev/null +++ b/test/testdata/dockerfile/DS015/Dockerfile.allowed @@ -0,0 +1,5 @@ +FROM alpine:3.5 +RUN yum install && yum clean all +RUN pip install --no-cache-dir -r /usr/src/app/requirements.txt +USER mike +CMD python /usr/src/app/app.py \ No newline at end of file diff --git a/test/testdata/dockerfile/DS015/Dockerfile.denied b/test/testdata/dockerfile/DS015/Dockerfile.denied new file mode 100644 index 000000000000..e1ba5704d7d5 --- /dev/null +++ b/test/testdata/dockerfile/DS015/Dockerfile.denied @@ -0,0 +1,5 @@ +FROM alpine:3.5 +RUN yum install vim +RUN pip install --no-cache-dir -r /usr/src/app/requirements.txt +USER mike +CMD python /usr/src/app/app.py \ No newline at end of file diff --git a/test/testdata/dockerfile/DS016/Dockerfile.allowed b/test/testdata/dockerfile/DS016/Dockerfile.allowed new file mode 100644 index 000000000000..46f07fda1436 --- /dev/null +++ b/test/testdata/dockerfile/DS016/Dockerfile.allowed @@ -0,0 +1,5 @@ +FROM golang:1.7.3 +USER mike +CMD ./apps +FROM alpine:3.13 +CMD ./app diff --git a/test/testdata/dockerfile/DS016/Dockerfile.denied b/test/testdata/dockerfile/DS016/Dockerfile.denied new file mode 100644 index 000000000000..e861f0a0d284 --- /dev/null +++ b/test/testdata/dockerfile/DS016/Dockerfile.denied @@ -0,0 +1,6 @@ +FROM golang:1.7.3 +USER mike +CMD ./app +CMD ./apps +FROM alpine:3.13 +CMD ./app diff --git a/test/testdata/dockerfile/DS017/Dockerfile.allowed b/test/testdata/dockerfile/DS017/Dockerfile.allowed new file mode 100644 index 000000000000..d92984d79ae8 --- /dev/null +++ b/test/testdata/dockerfile/DS017/Dockerfile.allowed @@ -0,0 +1,4 @@ +FROM ubuntu:18.04 +RUN apt-get update && apt-get install -y --no-install-recommends mysql-client && rm -rf /var/lib/apt/lists/* && apt-get clean +USER mike +ENTRYPOINT mysql \ No newline at end of file diff --git 
a/test/testdata/dockerfile/DS017/Dockerfile.denied b/test/testdata/dockerfile/DS017/Dockerfile.denied new file mode 100644 index 000000000000..e9bf2a9abd1e --- /dev/null +++ b/test/testdata/dockerfile/DS017/Dockerfile.denied @@ -0,0 +1,5 @@ +FROM ubuntu:18.04 +RUN apt-get update +RUN apt-get install -y --no-install-recommends mysql-client && rm -rf /var/lib/apt/lists/* && apt-get clean +USER mike +ENTRYPOINT mysql \ No newline at end of file diff --git a/test/testdata/dockerfile/DS019/Dockerfile.allowed b/test/testdata/dockerfile/DS019/Dockerfile.allowed new file mode 100644 index 000000000000..0b97504659ec --- /dev/null +++ b/test/testdata/dockerfile/DS019/Dockerfile.allowed @@ -0,0 +1,5 @@ +FROM fedora:27 +USER mike +RUN set -uex && dnf config-manager --add-repo https://download.docker.com/linux/fedora/docker-ce.repo && sed -i 's/\\$releasever/26/g' /etc/yum.repos.d/docker-ce.repo && dnf install -vy docker-ce && dnf clean all +HEALTHCHECK CMD curl --fail http://localhost:3000 || exit 1 + diff --git a/test/testdata/dockerfile/DS019/Dockerfile.denied b/test/testdata/dockerfile/DS019/Dockerfile.denied new file mode 100644 index 000000000000..47c2c25fd495 --- /dev/null +++ b/test/testdata/dockerfile/DS019/Dockerfile.denied @@ -0,0 +1,4 @@ +FROM fedora:27 +USER mike +RUN set -uex && dnf config-manager --add-repo https://download.docker.com/linux/fedora/docker-ce.repo && sed -i 's/\\$releasever/26/g' /etc/yum.repos.d/docker-ce.repo && dnf install -vy docker-ce +HEALTHCHECK CMD curl --fail http://localhost:3000 || exit 1 diff --git a/test/testdata/dockerfile/DS020/Dockerfile.allowed b/test/testdata/dockerfile/DS020/Dockerfile.allowed new file mode 100644 index 000000000000..b76d238a9c8d --- /dev/null +++ b/test/testdata/dockerfile/DS020/Dockerfile.allowed @@ -0,0 +1,5 @@ +FROM alpine:3.5 +RUN zypper install bash && zypper clean +RUN pip install --no-cache-dir -r /usr/src/app/requirements.txt +USER mike +CMD python /usr/src/app/app.py \ No newline at end of file diff 
--git a/test/testdata/dockerfile/DS020/Dockerfile.denied b/test/testdata/dockerfile/DS020/Dockerfile.denied new file mode 100644 index 000000000000..22235094173c --- /dev/null +++ b/test/testdata/dockerfile/DS020/Dockerfile.denied @@ -0,0 +1,5 @@ +FROM alpine:3.5 +RUN zypper install bash +RUN pip install --no-cache-dir -r /usr/src/app/requirements.txt +USER mike +CMD python /usr/src/app/app.py \ No newline at end of file diff --git a/test/testdata/dockerfile/DS021/Dockerfile.allowed b/test/testdata/dockerfile/DS021/Dockerfile.allowed new file mode 100644 index 000000000000..84d2c55941bd --- /dev/null +++ b/test/testdata/dockerfile/DS021/Dockerfile.allowed @@ -0,0 +1,3 @@ +FROM node:12 +USER mike +RUN apt-get -fmy install apt-utils && apt-get clean \ No newline at end of file diff --git a/test/testdata/dockerfile/DS021/Dockerfile.denied b/test/testdata/dockerfile/DS021/Dockerfile.denied new file mode 100644 index 000000000000..988e111d5d04 --- /dev/null +++ b/test/testdata/dockerfile/DS021/Dockerfile.denied @@ -0,0 +1,3 @@ +FROM node:12 +USER mike +RUN apt-get install apt-utils && apt-get clean \ No newline at end of file diff --git a/test/testdata/dockerfile/DS022/Dockerfile.allowed b/test/testdata/dockerfile/DS022/Dockerfile.allowed new file mode 100644 index 000000000000..eaa7e488d692 --- /dev/null +++ b/test/testdata/dockerfile/DS022/Dockerfile.allowed @@ -0,0 +1,2 @@ +FROM busybox:1.33.1 +USER mike \ No newline at end of file diff --git a/test/testdata/dockerfile/DS022/Dockerfile.denied b/test/testdata/dockerfile/DS022/Dockerfile.denied new file mode 100644 index 000000000000..aebd38f065da --- /dev/null +++ b/test/testdata/dockerfile/DS022/Dockerfile.denied @@ -0,0 +1,3 @@ +FROM busybox:1.33.1 +USER mike +MAINTAINER Lukas Martinelli \ No newline at end of file diff --git a/test/testdata/dockerfile/DS023/Dockerfile.allowed b/test/testdata/dockerfile/DS023/Dockerfile.allowed new file mode 100644 index 000000000000..29c48f20c700 --- /dev/null +++ 
b/test/testdata/dockerfile/DS023/Dockerfile.allowed @@ -0,0 +1,7 @@ +FROM busybox:1.33.1 +HEALTHCHECK CMD /bin/healthcheck + +FROM alpine:3.13 +HEALTHCHECK CMD /bin/healthcheck +USER mike +CMD ./app diff --git a/test/testdata/dockerfile/DS023/Dockerfile.denied b/test/testdata/dockerfile/DS023/Dockerfile.denied new file mode 100644 index 000000000000..6dc49ab5f151 --- /dev/null +++ b/test/testdata/dockerfile/DS023/Dockerfile.denied @@ -0,0 +1,8 @@ +FROM busybox:1.33.1 +HEALTHCHECK CMD curl http://localhost:8080 +HEALTHCHECK CMD /bin/healthcheck + +FROM alpine:3.13 +HEALTHCHECK CMD /bin/healthcheck +USER mike +CMD ./app diff --git a/test/testdata/dockerfile/DS024/Dockerfile.allowed b/test/testdata/dockerfile/DS024/Dockerfile.allowed new file mode 100644 index 000000000000..b551287049e7 --- /dev/null +++ b/test/testdata/dockerfile/DS024/Dockerfile.allowed @@ -0,0 +1,4 @@ +FROM debian:9.13 +RUN apt-get update && apt-get install -y curl && apt-get clean +USER mike +CMD python /usr/src/app/app.py diff --git a/test/testdata/dockerfile/DS024/Dockerfile.denied b/test/testdata/dockerfile/DS024/Dockerfile.denied new file mode 100644 index 000000000000..7bc3ae8975a3 --- /dev/null +++ b/test/testdata/dockerfile/DS024/Dockerfile.denied @@ -0,0 +1,4 @@ +FROM debian:9.13 +RUN apt-get update && apt-get dist-upgrade && apt-get -y install curl && apt-get clean +USER mike +CMD python /usr/src/app/app.py diff --git a/test/testdata/kubernetes/KSV001/allowed.yaml b/test/testdata/kubernetes/KSV001/allowed.yaml new file mode 100644 index 000000000000..f40d17d24580 --- /dev/null +++ b/test/testdata/kubernetes/KSV001/allowed.yaml @@ -0,0 +1,11 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello + securityContext: + allowPrivilegeEscalation: false diff --git a/test/testdata/kubernetes/KSV001/denied.yaml b/test/testdata/kubernetes/KSV001/denied.yaml new file mode 100644 index 
000000000000..3622b1bfbcd2 --- /dev/null +++ b/test/testdata/kubernetes/KSV001/denied.yaml @@ -0,0 +1,13 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello + securityContext: + capabilities: + drop: + - all diff --git a/test/testdata/kubernetes/KSV002/allowed.yaml b/test/testdata/kubernetes/KSV002/allowed.yaml new file mode 100644 index 000000000000..c98da678b4bb --- /dev/null +++ b/test/testdata/kubernetes/KSV002/allowed.yaml @@ -0,0 +1,15 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + annotations: + container.apparmor.security.beta.kubernetes.io/hello: runtime/default + name: hello-apparmor +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello AppArmor!' && sleep 1h + image: busybox + name: hello \ No newline at end of file diff --git a/test/testdata/kubernetes/KSV002/denied.yaml b/test/testdata/kubernetes/KSV002/denied.yaml new file mode 100644 index 000000000000..a127b4b47b9d --- /dev/null +++ b/test/testdata/kubernetes/KSV002/denied.yaml @@ -0,0 +1,15 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + annotations: + container.apparmor.security.beta.kubernetes.io/hello: custom + name: hello-apparmor +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello AppArmor!' 
&& sleep 1h + image: busybox + name: hello \ No newline at end of file diff --git a/test/testdata/kubernetes/KSV003/allowed.yaml b/test/testdata/kubernetes/KSV003/allowed.yaml new file mode 100644 index 000000000000..3622b1bfbcd2 --- /dev/null +++ b/test/testdata/kubernetes/KSV003/allowed.yaml @@ -0,0 +1,13 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello + securityContext: + capabilities: + drop: + - all diff --git a/test/testdata/kubernetes/KSV003/denied.yaml b/test/testdata/kubernetes/KSV003/denied.yaml new file mode 100644 index 000000000000..07754a354ca3 --- /dev/null +++ b/test/testdata/kubernetes/KSV003/denied.yaml @@ -0,0 +1,9 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello diff --git a/test/testdata/kubernetes/KSV005/allowed.yaml b/test/testdata/kubernetes/KSV005/allowed.yaml new file mode 100644 index 000000000000..ff08b26f90bc --- /dev/null +++ b/test/testdata/kubernetes/KSV005/allowed.yaml @@ -0,0 +1,13 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-sys-admin-capabilities +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello diff --git a/test/testdata/kubernetes/KSV005/denied.yaml b/test/testdata/kubernetes/KSV005/denied.yaml new file mode 100644 index 000000000000..c34e9fad024a --- /dev/null +++ b/test/testdata/kubernetes/KSV005/denied.yaml @@ -0,0 +1,17 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-sys-admin-capabilities +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + securityContext: + capabilities: + add: + - SYS_ADMIN diff --git a/test/testdata/kubernetes/KSV006/allowed.yaml b/test/testdata/kubernetes/KSV006/allowed.yaml new file mode 100644 index 
000000000000..04f1710d7c51 --- /dev/null +++ b/test/testdata/kubernetes/KSV006/allowed.yaml @@ -0,0 +1,15 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-docker-socket +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + volumes: + - name: test-volume diff --git a/test/testdata/kubernetes/KSV006/denied.yaml b/test/testdata/kubernetes/KSV006/denied.yaml new file mode 100644 index 000000000000..d7335ac91d56 --- /dev/null +++ b/test/testdata/kubernetes/KSV006/denied.yaml @@ -0,0 +1,18 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-docker-socket +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + volumes: + - name: test-volume + hostPath: + path: "/var/run/docker.sock" + type: Directory diff --git a/test/testdata/kubernetes/KSV008/allowed.yaml b/test/testdata/kubernetes/KSV008/allowed.yaml new file mode 100644 index 000000000000..6dd4513d063a --- /dev/null +++ b/test/testdata/kubernetes/KSV008/allowed.yaml @@ -0,0 +1,14 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-ipc +spec: + hostIPC: false + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello diff --git a/test/testdata/kubernetes/KSV008/denied.yaml b/test/testdata/kubernetes/KSV008/denied.yaml new file mode 100644 index 000000000000..826f58a65485 --- /dev/null +++ b/test/testdata/kubernetes/KSV008/denied.yaml @@ -0,0 +1,14 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-ipc +spec: + hostIPC: true + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello diff --git a/test/testdata/kubernetes/KSV009/allowed.yaml b/test/testdata/kubernetes/KSV009/allowed.yaml new file mode 100644 index 000000000000..61d615b1a8b0 --- /dev/null +++ b/test/testdata/kubernetes/KSV009/allowed.yaml @@ -0,0 +1,14 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: 
hello-host-network +spec: + hostNetwork: false + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello \ No newline at end of file diff --git a/test/testdata/kubernetes/KSV009/denied.yaml b/test/testdata/kubernetes/KSV009/denied.yaml new file mode 100644 index 000000000000..2b862ca596ef --- /dev/null +++ b/test/testdata/kubernetes/KSV009/denied.yaml @@ -0,0 +1,14 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-host-network +spec: + hostNetwork: true + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello \ No newline at end of file diff --git a/test/testdata/kubernetes/KSV010/allowed.yaml b/test/testdata/kubernetes/KSV010/allowed.yaml new file mode 100644 index 000000000000..b215b5c7faf3 --- /dev/null +++ b/test/testdata/kubernetes/KSV010/allowed.yaml @@ -0,0 +1,14 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-host-network +spec: + hostPID: false + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello \ No newline at end of file diff --git a/test/testdata/kubernetes/KSV010/denied.yaml b/test/testdata/kubernetes/KSV010/denied.yaml new file mode 100644 index 000000000000..69acff1a2d92 --- /dev/null +++ b/test/testdata/kubernetes/KSV010/denied.yaml @@ -0,0 +1,14 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-host-network +spec: + hostPID: true + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello \ No newline at end of file diff --git a/test/testdata/kubernetes/KSV011/allowed.yaml b/test/testdata/kubernetes/KSV011/allowed.yaml new file mode 100644 index 000000000000..f271ed677908 --- /dev/null +++ b/test/testdata/kubernetes/KSV011/allowed.yaml @@ -0,0 +1,16 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + 
resources: + limits: + cpu: 500m diff --git a/test/testdata/kubernetes/KSV011/denied.yaml b/test/testdata/kubernetes/KSV011/denied.yaml new file mode 100644 index 000000000000..71287dea0a04 --- /dev/null +++ b/test/testdata/kubernetes/KSV011/denied.yaml @@ -0,0 +1,13 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello diff --git a/test/testdata/kubernetes/KSV012/allowed.yaml b/test/testdata/kubernetes/KSV012/allowed.yaml new file mode 100644 index 000000000000..0811a40e50a0 --- /dev/null +++ b/test/testdata/kubernetes/KSV012/allowed.yaml @@ -0,0 +1,11 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello + securityContext: + runAsNonRoot: true diff --git a/test/testdata/kubernetes/KSV012/denied.yaml b/test/testdata/kubernetes/KSV012/denied.yaml new file mode 100644 index 000000000000..07754a354ca3 --- /dev/null +++ b/test/testdata/kubernetes/KSV012/denied.yaml @@ -0,0 +1,9 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello diff --git a/test/testdata/kubernetes/KSV013/allowed.yaml b/test/testdata/kubernetes/KSV013/allowed.yaml new file mode 100644 index 000000000000..f46dae03dfa3 --- /dev/null +++ b/test/testdata/kubernetes/KSV013/allowed.yaml @@ -0,0 +1,13 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-tag +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox:1.33.1 + name: hello diff --git a/test/testdata/kubernetes/KSV013/denied.yaml b/test/testdata/kubernetes/KSV013/denied.yaml new file mode 100644 index 000000000000..d6fd19396048 --- /dev/null +++ b/test/testdata/kubernetes/KSV013/denied.yaml @@ -0,0 +1,13 @@ +--- +apiVersion: v1 +kind: Pod 
+metadata: + name: hello-tag +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox:latest + name: hello diff --git a/test/testdata/kubernetes/KSV014/allowed.yaml b/test/testdata/kubernetes/KSV014/allowed.yaml new file mode 100644 index 000000000000..0ff96a444328 --- /dev/null +++ b/test/testdata/kubernetes/KSV014/allowed.yaml @@ -0,0 +1,15 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-fs-not-readonly +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + securityContext: + readOnlyRootFilesystem: true diff --git a/test/testdata/kubernetes/KSV014/denied.yaml b/test/testdata/kubernetes/KSV014/denied.yaml new file mode 100644 index 000000000000..c15b769f4c5e --- /dev/null +++ b/test/testdata/kubernetes/KSV014/denied.yaml @@ -0,0 +1,15 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-fs-not-readonly +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + securityContext: + readOnlyRootFilesystem: false diff --git a/test/testdata/kubernetes/KSV015/allowed.yaml b/test/testdata/kubernetes/KSV015/allowed.yaml new file mode 100644 index 000000000000..fd55236361b1 --- /dev/null +++ b/test/testdata/kubernetes/KSV015/allowed.yaml @@ -0,0 +1,16 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + resources: + requests: + cpu: 250m diff --git a/test/testdata/kubernetes/KSV015/denied.yaml b/test/testdata/kubernetes/KSV015/denied.yaml new file mode 100644 index 000000000000..71287dea0a04 --- /dev/null +++ b/test/testdata/kubernetes/KSV015/denied.yaml @@ -0,0 +1,13 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello diff --git 
a/test/testdata/kubernetes/KSV016/allowed.yaml b/test/testdata/kubernetes/KSV016/allowed.yaml new file mode 100644 index 000000000000..c43f990f1ab8 --- /dev/null +++ b/test/testdata/kubernetes/KSV016/allowed.yaml @@ -0,0 +1,16 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + resources: + requests: + memory: 64Mi diff --git a/test/testdata/kubernetes/KSV016/denied.yaml b/test/testdata/kubernetes/KSV016/denied.yaml new file mode 100644 index 000000000000..71287dea0a04 --- /dev/null +++ b/test/testdata/kubernetes/KSV016/denied.yaml @@ -0,0 +1,13 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello diff --git a/test/testdata/kubernetes/KSV017/allowed.yaml b/test/testdata/kubernetes/KSV017/allowed.yaml new file mode 100644 index 000000000000..b608e5c78113 --- /dev/null +++ b/test/testdata/kubernetes/KSV017/allowed.yaml @@ -0,0 +1,13 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-privileged +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello diff --git a/test/testdata/kubernetes/KSV017/denied.yaml b/test/testdata/kubernetes/KSV017/denied.yaml new file mode 100644 index 000000000000..620f6497f9aa --- /dev/null +++ b/test/testdata/kubernetes/KSV017/denied.yaml @@ -0,0 +1,15 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-privileged +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + securityContext: + privileged: true diff --git a/test/testdata/kubernetes/KSV018/allowed.yaml b/test/testdata/kubernetes/KSV018/allowed.yaml new file mode 100644 index 000000000000..eb00e56e4c7e --- /dev/null +++ b/test/testdata/kubernetes/KSV018/allowed.yaml @@ -0,0 +1,16 @@ +--- +apiVersion: v1 
+kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + resources: + limits: + memory: 128Mi diff --git a/test/testdata/kubernetes/KSV018/denied.yaml b/test/testdata/kubernetes/KSV018/denied.yaml new file mode 100644 index 000000000000..6bf001e3c075 --- /dev/null +++ b/test/testdata/kubernetes/KSV018/denied.yaml @@ -0,0 +1,14 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + diff --git a/test/testdata/kubernetes/KSV020/allowed.yaml b/test/testdata/kubernetes/KSV020/allowed.yaml new file mode 100644 index 000000000000..36f7916bbea0 --- /dev/null +++ b/test/testdata/kubernetes/KSV020/allowed.yaml @@ -0,0 +1,15 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-gid +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + securityContext: + runAsUser: 10004 diff --git a/test/testdata/kubernetes/KSV020/denied.yaml b/test/testdata/kubernetes/KSV020/denied.yaml new file mode 100644 index 000000000000..e9dbef332273 --- /dev/null +++ b/test/testdata/kubernetes/KSV020/denied.yaml @@ -0,0 +1,13 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-gid +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello diff --git a/test/testdata/kubernetes/KSV021/allowed.yaml b/test/testdata/kubernetes/KSV021/allowed.yaml new file mode 100644 index 000000000000..f176cb07ce5c --- /dev/null +++ b/test/testdata/kubernetes/KSV021/allowed.yaml @@ -0,0 +1,15 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-gid +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + securityContext: + runAsGroup: 10004 diff --git a/test/testdata/kubernetes/KSV021/denied.yaml 
b/test/testdata/kubernetes/KSV021/denied.yaml new file mode 100644 index 000000000000..e9dbef332273 --- /dev/null +++ b/test/testdata/kubernetes/KSV021/denied.yaml @@ -0,0 +1,13 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-gid +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello diff --git a/test/testdata/kubernetes/KSV022/allowed.yaml b/test/testdata/kubernetes/KSV022/allowed.yaml new file mode 100644 index 000000000000..1e4b014e5ece --- /dev/null +++ b/test/testdata/kubernetes/KSV022/allowed.yaml @@ -0,0 +1,13 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-add-capabilities +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello diff --git a/test/testdata/kubernetes/KSV022/denied.yaml b/test/testdata/kubernetes/KSV022/denied.yaml new file mode 100644 index 000000000000..3e5b7aec50a7 --- /dev/null +++ b/test/testdata/kubernetes/KSV022/denied.yaml @@ -0,0 +1,17 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-add-capabilities +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + securityContext: + capabilities: + add: + - NET_BIND_SERVICE \ No newline at end of file diff --git a/test/testdata/kubernetes/KSV023/allowed.yaml b/test/testdata/kubernetes/KSV023/allowed.yaml new file mode 100644 index 000000000000..8c19827425fb --- /dev/null +++ b/test/testdata/kubernetes/KSV023/allowed.yaml @@ -0,0 +1,13 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-host-path +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello diff --git a/test/testdata/kubernetes/KSV023/denied.yaml b/test/testdata/kubernetes/KSV023/denied.yaml new file mode 100644 index 000000000000..da474eb987d0 --- /dev/null +++ b/test/testdata/kubernetes/KSV023/denied.yaml @@ -0,0 +1,17 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: 
hello-host-path +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + volumes: + - hostPath: + path: "/sys" + type: '' diff --git a/test/testdata/kubernetes/KSV024/allowed.yaml b/test/testdata/kubernetes/KSV024/allowed.yaml new file mode 100644 index 000000000000..24b1c9757c12 --- /dev/null +++ b/test/testdata/kubernetes/KSV024/allowed.yaml @@ -0,0 +1,13 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-host-ports +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello \ No newline at end of file diff --git a/test/testdata/kubernetes/KSV024/denied.yaml b/test/testdata/kubernetes/KSV024/denied.yaml new file mode 100644 index 000000000000..f23d66ed4817 --- /dev/null +++ b/test/testdata/kubernetes/KSV024/denied.yaml @@ -0,0 +1,15 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-host-ports +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + ports: + - hostPort: 8080 \ No newline at end of file diff --git a/test/testdata/kubernetes/KSV025/allowed.yaml b/test/testdata/kubernetes/KSV025/allowed.yaml new file mode 100644 index 000000000000..508ad7b2ec51 --- /dev/null +++ b/test/testdata/kubernetes/KSV025/allowed.yaml @@ -0,0 +1,14 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-selinux +spec: + securityContext: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello \ No newline at end of file diff --git a/test/testdata/kubernetes/KSV025/denied.yaml b/test/testdata/kubernetes/KSV025/denied.yaml new file mode 100644 index 000000000000..9fbaa41da184 --- /dev/null +++ b/test/testdata/kubernetes/KSV025/denied.yaml @@ -0,0 +1,16 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-selinux +spec: + securityContext: + seLinuxOptions: + type: custom + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + 
image: busybox + name: hello \ No newline at end of file diff --git a/test/testdata/kubernetes/KSV026/allowed.yaml b/test/testdata/kubernetes/KSV026/allowed.yaml new file mode 100644 index 000000000000..9ff2d7bcfdb5 --- /dev/null +++ b/test/testdata/kubernetes/KSV026/allowed.yaml @@ -0,0 +1,17 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-sysctls +spec: + securityContext: + sysctls: + - name: kernel.shm_rmid_forced + value: '0' + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello diff --git a/test/testdata/kubernetes/KSV026/denied.yaml b/test/testdata/kubernetes/KSV026/denied.yaml new file mode 100644 index 000000000000..69eed5d69b03 --- /dev/null +++ b/test/testdata/kubernetes/KSV026/denied.yaml @@ -0,0 +1,19 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-sysctls +spec: + securityContext: + sysctls: + - name: net.core.somaxconn + value: '1024' + - name: kernel.msgmax + value: '65536' + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello diff --git a/test/testdata/kubernetes/KSV027/allowed.yaml b/test/testdata/kubernetes/KSV027/allowed.yaml new file mode 100644 index 000000000000..40b8c24aff06 --- /dev/null +++ b/test/testdata/kubernetes/KSV027/allowed.yaml @@ -0,0 +1,15 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-proc-mount +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + ports: + - hostPort: 8080 diff --git a/test/testdata/kubernetes/KSV027/denied.yaml b/test/testdata/kubernetes/KSV027/denied.yaml new file mode 100644 index 000000000000..40354e4e8427 --- /dev/null +++ b/test/testdata/kubernetes/KSV027/denied.yaml @@ -0,0 +1,17 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-proc-mount +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + ports: + - hostPort: 8080 + securityContext: + 
procMount: Unmasked diff --git a/test/testdata/kubernetes/KSV028/allowed.yaml b/test/testdata/kubernetes/KSV028/allowed.yaml new file mode 100644 index 000000000000..a2f93da0dde0 --- /dev/null +++ b/test/testdata/kubernetes/KSV028/allowed.yaml @@ -0,0 +1,15 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-volume-types +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + volumes: + - name: volume-a diff --git a/test/testdata/kubernetes/KSV028/denied.yaml b/test/testdata/kubernetes/KSV028/denied.yaml new file mode 100644 index 000000000000..57fc35cfc7a5 --- /dev/null +++ b/test/testdata/kubernetes/KSV028/denied.yaml @@ -0,0 +1,24 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-volume-types +spec: + containers: + - command: + - sh + - "-c" + - echo 'Hello' && sleep 1h + image: busybox + name: hello + volumes: + - name: volume-a + scaleIO: + gateway: https://localhost:443/api + system: scaleio + protectionDomain: sd0 + storagePool: sp1 + volumeName: vol-a + secretRef: + name: sio-secret + fsType: xfs diff --git a/test/testdata/kubernetes/KSV030/allowed.yaml b/test/testdata/kubernetes/KSV030/allowed.yaml new file mode 100644 index 000000000000..48b8c1d4ed3a --- /dev/null +++ b/test/testdata/kubernetes/KSV030/allowed.yaml @@ -0,0 +1,14 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello + securityContext: + seccompProfile: + type: RuntimeDefault + localhostProfile: profiles/audit.json \ No newline at end of file diff --git a/test/testdata/kubernetes/KSV030/denied.yaml b/test/testdata/kubernetes/KSV030/denied.yaml new file mode 100644 index 000000000000..45b3bd316652 --- /dev/null +++ b/test/testdata/kubernetes/KSV030/denied.yaml @@ -0,0 +1,14 @@ +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 
'Hello' && sleep 1h"] + image: busybox + name: hello + securityContext: + seccompProfile: + type: LocalPort + localhostProfile: profiles/audit.json \ No newline at end of file diff --git a/test/testdata/kubernetes/KSV036/allowed.yaml b/test/testdata/kubernetes/KSV036/allowed.yaml new file mode 100644 index 000000000000..42a9ded8b589 --- /dev/null +++ b/test/testdata/kubernetes/KSV036/allowed.yaml @@ -0,0 +1,12 @@ +apiVersion: v1 +kind: Pod +metadata: + name: mypod + namespace: test + labels: + name: mypod +spec: + containers: + - name: mypod + image: nginx + diff --git a/test/testdata/kubernetes/KSV036/denied.yaml b/test/testdata/kubernetes/KSV036/denied.yaml new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/testdata/kubernetes/KSV037/allowed.yaml b/test/testdata/kubernetes/KSV037/allowed.yaml new file mode 100644 index 000000000000..99c22f0afff9 --- /dev/null +++ b/test/testdata/kubernetes/KSV037/allowed.yaml @@ -0,0 +1,13 @@ +apiVersion: v1 +kind: Pod +metadata: + name: mypod + namespace: test + labels: + name: mypod +spec: + automountServiceAccountToken: true + containers: + - name: mypod + image: nginx + diff --git a/test/testdata/kubernetes/KSV037/denied.yaml b/test/testdata/kubernetes/KSV037/denied.yaml new file mode 100644 index 000000000000..c42d41e0349c --- /dev/null +++ b/test/testdata/kubernetes/KSV037/denied.yaml @@ -0,0 +1,12 @@ +apiVersion: v1 +kind: Pod +metadata: + name: mypod + namespace: kube-system + labels: + name: mypod +spec: + containers: + - name: mypod + image: nginx + diff --git a/test/testdata/kubernetes/KSV038/allowed.yaml b/test/testdata/kubernetes/KSV038/allowed.yaml new file mode 100644 index 000000000000..ccdac794b92d --- /dev/null +++ b/test/testdata/kubernetes/KSV038/allowed.yaml @@ -0,0 +1,8 @@ +apiVersion: v1 +kind: NetworkPolicy +metadata: + name: hello-cpu-limit +spec: + podSelector: + matchLabels: + role: db diff --git a/test/testdata/kubernetes/KSV038/denied.yaml 
b/test/testdata/kubernetes/KSV038/denied.yaml new file mode 100644 index 000000000000..ed554daccad6 --- /dev/null +++ b/test/testdata/kubernetes/KSV038/denied.yaml @@ -0,0 +1,6 @@ +apiVersion: v1 +kind: NetworkPolicy +metadata: + name: hello-cpu-limit +spec: + something: true diff --git a/test/testdata/kubernetes/KSV102/allowed.yaml b/test/testdata/kubernetes/KSV102/allowed.yaml new file mode 100644 index 000000000000..3b6b9f49d5ba --- /dev/null +++ b/test/testdata/kubernetes/KSV102/allowed.yaml @@ -0,0 +1,24 @@ +--- +apiVersion: apps/v1beta2 +kind: Deployment +metadata: + name: Onga +spec: + template: + spec: + containers: + - name: carts-db + image: mongo + securityContext: + runAsNonRoot: true + allowPrivilegeEscalation: true + initContainers: + - name: init-svc + image: busybox:1.28 + securityContext: + allowPrivilegeEscalation: false + metadata: + name: None + labels: + app: example + tier: backend diff --git a/test/testdata/kubernetes/KSV102/denied.yaml b/test/testdata/kubernetes/KSV102/denied.yaml new file mode 100644 index 000000000000..c760bc6880a2 --- /dev/null +++ b/test/testdata/kubernetes/KSV102/denied.yaml @@ -0,0 +1,19 @@ +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: mongo-deployment +spec: + template: + spec: + containers: + - name: carts-db + image: tiller + securityContext: + runAsNonRoot: true + allowPrivilegeEscalation: true + initContainers: + - name: init-svc + image: busybox:1.28 + securityContext: + allowPrivilegeEscalation: false diff --git a/test/testdata/kubernetes/optional/KSV004/allowed.yaml b/test/testdata/kubernetes/optional/KSV004/allowed.yaml new file mode 100644 index 000000000000..3622b1bfbcd2 --- /dev/null +++ b/test/testdata/kubernetes/optional/KSV004/allowed.yaml @@ -0,0 +1,13 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello + securityContext: + capabilities: + drop: + - all diff --git 
a/test/testdata/kubernetes/optional/KSV004/denied.yaml b/test/testdata/kubernetes/optional/KSV004/denied.yaml new file mode 100644 index 000000000000..dc02a2664512 --- /dev/null +++ b/test/testdata/kubernetes/optional/KSV004/denied.yaml @@ -0,0 +1,11 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello + securityContext: + capabilities: diff --git a/test/testdata/kubernetes/optional/KSV007/allowed.yaml b/test/testdata/kubernetes/optional/KSV007/allowed.yaml new file mode 100644 index 000000000000..86b256077c21 --- /dev/null +++ b/test/testdata/kubernetes/optional/KSV007/allowed.yaml @@ -0,0 +1,5 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: diff --git a/test/testdata/kubernetes/optional/KSV007/denied.yaml b/test/testdata/kubernetes/optional/KSV007/denied.yaml new file mode 100644 index 000000000000..a9480234d151 --- /dev/null +++ b/test/testdata/kubernetes/optional/KSV007/denied.yaml @@ -0,0 +1,10 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + hostAliases: + - ip: "127.0.0.1" + hostnames: + - "foo.local" + - "bar.local" diff --git a/test/testdata/kubernetes/optional/KSV032/allowed.yaml b/test/testdata/kubernetes/optional/KSV032/allowed.yaml new file mode 100644 index 000000000000..5809dcb0d328 --- /dev/null +++ b/test/testdata/kubernetes/optional/KSV032/allowed.yaml @@ -0,0 +1,8 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - name: hello + image: azurecr.io/something diff --git a/test/testdata/kubernetes/optional/KSV032/denied.yaml b/test/testdata/kubernetes/optional/KSV032/denied.yaml new file mode 100644 index 000000000000..0d9857cad0a5 --- /dev/null +++ b/test/testdata/kubernetes/optional/KSV032/denied.yaml @@ -0,0 +1,8 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - name: hello + image: blah/something diff 
--git a/test/testdata/kubernetes/optional/KSV033/allowed.yaml b/test/testdata/kubernetes/optional/KSV033/allowed.yaml new file mode 100644 index 000000000000..4c8bfa5783aa --- /dev/null +++ b/test/testdata/kubernetes/optional/KSV033/allowed.yaml @@ -0,0 +1,8 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - name: hello + image: gcr.io/something diff --git a/test/testdata/kubernetes/optional/KSV033/denied.yaml b/test/testdata/kubernetes/optional/KSV033/denied.yaml new file mode 100644 index 000000000000..0d9857cad0a5 --- /dev/null +++ b/test/testdata/kubernetes/optional/KSV033/denied.yaml @@ -0,0 +1,8 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - name: hello + image: blah/something diff --git a/test/testdata/kubernetes/optional/KSV034/allowed.yaml b/test/testdata/kubernetes/optional/KSV034/allowed.yaml new file mode 100644 index 000000000000..5809dcb0d328 --- /dev/null +++ b/test/testdata/kubernetes/optional/KSV034/allowed.yaml @@ -0,0 +1,8 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - name: hello + image: azurecr.io/something diff --git a/test/testdata/kubernetes/optional/KSV034/denied.yaml b/test/testdata/kubernetes/optional/KSV034/denied.yaml new file mode 100644 index 000000000000..b7f7eef038fd --- /dev/null +++ b/test/testdata/kubernetes/optional/KSV034/denied.yaml @@ -0,0 +1,8 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - name: hello + image: ghcr.io/something diff --git a/test/testdata/kubernetes/optional/KSV035/allowed.yaml b/test/testdata/kubernetes/optional/KSV035/allowed.yaml new file mode 100644 index 000000000000..feaa3199c175 --- /dev/null +++ b/test/testdata/kubernetes/optional/KSV035/allowed.yaml @@ -0,0 +1,8 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - name: hello + image: ecr.us-east-2.amazonaws.com/something diff --git 
a/test/testdata/kubernetes/optional/KSV035/denied.yaml b/test/testdata/kubernetes/optional/KSV035/denied.yaml new file mode 100644 index 000000000000..0d9857cad0a5 --- /dev/null +++ b/test/testdata/kubernetes/optional/KSV035/denied.yaml @@ -0,0 +1,8 @@ +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - name: hello + image: blah/something diff --git a/test/testdata/kubernetes/optional/KSV039/allowed.yaml b/test/testdata/kubernetes/optional/KSV039/allowed.yaml new file mode 100644 index 000000000000..7844f5db6d64 --- /dev/null +++ b/test/testdata/kubernetes/optional/KSV039/allowed.yaml @@ -0,0 +1,35 @@ +--- +apiVersion: v1 +kind: LimitRange +metadata: + name: core-resource-limits +spec: + limits: + - type: Pod + default: + cpu: '2' + memory: 1Gi + defaultRequest: + cpu: '2' + memory: 1Gi + max: + cpu: '2' + memory: 1Gi + min: + cpu: 200m + memory: 6Mi + - type: Container + max: + cpu: '2' + memory: 1Gi + min: + cpu: 100m + memory: 4Mi + default: + cpu: 300m + memory: 200Mi + defaultRequest: + cpu: 200m + memory: 100Mi + maxLimitRequestRatio: + cpu: '10' \ No newline at end of file diff --git a/test/testdata/kubernetes/optional/KSV039/denied.yaml b/test/testdata/kubernetes/optional/KSV039/denied.yaml new file mode 100644 index 000000000000..b53d29718168 --- /dev/null +++ b/test/testdata/kubernetes/optional/KSV039/denied.yaml @@ -0,0 +1,11 @@ +apiVersion: v1 +kind: LimitRange +metadata: + name: cpu-limit-range +spec: + limits: + - default: + cpu: 1 + defaultRequest: + cpu: 0.5 + type: Container diff --git a/test/testdata/kubernetes/optional/KSV040/allowed.yaml b/test/testdata/kubernetes/optional/KSV040/allowed.yaml new file mode 100644 index 000000000000..cf9bbf2d7870 --- /dev/null +++ b/test/testdata/kubernetes/optional/KSV040/allowed.yaml @@ -0,0 +1,11 @@ +--- +apiVersion: v1 +kind: ResourceQuota +metadata: + name: mem-cpu-demo +spec: + hard: + requests.cpu: '1' + requests.memory: 1Gi + limits.cpu: '2' + limits.memory: 2Gi diff 
--git a/test/testdata/kubernetes/optional/KSV040/denied.yaml b/test/testdata/kubernetes/optional/KSV040/denied.yaml new file mode 100644 index 000000000000..b73d6e67d2ee --- /dev/null +++ b/test/testdata/kubernetes/optional/KSV040/denied.yaml @@ -0,0 +1,10 @@ +--- +apiVersion: v1 +kind: ResourceQuota +metadata: + name: mem-cpu-demo +spec: + hard: + requests.cpu: '1' + requests.memory: 1Gi + limits.cpu: '2' diff --git a/test/testutil/util.go b/test/testutil/util.go new file mode 100644 index 000000000000..ea29df78a9ae --- /dev/null +++ b/test/testutil/util.go @@ -0,0 +1,113 @@ +package testutil + +import ( + "encoding/json" + "io/fs" + "path/filepath" + "strings" + "testing" + + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/liamg/memoryfs" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func AssertRuleFound(t *testing.T, ruleID string, results scan.Results, message string, args ...interface{}) { + found := ruleIDInResults(ruleID, results.GetFailed()) + assert.True(t, found, append([]interface{}{message}, args...)...) + for _, result := range results.GetFailed() { + if result.Rule().LongID() == ruleID { + m := result.Metadata() + meta := &m + for meta != nil { + assert.NotNil(t, meta.Range(), 0) + assert.Greater(t, meta.Range().GetStartLine(), 0) + assert.Greater(t, meta.Range().GetEndLine(), 0) + meta = meta.Parent() + } + } + } +} + +func AssertRuleNotFound(t *testing.T, ruleID string, results scan.Results, message string, args ...interface{}) { + found := ruleIDInResults(ruleID, results.GetFailed()) + assert.False(t, found, append([]interface{}{message}, args...)...) 
+} + +func ruleIDInResults(ruleID string, results scan.Results) bool { + for _, res := range results { + if res.Rule().LongID() == ruleID { + return true + } + } + return false +} + +func CreateFS(t *testing.T, files map[string]string) fs.FS { + memfs := memoryfs.New() + for name, content := range files { + name := strings.TrimPrefix(name, "/") + err := memfs.MkdirAll(filepath.Dir(name), 0o700) + require.NoError(t, err) + err = memfs.WriteFile(name, []byte(content), 0o644) + require.NoError(t, err) + } + return memfs +} + +func AssertDefsecEqual(t *testing.T, expected interface{}, actual interface{}) { + expectedJson, err := json.MarshalIndent(expected, "", "\t") + require.NoError(t, err) + actualJson, err := json.MarshalIndent(actual, "", "\t") + require.NoError(t, err) + + if expectedJson[0] == '[' { + var expectedSlice []map[string]interface{} + require.NoError(t, json.Unmarshal(expectedJson, &expectedSlice)) + var actualSlice []map[string]interface{} + require.NoError(t, json.Unmarshal(actualJson, &actualSlice)) + expectedSlice = purgeMetadataSlice(expectedSlice) + actualSlice = purgeMetadataSlice(actualSlice) + assert.Equal(t, expectedSlice, actualSlice, "defsec adapted and expected values do not match") + } else { + var expectedMap map[string]interface{} + require.NoError(t, json.Unmarshal(expectedJson, &expectedMap)) + var actualMap map[string]interface{} + require.NoError(t, json.Unmarshal(actualJson, &actualMap)) + expectedMap = purgeMetadata(expectedMap) + actualMap = purgeMetadata(actualMap) + assert.Equal(t, expectedMap, actualMap, "defsec adapted and expected values do not match") + } +} + +func purgeMetadata(input map[string]interface{}) map[string]interface{} { + for k, v := range input { + if k == "metadata" || k == "Metadata" { + delete(input, k) + continue + } + if v, ok := v.(map[string]interface{}); ok { + input[k] = purgeMetadata(v) + } + if v, ok := v.([]interface{}); ok { + if len(v) > 0 { + if _, ok := v[0].(map[string]interface{}); ok { + 
maps := make([]map[string]interface{}, len(v)) + for i := range v { + maps[i] = v[i].(map[string]interface{}) + } + input[k] = purgeMetadataSlice(maps) + } + } + } + } + return input +} + +func purgeMetadataSlice(input []map[string]interface{}) []map[string]interface{} { + for i := range input { + input[i] = purgeMetadata(input[i]) + } + return input +} diff --git a/test/tf/fail/main.tf b/test/tf/fail/main.tf new file mode 100644 index 000000000000..afa28405f67c --- /dev/null +++ b/test/tf/fail/main.tf @@ -0,0 +1,3 @@ +resource "aws_s3_bucket" "bad" { + +} diff --git a/test/wildcard_test.go b/test/wildcard_test.go new file mode 100644 index 000000000000..c8a0d37ce0a6 --- /dev/null +++ b/test/wildcard_test.go @@ -0,0 +1,85 @@ +package test + +import ( + "fmt" + "testing" + + "github.com/aquasecurity/defsec/pkg/rules" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/severity" + "github.com/aquasecurity/defsec/pkg/terraform" + + "github.com/aquasecurity/trivy-iac/test/testutil" +) + +func Test_WildcardMatchingOnRequiredLabels(t *testing.T) { + + tests := []struct { + input string + pattern string + expectedFailure bool + }{ + { + pattern: "aws_*", + input: `resource "aws_instance" "blah" {}`, + expectedFailure: true, + }, + { + pattern: "gcp_*", + input: `resource "aws_instance" "blah" {}`, + expectedFailure: false, + }, + { + pattern: "x_aws_*", + input: `resource "aws_instance" "blah" {}`, + expectedFailure: false, + }, + { + pattern: "aws_security_group*", + input: `resource "aws_security_group" "blah" {}`, + expectedFailure: true, + }, + { + pattern: "aws_security_group*", + input: `resource "aws_security_group_rule" "blah" {}`, + expectedFailure: true, + }, + } + + for i, test := range tests { + + code := fmt.Sprintf("wild%d", i) + + t.Run(code, func(t *testing.T) { + + rule := scan.Rule{ + Service: "service", + ShortCode: code, + Summary: "blah", + Provider: "custom", + Severity: 
severity.High, + CustomChecks: scan.CustomChecks{ + Terraform: &scan.TerraformCustomCheck{ + RequiredTypes: []string{"resource"}, + RequiredLabels: []string{test.pattern}, + Check: func(resourceBlock *terraform.Block, _ *terraform.Module) (results scan.Results) { + results.Add("Custom check failed for resource.", resourceBlock) + return + }, + }, + }, + } + reg := rules.Register(rule) + defer rules.Deregister(reg) + + results := scanHCL(t, test.input) + + if test.expectedFailure { + testutil.AssertRuleFound(t, fmt.Sprintf("custom-service-%s", code), results, "") + } else { + testutil.AssertRuleNotFound(t, fmt.Sprintf("custom-service-%s", code), results, "") + } + }) + } + +} From f769b1dd418853ea2e247557acd28cf20cb12752 Mon Sep 17 00:00:00 2001 From: Simar Date: Thu, 25 Jan 2024 22:20:33 -0700 Subject: [PATCH 02/13] fix lint --- internal/adapters/arm/adapt.go | 7 ++- internal/adapters/arm/storage/adapt.go | 3 +- internal/adapters/cloudformation/adapt.go | 2 +- .../aws/accessanalyzer/accessanalyzer.go | 2 +- internal/adapters/cloudformation/aws/adapt.go | 2 +- .../aws/apigateway/apigateway.go | 2 +- .../cloudformation/aws/athena/athena.go | 2 +- .../aws/cloudfront/cloudfront.go | 2 +- .../aws/cloudtrail/cloudtrail.go | 2 +- .../aws/cloudwatch/cloudwatch.go | 2 +- .../cloudformation/aws/codebuild/codebuild.go | 2 +- .../cloudformation/aws/config/config.go | 2 +- .../aws/documentdb/documentdb.go | 2 +- .../cloudformation/aws/dynamodb/dynamodb.go | 2 +- .../adapters/cloudformation/aws/ec2/ec2.go | 2 +- .../adapters/cloudformation/aws/ec2/nacl.go | 4 +- .../adapters/cloudformation/aws/ecr/ecr.go | 2 +- .../cloudformation/aws/ecr/repository.go | 8 ++-- .../adapters/cloudformation/aws/ecs/ecs.go | 2 +- .../adapters/cloudformation/aws/efs/efs.go | 2 +- .../adapters/cloudformation/aws/eks/eks.go | 2 +- .../aws/elasticache/elasticache.go | 2 +- .../aws/elasticsearch/elasticsearch.go | 2 +- .../adapters/cloudformation/aws/elb/elb.go | 2 +- 
.../adapters/cloudformation/aws/iam/iam.go | 2 +- .../adapters/cloudformation/aws/iam/policy.go | 3 +- .../cloudformation/aws/kinesis/kinesis.go | 2 +- .../cloudformation/aws/lambda/lambda.go | 2 +- internal/adapters/cloudformation/aws/mq/mq.go | 2 +- .../adapters/cloudformation/aws/msk/msk.go | 2 +- .../cloudformation/aws/neptune/neptune.go | 2 +- .../adapters/cloudformation/aws/rds/rds.go | 2 +- .../cloudformation/aws/redshift/redshift.go | 2 +- .../adapters/cloudformation/aws/s3/bucket.go | 3 +- internal/adapters/cloudformation/aws/s3/s3.go | 2 +- .../cloudformation/aws/sam/function.go | 3 +- .../adapters/cloudformation/aws/sam/sam.go | 2 +- .../cloudformation/aws/sam/state_machines.go | 3 +- .../adapters/cloudformation/aws/sns/sns.go | 2 +- .../adapters/cloudformation/aws/sqs/queue.go | 8 ++-- .../adapters/cloudformation/aws/sqs/sqs.go | 2 +- .../adapters/cloudformation/aws/ssm/ssm.go | 2 +- .../aws/workspaces/workspaces.go | 2 +- .../adapters/terraform/aws/ec2/autoscaling.go | 6 +-- internal/adapters/terraform/aws/ecr/adapt.go | 3 +- .../adapters/terraform/aws/iam/convert.go | 8 ++-- .../adapters/terraform/aws/iam/passwords.go | 6 +-- .../adapters/terraform/aws/iam/policies.go | 3 +- internal/adapters/terraform/aws/s3/bucket.go | 2 +- internal/adapters/terraform/aws/sqs/adapt.go | 10 ++-- .../terraform/aws/workspaces/adapt.go | 6 +-- .../adapters/terraform/azure/compute/adapt.go | 16 +++---- .../terraform/azure/keyvault/adapt.go | 6 +-- .../adapters/terraform/azure/network/adapt.go | 8 ++-- .../adapters/terraform/azure/storage/adapt.go | 3 +- .../terraform/cloudstack/compute/adapt.go | 6 +-- .../terraform/digitalocean/compute/adapt.go | 4 +- .../terraform/digitalocean/spaces/adapt.go | 3 +- .../terraform/google/compute/instances.go | 3 +- .../terraform/google/compute/metadata.go | 3 +- .../terraform/google/compute/networks.go | 3 +- .../adapters/terraform/google/gke/adapt.go | 5 +- .../adapters/terraform/google/iam/adapt.go | 11 +++-- 
.../terraform/google/iam/folder_iam.go | 6 +-- .../adapters/terraform/google/iam/folders.go | 9 ++-- .../adapters/terraform/google/iam/org_iam.go | 9 ++-- .../terraform/google/iam/project_iam.go | 14 +++--- .../adapters/terraform/google/iam/projects.go | 6 +-- .../adapters/terraform/google/kms/adapt.go | 6 +-- .../adapters/terraform/google/sql/adapt.go | 6 +-- .../terraform/google/storage/adapt.go | 2 +- .../adapters/terraform/google/storage/iam.go | 8 ++-- .../adapters/terraform/kubernetes/adapt.go | 4 +- .../terraform/openstack/networking.go | 3 +- .../adapters/terraform/tftestutil/testutil.go | 5 +- pkg/detection/detect.go | 2 +- .../azure/arm/parser/armjson/decode_object.go | 2 +- .../azure/arm/parser/armjson/parse_object.go | 2 +- pkg/scanners/azure/arm/parser/parser.go | 3 +- pkg/scanners/azure/arm/parser/template.go | 2 +- pkg/scanners/azure/arm/scanner.go | 16 +++---- pkg/scanners/azure/functions/copy_index.go | 2 +- pkg/scanners/azure/functions/create_object.go | 2 +- pkg/scanners/azure/functions/intersection.go | 2 +- pkg/scanners/azure/functions/max.go | 3 +- pkg/scanners/azure/functions/min.go | 3 +- pkg/scanners/azure/functions/replace.go | 6 +-- pkg/scanners/azure/functions/split.go | 3 +- pkg/scanners/azure/functions/union.go | 8 ++-- .../cloudformation/parser/fn_builtin.go | 6 +-- pkg/scanners/cloudformation/parser/fn_ref.go | 31 ++++++------ .../cloudformation/parser/fn_split.go | 2 +- pkg/scanners/cloudformation/parser/fn_sub.go | 4 +- .../cloudformation/parser/parameter.go | 48 +++++++++---------- pkg/scanners/cloudformation/parser/parser.go | 38 +++++++-------- .../cloudformation/parser/property.go | 9 ++-- .../cloudformation/parser/property_helpers.go | 1 - .../cloudformation/parser/resource.go | 4 +- pkg/scanners/cloudformation/parser/util.go | 11 +++-- pkg/scanners/cloudformation/scanner.go | 25 +++++----- pkg/scanners/dockerfile/parser/parser.go | 12 ++--- pkg/scanners/dockerfile/scanner.go | 21 ++++---- pkg/scanners/helm/parser/parser.go | 
19 ++++---- pkg/scanners/helm/parser/parser_tar.go | 3 +- pkg/scanners/helm/parser/vals.go | 4 +- pkg/scanners/helm/scanner.go | 18 +++---- pkg/scanners/json/parser/parser.go | 4 +- pkg/scanners/json/scanner.go | 16 +++---- pkg/scanners/kubernetes/parser/manifest.go | 2 +- pkg/scanners/kubernetes/parser/parser.go | 12 ++--- pkg/scanners/kubernetes/scanner.go | 5 +- pkg/scanners/terraform/executor/executor.go | 7 ++- pkg/scanners/terraform/executor/pool.go | 6 +-- pkg/scanners/terraform/executor/statistics.go | 3 +- pkg/scanners/terraform/options.go | 1 - pkg/scanners/terraform/parser/evaluator.go | 8 ++-- .../terraform/parser/funcs/collection.go | 8 ++-- pkg/scanners/terraform/parser/funcs/crypto.go | 6 +-- .../terraform/parser/funcs/datetime.go | 2 +- .../terraform/parser/funcs/defaults.go | 5 +- .../terraform/parser/funcs/encoding.go | 2 +- pkg/scanners/terraform/parser/funcs/number.go | 2 +- pkg/scanners/terraform/parser/funcs/string.go | 2 +- pkg/scanners/terraform/parser/functions.go | 3 +- pkg/scanners/terraform/parser/load_blocks.go | 5 +- pkg/scanners/terraform/parser/load_module.go | 6 +-- .../terraform/parser/load_module_metadata.go | 2 +- .../terraform/parser/module_retrieval.go | 2 +- pkg/scanners/terraform/parser/parser.go | 8 ++-- .../terraform/parser/resolvers/cache.go | 4 +- .../terraform/parser/resolvers/local.go | 2 +- .../terraform/parser/resolvers/registry.go | 1 + .../terraform/parser/resolvers/remote.go | 4 +- pkg/scanners/terraform/scanner.go | 22 ++++----- pkg/scanners/terraformplan/parser/parser.go | 14 +++--- pkg/scanners/terraformplan/scanner.go | 13 ++--- pkg/scanners/toml/parser/parser.go | 5 +- pkg/scanners/toml/scanner.go | 16 +++---- pkg/scanners/universal/scanner.go | 4 +- pkg/scanners/yaml/parser/parser.go | 7 +-- pkg/scanners/yaml/scanner.go | 16 +++---- test/testutil/util.go | 5 +- 142 files changed, 414 insertions(+), 431 deletions(-) diff --git a/internal/adapters/arm/adapt.go b/internal/adapters/arm/adapt.go index 
e160d6219012..c5d36af965bd 100644 --- a/internal/adapters/arm/adapt.go +++ b/internal/adapters/arm/adapt.go @@ -3,6 +3,8 @@ package arm import ( "context" + "github.com/aquasecurity/defsec/pkg/providers/azure" + "github.com/aquasecurity/defsec/pkg/state" "github.com/aquasecurity/trivy/internal/adapters/arm/appservice" "github.com/aquasecurity/trivy/internal/adapters/arm/authorization" "github.com/aquasecurity/trivy/internal/adapters/arm/compute" @@ -16,13 +18,10 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/arm/securitycenter" "github.com/aquasecurity/trivy/internal/adapters/arm/storage" "github.com/aquasecurity/trivy/internal/adapters/arm/synapse" - - "github.com/aquasecurity/defsec/pkg/providers/azure" - "github.com/aquasecurity/defsec/pkg/state" scanner "github.com/aquasecurity/trivy/pkg/scanners/azure" ) -// Adapt ... +// Adapt adapts an azure arm instance func Adapt(ctx context.Context, deployment scanner.Deployment) *state.State { return &state.State{ Azure: adaptAzure(deployment), diff --git a/internal/adapters/arm/storage/adapt.go b/internal/adapters/arm/storage/adapt.go index e81e85cc1c26..5d2387a85d3e 100644 --- a/internal/adapters/arm/storage/adapt.go +++ b/internal/adapters/arm/storage/adapt.go @@ -4,9 +4,8 @@ import ( "strings" "github.com/aquasecurity/defsec/pkg/providers/azure/storage" - "github.com/aquasecurity/trivy/pkg/scanners/azure" - "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/azure" ) func Adapt(deployment azure.Deployment) storage.Storage { diff --git a/internal/adapters/cloudformation/adapt.go b/internal/adapters/cloudformation/adapt.go index bc2fb65c5bb3..7fc5fe2c81ee 100644 --- a/internal/adapters/cloudformation/adapt.go +++ b/internal/adapters/cloudformation/adapt.go @@ -6,7 +6,7 @@ import ( 
"github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts the Cloudformation instance func Adapt(cfFile parser.FileContext) *state.State { return &state.State{ AWS: aws.Adapt(cfFile), diff --git a/internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go b/internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go index db59784ee91e..dac3695f1270 100644 --- a/internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go +++ b/internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts an AccessAnalyzer instance func Adapt(cfFile parser.FileContext) accessanalyzer.AccessAnalyzer { return accessanalyzer.AccessAnalyzer{ Analyzers: getAccessAnalyzer(cfFile), diff --git a/internal/adapters/cloudformation/aws/adapt.go b/internal/adapters/cloudformation/aws/adapt.go index 4e4446ad857e..c92c0a276930 100644 --- a/internal/adapters/cloudformation/aws/adapt.go +++ b/internal/adapters/cloudformation/aws/adapt.go @@ -36,7 +36,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts a Cloudformation AWS instance func Adapt(cfFile parser.FileContext) aws.AWS { return aws.AWS{ APIGateway: apigateway.Adapt(cfFile), diff --git a/internal/adapters/cloudformation/aws/apigateway/apigateway.go b/internal/adapters/cloudformation/aws/apigateway/apigateway.go index 56b82fc069f7..0004eff9a096 100644 --- a/internal/adapters/cloudformation/aws/apigateway/apigateway.go +++ b/internal/adapters/cloudformation/aws/apigateway/apigateway.go @@ -7,7 +7,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts an APIGateway instance func Adapt(cfFile parser.FileContext) apigateway.APIGateway { return apigateway.APIGateway{ V1: v1.APIGateway{ diff --git a/internal/adapters/cloudformation/aws/athena/athena.go b/internal/adapters/cloudformation/aws/athena/athena.go index 7207fd65bac9..37f42512567c 100644 --- a/internal/adapters/cloudformation/aws/athena/athena.go +++ b/internal/adapters/cloudformation/aws/athena/athena.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts an Athena instance func Adapt(cfFile parser.FileContext) athena.Athena { return athena.Athena{ Databases: nil, diff --git a/internal/adapters/cloudformation/aws/cloudfront/cloudfront.go b/internal/adapters/cloudformation/aws/cloudfront/cloudfront.go index c7b8cbc5c049..6f647ccdff7e 100644 --- a/internal/adapters/cloudformation/aws/cloudfront/cloudfront.go +++ b/internal/adapters/cloudformation/aws/cloudfront/cloudfront.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts a CloudFront instance func Adapt(cfFile parser.FileContext) cloudfront.Cloudfront { return cloudfront.Cloudfront{ Distributions: getDistributions(cfFile), diff --git a/internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go b/internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go index 30d6892c3dcd..7521eeaf57c9 100644 --- a/internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go +++ b/internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts a CloudTrail instance func Adapt(cfFile parser.FileContext) cloudtrail.CloudTrail { return cloudtrail.CloudTrail{ Trails: getCloudTrails(cfFile), diff --git a/internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go b/internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go index 1ef5ab08b686..6ad600648a0d 100644 --- a/internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go +++ b/internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts a Cloudwatch instance func Adapt(cfFile parser.FileContext) cloudwatch.CloudWatch { return cloudwatch.CloudWatch{ LogGroups: getLogGroups(cfFile), diff --git a/internal/adapters/cloudformation/aws/codebuild/codebuild.go b/internal/adapters/cloudformation/aws/codebuild/codebuild.go index b6a6a6753c00..7874f42292b8 100644 --- a/internal/adapters/cloudformation/aws/codebuild/codebuild.go +++ b/internal/adapters/cloudformation/aws/codebuild/codebuild.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts a CodeBuild instance func Adapt(cfFile parser.FileContext) codebuild.CodeBuild { return codebuild.CodeBuild{ Projects: getProjects(cfFile), diff --git a/internal/adapters/cloudformation/aws/config/config.go b/internal/adapters/cloudformation/aws/config/config.go index 1e19585b2c88..26aecd6568ec 100644 --- a/internal/adapters/cloudformation/aws/config/config.go +++ b/internal/adapters/cloudformation/aws/config/config.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts a configurationaggregator instance func Adapt(cfFile parser.FileContext) config.Config { return config.Config{ ConfigurationAggregrator: getConfigurationAggregator(cfFile), diff --git a/internal/adapters/cloudformation/aws/documentdb/documentdb.go b/internal/adapters/cloudformation/aws/documentdb/documentdb.go index e7a6ac47d85a..220131b0473a 100644 --- a/internal/adapters/cloudformation/aws/documentdb/documentdb.go +++ b/internal/adapters/cloudformation/aws/documentdb/documentdb.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adaps a documentDB instance func Adapt(cfFile parser.FileContext) documentdb.DocumentDB { return documentdb.DocumentDB{ Clusters: getClusters(cfFile), diff --git a/internal/adapters/cloudformation/aws/dynamodb/dynamodb.go b/internal/adapters/cloudformation/aws/dynamodb/dynamodb.go index 67cb9b9b264d..20cf041f6f0c 100644 --- a/internal/adapters/cloudformation/aws/dynamodb/dynamodb.go +++ b/internal/adapters/cloudformation/aws/dynamodb/dynamodb.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts a dynamodb instance func Adapt(cfFile parser.FileContext) dynamodb.DynamoDB { return dynamodb.DynamoDB{ DAXClusters: getClusters(cfFile), diff --git a/internal/adapters/cloudformation/aws/ec2/ec2.go b/internal/adapters/cloudformation/aws/ec2/ec2.go index 40173b39fded..2e01a57079a6 100644 --- a/internal/adapters/cloudformation/aws/ec2/ec2.go +++ b/internal/adapters/cloudformation/aws/ec2/ec2.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts an EC2 instance func Adapt(cfFile parser.FileContext) ec2.EC2 { return ec2.EC2{ LaunchConfigurations: getLaunchConfigurations(cfFile), diff --git a/internal/adapters/cloudformation/aws/ec2/nacl.go b/internal/adapters/cloudformation/aws/ec2/nacl.go index 2bd6190f6c5e..c6add109bb7c 100644 --- a/internal/adapters/cloudformation/aws/ec2/nacl.go +++ b/internal/adapters/cloudformation/aws/ec2/nacl.go @@ -3,10 +3,8 @@ package ec2 import ( "strconv" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" - + defsecTypes "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) diff --git a/internal/adapters/cloudformation/aws/ecr/ecr.go b/internal/adapters/cloudformation/aws/ecr/ecr.go index 38ae46872cd1..841911d82280 100644 --- a/internal/adapters/cloudformation/aws/ecr/ecr.go +++ b/internal/adapters/cloudformation/aws/ecr/ecr.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts an ECR instance func Adapt(cfFile parser.FileContext) ecr.ECR { return ecr.ECR{ Repositories: getRepositories(cfFile), diff --git a/internal/adapters/cloudformation/aws/ecr/repository.go b/internal/adapters/cloudformation/aws/ecr/repository.go index 3f51a443700b..029feab3f877 100644 --- a/internal/adapters/cloudformation/aws/ecr/repository.go +++ b/internal/adapters/cloudformation/aws/ecr/repository.go @@ -3,14 +3,12 @@ package ecr import ( "fmt" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" - - "github.com/liamg/iamgo" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) func getRepositories(ctx parser.FileContext) (repositories []ecr.Repository) { diff --git a/internal/adapters/cloudformation/aws/ecs/ecs.go b/internal/adapters/cloudformation/aws/ecs/ecs.go index 0697a15648df..350e2e0dd61d 100644 --- a/internal/adapters/cloudformation/aws/ecs/ecs.go +++ b/internal/adapters/cloudformation/aws/ecs/ecs.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts an ECS instance func Adapt(cfFile parser.FileContext) ecs.ECS { return ecs.ECS{ Clusters: getClusters(cfFile), diff --git a/internal/adapters/cloudformation/aws/efs/efs.go b/internal/adapters/cloudformation/aws/efs/efs.go index 1a751374f3ba..e7b02370a646 100644 --- a/internal/adapters/cloudformation/aws/efs/efs.go +++ b/internal/adapters/cloudformation/aws/efs/efs.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts an EFS instance func Adapt(cfFile parser.FileContext) efs.EFS { return efs.EFS{ FileSystems: getFileSystems(cfFile), diff --git a/internal/adapters/cloudformation/aws/eks/eks.go b/internal/adapters/cloudformation/aws/eks/eks.go index 64ea051c6a25..d1b13a865e24 100644 --- a/internal/adapters/cloudformation/aws/eks/eks.go +++ b/internal/adapters/cloudformation/aws/eks/eks.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts an EKS instance func Adapt(cfFile parser.FileContext) eks.EKS { return eks.EKS{ Clusters: getClusters(cfFile), diff --git a/internal/adapters/cloudformation/aws/elasticache/elasticache.go b/internal/adapters/cloudformation/aws/elasticache/elasticache.go index ceede8215187..856c45b25667 100644 --- a/internal/adapters/cloudformation/aws/elasticache/elasticache.go +++ b/internal/adapters/cloudformation/aws/elasticache/elasticache.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts an ElasticCache instance func Adapt(cfFile parser.FileContext) elasticache.ElastiCache { return elasticache.ElastiCache{ Clusters: getClusterGroups(cfFile), diff --git a/internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go b/internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go index 5546215a7df8..94515a5a0484 100644 --- a/internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go +++ b/internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts an ElasticSearch instance func Adapt(cfFile parser.FileContext) elasticsearch.Elasticsearch { return elasticsearch.Elasticsearch{ Domains: getDomains(cfFile), diff --git a/internal/adapters/cloudformation/aws/elb/elb.go b/internal/adapters/cloudformation/aws/elb/elb.go index 06159b9dda7b..8e72304e75ab 100644 --- a/internal/adapters/cloudformation/aws/elb/elb.go +++ b/internal/adapters/cloudformation/aws/elb/elb.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts an ELB instance func Adapt(cfFile parser.FileContext) elb.ELB { return elb.ELB{ LoadBalancers: getLoadBalancers(cfFile), diff --git a/internal/adapters/cloudformation/aws/iam/iam.go b/internal/adapters/cloudformation/aws/iam/iam.go index 7774c4d7d2e8..f0d2329e61c9 100644 --- a/internal/adapters/cloudformation/aws/iam/iam.go +++ b/internal/adapters/cloudformation/aws/iam/iam.go @@ -6,7 +6,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts an IAM instance func Adapt(cfFile parser.FileContext) iam.IAM { return iam.IAM{ PasswordPolicy: iam.PasswordPolicy{ diff --git a/internal/adapters/cloudformation/aws/iam/policy.go b/internal/adapters/cloudformation/aws/iam/policy.go index f9eb365dd73c..4487b70cefb7 100644 --- a/internal/adapters/cloudformation/aws/iam/policy.go +++ b/internal/adapters/cloudformation/aws/iam/policy.go @@ -1,10 +1,11 @@ package iam import ( + "github.com/liamg/iamgo" + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" - "github.com/liamg/iamgo" ) func getPolicies(ctx parser.FileContext) (policies []iam.Policy) { diff --git a/internal/adapters/cloudformation/aws/kinesis/kinesis.go b/internal/adapters/cloudformation/aws/kinesis/kinesis.go index 921027ced49a..1348a8a5de83 100644 --- a/internal/adapters/cloudformation/aws/kinesis/kinesis.go +++ b/internal/adapters/cloudformation/aws/kinesis/kinesis.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts a Kinesis instance func Adapt(cfFile parser.FileContext) kinesis.Kinesis { return kinesis.Kinesis{ Streams: getStreams(cfFile), diff --git a/internal/adapters/cloudformation/aws/lambda/lambda.go b/internal/adapters/cloudformation/aws/lambda/lambda.go index d848296629a4..b17c9d6015e1 100644 --- a/internal/adapters/cloudformation/aws/lambda/lambda.go +++ b/internal/adapters/cloudformation/aws/lambda/lambda.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts a lambda instance func Adapt(cfFile parser.FileContext) lambda.Lambda { return lambda.Lambda{ Functions: getFunctions(cfFile), diff --git a/internal/adapters/cloudformation/aws/mq/mq.go b/internal/adapters/cloudformation/aws/mq/mq.go index 8cad86d007b8..90d31994c5bb 100644 --- a/internal/adapters/cloudformation/aws/mq/mq.go +++ b/internal/adapters/cloudformation/aws/mq/mq.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts an MQ instance func Adapt(cfFile parser.FileContext) mq.MQ { return mq.MQ{ Brokers: getBrokers(cfFile), diff --git a/internal/adapters/cloudformation/aws/msk/msk.go b/internal/adapters/cloudformation/aws/msk/msk.go index c6ed253b2466..9b7cc9eb5b20 100644 --- a/internal/adapters/cloudformation/aws/msk/msk.go +++ b/internal/adapters/cloudformation/aws/msk/msk.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts an MSK instance func Adapt(cfFile parser.FileContext) msk.MSK { return msk.MSK{ Clusters: getClusters(cfFile), diff --git a/internal/adapters/cloudformation/aws/neptune/neptune.go b/internal/adapters/cloudformation/aws/neptune/neptune.go index 798836230816..528fd111041e 100644 --- a/internal/adapters/cloudformation/aws/neptune/neptune.go +++ b/internal/adapters/cloudformation/aws/neptune/neptune.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts a Neptune instance func Adapt(cfFile parser.FileContext) neptune.Neptune { return neptune.Neptune{ Clusters: getClusters(cfFile), diff --git a/internal/adapters/cloudformation/aws/rds/rds.go b/internal/adapters/cloudformation/aws/rds/rds.go index cfa52ed16027..e5db62ccc77a 100644 --- a/internal/adapters/cloudformation/aws/rds/rds.go +++ b/internal/adapters/cloudformation/aws/rds/rds.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts an RDS instance func Adapt(cfFile parser.FileContext) rds.RDS { clusters, orphans := getClustersAndInstances(cfFile) return rds.RDS{ diff --git a/internal/adapters/cloudformation/aws/redshift/redshift.go b/internal/adapters/cloudformation/aws/redshift/redshift.go index 43eccc3232f6..601a67043f72 100644 --- a/internal/adapters/cloudformation/aws/redshift/redshift.go +++ b/internal/adapters/cloudformation/aws/redshift/redshift.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts a RedShift instance func Adapt(cfFile parser.FileContext) redshift.Redshift { return redshift.Redshift{ Clusters: getClusters(cfFile), diff --git a/internal/adapters/cloudformation/aws/s3/bucket.go b/internal/adapters/cloudformation/aws/s3/bucket.go index 45194abff553..8514d4a7c6e8 100644 --- a/internal/adapters/cloudformation/aws/s3/bucket.go +++ b/internal/adapters/cloudformation/aws/s3/bucket.go @@ -4,9 +4,8 @@ import ( "regexp" "strings" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/defsec/pkg/providers/aws/s3" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) diff --git a/internal/adapters/cloudformation/aws/s3/s3.go b/internal/adapters/cloudformation/aws/s3/s3.go index ad5c8410ab1c..d3f322475a51 100644 --- a/internal/adapters/cloudformation/aws/s3/s3.go +++ b/internal/adapters/cloudformation/aws/s3/s3.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts an S3 instance func Adapt(cfFile parser.FileContext) s3.S3 { return s3.S3{ Buckets: getBuckets(cfFile), diff --git a/internal/adapters/cloudformation/aws/sam/function.go b/internal/adapters/cloudformation/aws/sam/function.go index 2c07a05626cb..5ab9ca1b06fe 100644 --- a/internal/adapters/cloudformation/aws/sam/function.go +++ b/internal/adapters/cloudformation/aws/sam/function.go @@ -1,11 +1,12 @@ package sam import ( + "github.com/liamg/iamgo" + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/sam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" - "github.com/liamg/iamgo" ) func getFunctions(cfFile parser.FileContext) (functions []sam.Function) { diff --git a/internal/adapters/cloudformation/aws/sam/sam.go b/internal/adapters/cloudformation/aws/sam/sam.go index dc684b2aa625..5ae61c53ca66 100644 --- a/internal/adapters/cloudformation/aws/sam/sam.go +++ b/internal/adapters/cloudformation/aws/sam/sam.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts an SAM instance func Adapt(cfFile parser.FileContext) sam.SAM { return sam.SAM{ APIs: getApis(cfFile), diff --git a/internal/adapters/cloudformation/aws/sam/state_machines.go b/internal/adapters/cloudformation/aws/sam/state_machines.go index 8331cb907107..917bd48a53f5 100644 --- a/internal/adapters/cloudformation/aws/sam/state_machines.go +++ b/internal/adapters/cloudformation/aws/sam/state_machines.go @@ -1,11 +1,12 @@ package sam import ( + "github.com/liamg/iamgo" + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/sam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" - "github.com/liamg/iamgo" ) func getStateMachines(cfFile parser.FileContext) (stateMachines []sam.StateMachine) { diff --git a/internal/adapters/cloudformation/aws/sns/sns.go b/internal/adapters/cloudformation/aws/sns/sns.go index 149571412c82..8c9e24bea91d 100644 --- a/internal/adapters/cloudformation/aws/sns/sns.go +++ b/internal/adapters/cloudformation/aws/sns/sns.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts an SNS instance func Adapt(cfFile parser.FileContext) sns.SNS { return sns.SNS{ Topics: getTopics(cfFile), diff --git a/internal/adapters/cloudformation/aws/sqs/queue.go b/internal/adapters/cloudformation/aws/sqs/queue.go index 21081060c924..396966b7db4d 100644 --- a/internal/adapters/cloudformation/aws/sqs/queue.go +++ b/internal/adapters/cloudformation/aws/sqs/queue.go @@ -3,14 +3,12 @@ package sqs import ( "fmt" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" - - "github.com/liamg/iamgo" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) func getQueues(ctx parser.FileContext) (queues []sqs.Queue) { diff --git a/internal/adapters/cloudformation/aws/sqs/sqs.go b/internal/adapters/cloudformation/aws/sqs/sqs.go index 3528fae01bf8..974860f6a09a 100644 --- a/internal/adapters/cloudformation/aws/sqs/sqs.go +++ b/internal/adapters/cloudformation/aws/sqs/sqs.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts an SQS instance func Adapt(cfFile parser.FileContext) sqs.SQS { return sqs.SQS{ Queues: getQueues(cfFile), diff --git a/internal/adapters/cloudformation/aws/ssm/ssm.go b/internal/adapters/cloudformation/aws/ssm/ssm.go index 53dfeca789f2..33899d7494b5 100644 --- a/internal/adapters/cloudformation/aws/ssm/ssm.go +++ b/internal/adapters/cloudformation/aws/ssm/ssm.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts an SSM instance func Adapt(cfFile parser.FileContext) ssm.SSM { return ssm.SSM{ Secrets: getSecrets(cfFile), diff --git a/internal/adapters/cloudformation/aws/workspaces/workspaces.go b/internal/adapters/cloudformation/aws/workspaces/workspaces.go index 58be41f4a05a..9918ebdf4977 100644 --- a/internal/adapters/cloudformation/aws/workspaces/workspaces.go +++ b/internal/adapters/cloudformation/aws/workspaces/workspaces.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts a Workspaces instance func Adapt(cfFile parser.FileContext) workspaces.WorkSpaces { return workspaces.WorkSpaces{ WorkSpaces: getWorkSpaces(cfFile), diff --git a/internal/adapters/terraform/aws/ec2/autoscaling.go b/internal/adapters/terraform/aws/ec2/autoscaling.go index 93dc6980897a..3d6a958b80f2 100644 --- a/internal/adapters/terraform/aws/ec2/autoscaling.go +++ b/internal/adapters/terraform/aws/ec2/autoscaling.go @@ -3,11 +3,9 @@ package ec2 import ( "encoding/base64" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) func adaptLaunchTemplates(modules terraform.Modules) (templates []ec2.LaunchTemplate) { diff --git a/internal/adapters/terraform/aws/ecr/adapt.go b/internal/adapters/terraform/aws/ecr/adapt.go index 83741a7e1e33..0aca6c6da7cb 100644 --- a/internal/adapters/terraform/aws/ecr/adapt.go +++ b/internal/adapters/terraform/aws/ecr/adapt.go @@ -1,12 +1,13 @@ package ecr import ( + "github.com/liamg/iamgo" + "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" iamp "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes 
"github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/iam" - "github.com/liamg/iamgo" ) func Adapt(modules terraform.Modules) ecr.ECR { diff --git a/internal/adapters/terraform/aws/iam/convert.go b/internal/adapters/terraform/aws/iam/convert.go index 66464b7b1b7c..6acff0a521e4 100644 --- a/internal/adapters/terraform/aws/iam/convert.go +++ b/internal/adapters/terraform/aws/iam/convert.go @@ -3,13 +3,11 @@ package iam import ( "strings" - "github.com/aquasecurity/defsec/pkg/scan" - - "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" - - "github.com/liamg/iamgo" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/terraform" ) type wrappedDocument struct { diff --git a/internal/adapters/terraform/aws/iam/passwords.go b/internal/adapters/terraform/aws/iam/passwords.go index 73a09700adb0..10db50f1e94b 100644 --- a/internal/adapters/terraform/aws/iam/passwords.go +++ b/internal/adapters/terraform/aws/iam/passwords.go @@ -3,11 +3,9 @@ package iam import ( "math" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) func adaptPasswordPolicy(modules terraform.Modules) iam.PasswordPolicy { diff --git a/internal/adapters/terraform/aws/iam/policies.go b/internal/adapters/terraform/aws/iam/policies.go index e44addeeaaea..de852ef6e81c 100644 --- a/internal/adapters/terraform/aws/iam/policies.go +++ b/internal/adapters/terraform/aws/iam/policies.go @@ -1,10 +1,11 @@ package iam import ( + "github.com/liamg/iamgo" + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" 
"github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/liamg/iamgo" ) func parsePolicy(policyBlock *terraform.Block, modules terraform.Modules) (iam.Policy, error) { diff --git a/internal/adapters/terraform/aws/s3/bucket.go b/internal/adapters/terraform/aws/s3/bucket.go index b254e5d56a5b..e26d7c67fe19 100644 --- a/internal/adapters/terraform/aws/s3/bucket.go +++ b/internal/adapters/terraform/aws/s3/bucket.go @@ -59,7 +59,7 @@ func getEncryption(block *terraform.Block, a *adapter) s3.Encryption { } } -func newS3Encryption(root *terraform.Block, sseConfgihuration *terraform.Block) s3.Encryption { +func newS3Encryption(root, sseConfgihuration *terraform.Block) s3.Encryption { return s3.Encryption{ Metadata: root.GetMetadata(), Enabled: isEncrypted(sseConfgihuration), diff --git a/internal/adapters/terraform/aws/sqs/adapt.go b/internal/adapters/terraform/aws/sqs/adapt.go index 04bca31e7101..84d28750cfaf 100644 --- a/internal/adapters/terraform/aws/sqs/adapt.go +++ b/internal/adapters/terraform/aws/sqs/adapt.go @@ -1,14 +1,14 @@ package sqs import ( + "github.com/google/uuid" + "github.com/liamg/iamgo" + iamp "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/iam" - "github.com/liamg/iamgo" - - "github.com/google/uuid" ) func Adapt(modules terraform.Modules) sqs.SQS { @@ -49,7 +49,7 @@ func (a *adapter) adaptQueues() []sqs.Queue { } policy.Document.Parsed = *parsed policy.Document.Metadata = attr.GetMetadata() - } else if dataBlock.Type() == "data" && dataBlock.TypeLabel() == "aws_iam_policy_document" { + } else if dataBlock.Type() == "data" && dataBlock.TypeLabel() == "aws_iam_policy_document" 
{ // nolint: goconst if doc, err := iam.ConvertTerraformDocument(a.modules, dataBlock); err == nil { policy.Document.Parsed = doc.Document policy.Document.Metadata = doc.Source.GetMetadata() @@ -57,7 +57,7 @@ func (a *adapter) adaptQueues() []sqs.Queue { } } } else if refBlock, err := a.modules.GetReferencedBlock(attr, policyBlock); err == nil { - if refBlock.Type() == "data" && refBlock.TypeLabel() == "aws_iam_policy_document" { + if refBlock.Type() == "data" && refBlock.TypeLabel() == "aws_iam_policy_document" { // nolint: goconst if doc, err := iam.ConvertTerraformDocument(a.modules, refBlock); err == nil { policy.Document.Parsed = doc.Document policy.Document.Metadata = doc.Source.GetMetadata() diff --git a/internal/adapters/terraform/aws/workspaces/adapt.go b/internal/adapters/terraform/aws/workspaces/adapt.go index 084bb82faae7..1707ccae1709 100644 --- a/internal/adapters/terraform/aws/workspaces/adapt.go +++ b/internal/adapters/terraform/aws/workspaces/adapt.go @@ -13,13 +13,13 @@ func Adapt(modules terraform.Modules) workspaces.WorkSpaces { } func adaptWorkspaces(modules terraform.Modules) []workspaces.WorkSpace { - var workspaces []workspaces.WorkSpace + var ws []workspaces.WorkSpace for _, module := range modules { for _, resource := range module.GetResourcesByType("aws_workspaces_workspace") { - workspaces = append(workspaces, adaptWorkspace(resource)) + ws = append(ws, adaptWorkspace(resource)) } } - return workspaces + return ws } func adaptWorkspace(resource *terraform.Block) workspaces.WorkSpace { diff --git a/internal/adapters/terraform/azure/compute/adapt.go b/internal/adapters/terraform/azure/compute/adapt.go index 87b151d99848..b5eb379147ac 100644 --- a/internal/adapters/terraform/azure/compute/adapt.go +++ b/internal/adapters/terraform/azure/compute/adapt.go @@ -3,13 +3,13 @@ package compute import ( "encoding/base64" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" - 
"github.com/aquasecurity/defsec/pkg/providers/azure/compute" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) +const AzureVirtualMachine = "azurerm_virtual_machine" + func Adapt(modules terraform.Modules) compute.Compute { return adaptCompute(modules) } @@ -28,7 +28,7 @@ func adaptCompute(modules terraform.Modules) compute.Compute { for _, resource := range module.GetResourcesByType("azurerm_windows_virtual_machine") { windowsVirtualMachines = append(windowsVirtualMachines, adaptWindowsVM(resource)) } - for _, resource := range module.GetResourcesByType("azurerm_virtual_machine") { + for _, resource := range module.GetResourcesByType(AzureVirtualMachine) { if resource.HasChild("os_profile_linux_config") { linuxVirtualMachines = append(linuxVirtualMachines, adaptLinuxVM(resource)) } else if resource.HasChild("os_profile_windows_config") { @@ -71,7 +71,7 @@ func adaptManagedDisk(resource *terraform.Block) compute.ManagedDisk { func adaptLinuxVM(resource *terraform.Block) compute.LinuxVirtualMachine { workingBlock := resource - if resource.TypeLabel() == "azurerm_virtual_machine" { + if resource.TypeLabel() == AzureVirtualMachine { if b := resource.GetBlock("os_profile"); b.IsNotNil() { workingBlock = b } @@ -86,7 +86,7 @@ func adaptLinuxVM(resource *terraform.Block) compute.LinuxVirtualMachine { customDataVal = defsecTypes.String(string(encoded), customDataAttr.GetMetadata()) } - if resource.TypeLabel() == "azurerm_virtual_machine" { + if resource.TypeLabel() == AzureVirtualMachine { workingBlock = resource.GetBlock("os_profile_linux_config") } disablePasswordAuthAttr := workingBlock.GetAttribute("disable_password_authentication") @@ -108,7 +108,7 @@ func adaptLinuxVM(resource *terraform.Block) compute.LinuxVirtualMachine { func adaptWindowsVM(resource *terraform.Block) compute.WindowsVirtualMachine { workingBlock := resource - if resource.TypeLabel() == "azurerm_virtual_machine" { + 
if resource.TypeLabel() == AzureVirtualMachine { if b := resource.GetBlock("os_profile"); b.IsNotNil() { workingBlock = b } diff --git a/internal/adapters/terraform/azure/keyvault/adapt.go b/internal/adapters/terraform/azure/keyvault/adapt.go index c78d39115bff..2e7a0f18a63a 100644 --- a/internal/adapters/terraform/azure/keyvault/adapt.go +++ b/internal/adapters/terraform/azure/keyvault/adapt.go @@ -3,11 +3,9 @@ package keyvault import ( "time" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/defsec/pkg/providers/azure/keyvault" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) func Adapt(modules terraform.Modules) keyvault.KeyVault { diff --git a/internal/adapters/terraform/azure/network/adapt.go b/internal/adapters/terraform/azure/network/adapt.go index 899c0fe767d5..60d117fde8aa 100644 --- a/internal/adapters/terraform/azure/network/adapt.go +++ b/internal/adapters/terraform/azure/network/adapt.go @@ -4,13 +4,11 @@ import ( "strconv" "strings" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/google/uuid" "github.com/aquasecurity/defsec/pkg/providers/azure/network" - - "github.com/google/uuid" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) func Adapt(modules terraform.Modules) network.Network { diff --git a/internal/adapters/terraform/azure/storage/adapt.go b/internal/adapters/terraform/azure/storage/adapt.go index 4519460b5eb2..ce150103bdb8 100644 --- a/internal/adapters/terraform/azure/storage/adapt.go +++ b/internal/adapters/terraform/azure/storage/adapt.go @@ -1,10 +1,9 @@ package storage import ( - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - 
"github.com/aquasecurity/defsec/pkg/providers/azure/storage" "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) func Adapt(modules terraform.Modules) storage.Storage { diff --git a/internal/adapters/terraform/cloudstack/compute/adapt.go b/internal/adapters/terraform/cloudstack/compute/adapt.go index 7104f74e2846..06ce13be5195 100644 --- a/internal/adapters/terraform/cloudstack/compute/adapt.go +++ b/internal/adapters/terraform/cloudstack/compute/adapt.go @@ -3,11 +3,9 @@ package compute import ( "encoding/base64" - "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/defsec/pkg/providers/cloudstack/compute" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" ) func Adapt(modules terraform.Modules) compute.Compute { diff --git a/internal/adapters/terraform/digitalocean/compute/adapt.go b/internal/adapters/terraform/digitalocean/compute/adapt.go index ab5d12ad2ec2..e871dc8a4dc7 100644 --- a/internal/adapters/terraform/digitalocean/compute/adapt.go +++ b/internal/adapters/terraform/digitalocean/compute/adapt.go @@ -41,7 +41,7 @@ func adaptFirewalls(module terraform.Modules) []compute.Firewall { inboundRules := block.GetBlocks("inbound_rule") outboundRules := block.GetBlocks("outbound_rule") - inboundFirewallRules := []compute.InboundFirewallRule{} + var inboundFirewallRules []compute.InboundFirewallRule for _, inBoundRule := range inboundRules { inboundFirewallRule := compute.InboundFirewallRule{ Metadata: inBoundRule.GetMetadata(), @@ -52,7 +52,7 @@ func adaptFirewalls(module terraform.Modules) []compute.Firewall { inboundFirewallRules = append(inboundFirewallRules, inboundFirewallRule) } - outboundFirewallRules := []compute.OutboundFirewallRule{} + var outboundFirewallRules []compute.OutboundFirewallRule for _, outBoundRule := range 
outboundRules { outboundFirewallRule := compute.OutboundFirewallRule{ Metadata: outBoundRule.GetMetadata(), diff --git a/internal/adapters/terraform/digitalocean/spaces/adapt.go b/internal/adapters/terraform/digitalocean/spaces/adapt.go index 567768d3f764..93214777a41a 100644 --- a/internal/adapters/terraform/digitalocean/spaces/adapt.go +++ b/internal/adapters/terraform/digitalocean/spaces/adapt.go @@ -1,10 +1,11 @@ package spaces import ( + "github.com/google/uuid" + "github.com/aquasecurity/defsec/pkg/providers/digitalocean/spaces" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/google/uuid" ) func Adapt(modules terraform.Modules) spaces.Spaces { diff --git a/internal/adapters/terraform/google/compute/instances.go b/internal/adapters/terraform/google/compute/instances.go index 0ecf120d4f06..2fb8177b6a1c 100644 --- a/internal/adapters/terraform/google/compute/instances.go +++ b/internal/adapters/terraform/google/compute/instances.go @@ -1,10 +1,11 @@ package compute import ( + "github.com/zclconf/go-cty/cty" + "github.com/aquasecurity/defsec/pkg/providers/google/compute" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/zclconf/go-cty/cty" ) func adaptInstances(modules terraform.Modules) (instances []compute.Instance) { diff --git a/internal/adapters/terraform/google/compute/metadata.go b/internal/adapters/terraform/google/compute/metadata.go index c69947d3cda3..2cc58839a367 100644 --- a/internal/adapters/terraform/google/compute/metadata.go +++ b/internal/adapters/terraform/google/compute/metadata.go @@ -1,10 +1,11 @@ package compute import ( + "github.com/zclconf/go-cty/cty" + "github.com/aquasecurity/defsec/pkg/providers/google/compute" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes 
"github.com/aquasecurity/defsec/pkg/types" - "github.com/zclconf/go-cty/cty" ) func adaptProjectMetadata(modules terraform.Modules) compute.ProjectMetadata { diff --git a/internal/adapters/terraform/google/compute/networks.go b/internal/adapters/terraform/google/compute/networks.go index ef59c360eef8..978b2f49a0bf 100644 --- a/internal/adapters/terraform/google/compute/networks.go +++ b/internal/adapters/terraform/google/compute/networks.go @@ -4,10 +4,9 @@ import ( "strconv" "strings" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/defsec/pkg/providers/google/compute" "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) const ( diff --git a/internal/adapters/terraform/google/gke/adapt.go b/internal/adapters/terraform/google/gke/adapt.go index 3edf549aec15..1b41988d0bda 100644 --- a/internal/adapters/terraform/google/gke/adapt.go +++ b/internal/adapters/terraform/google/gke/adapt.go @@ -1,11 +1,12 @@ package gke import ( + "github.com/google/uuid" + "github.com/zclconf/go-cty/cty" + "github.com/aquasecurity/defsec/pkg/providers/google/gke" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/google/uuid" - "github.com/zclconf/go-cty/cty" ) func Adapt(modules terraform.Modules) gke.GKE { diff --git a/internal/adapters/terraform/google/iam/adapt.go b/internal/adapters/terraform/google/iam/adapt.go index 45d082af945b..5c532879348c 100644 --- a/internal/adapters/terraform/google/iam/adapt.go +++ b/internal/adapters/terraform/google/iam/adapt.go @@ -1,10 +1,11 @@ package iam import ( + "github.com/google/uuid" + "github.com/aquasecurity/defsec/pkg/providers/google/iam" "github.com/aquasecurity/defsec/pkg/terraform" "github.com/aquasecurity/defsec/pkg/types" - 
"github.com/google/uuid" ) func Adapt(modules terraform.Modules) iam.IAM { @@ -68,24 +69,24 @@ PROJECT: } // add folders to folders, orgs -FOLDER_NESTED: +FOLDER_NESTED: // nolint: gocritic for _, folder := range a.folders { for i, existing := range a.folders { if folder.parentBlockID != "" && folder.parentBlockID == existing.blockID { existing.folder.Folders = append(existing.folder.Folders, folder.folder) a.folders[i] = existing - continue FOLDER_NESTED + continue FOLDER_NESTED // nolint: gocritic } } } -FOLDER_ORG: +FOLDER_ORG: // nolint: gocritic for _, folder := range a.folders { if folder.parentBlockID != "" { if org, ok := a.orgs[folder.parentBlockID]; ok { org.Folders = append(org.Folders, folder.folder) a.orgs[folder.parentBlockID] = org - continue FOLDER_ORG + continue FOLDER_ORG // nolint: gocritic } } else { // add to placeholder? diff --git a/internal/adapters/terraform/google/iam/folder_iam.go b/internal/adapters/terraform/google/iam/folder_iam.go index 51b09f185ba8..0681166ac7c1 100644 --- a/internal/adapters/terraform/google/iam/folder_iam.go +++ b/internal/adapters/terraform/google/iam/folder_iam.go @@ -17,7 +17,7 @@ func (a *adapter) adaptFolderMembers() { member := a.adaptMember(iamBlock) folderAttr := iamBlock.GetAttribute("folder") if refBlock, err := a.modules.GetReferencedBlock(folderAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_folder" { + if refBlock.TypeLabel() == GoogleFolder { var foundFolder bool for i, folder := range a.folders { if folder.blockID == refBlock.ID() { @@ -59,7 +59,7 @@ func (a *adapter) adaptFolderBindings() { folderAttr := iamBlock.GetAttribute("folder") if refBlock, err := a.modules.GetReferencedBlock(folderAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_folder" { + if refBlock.TypeLabel() == GoogleFolder { var foundFolder bool for i, folder := range a.folders { if folder.blockID == refBlock.ID() { @@ -89,7 +89,7 @@ func (a *adapter) adaptFolderBindings() { binding := 
a.adaptBinding(iamBlock) folderAttr := iamBlock.GetAttribute("folder") if refBlock, err := a.modules.GetReferencedBlock(folderAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_folder" { + if refBlock.TypeLabel() == GoogleFolder { var foundFolder bool for i, folder := range a.folders { if folder.blockID == refBlock.ID() { diff --git a/internal/adapters/terraform/google/iam/folders.go b/internal/adapters/terraform/google/iam/folders.go index 6e8de9641c6f..af574310a4c3 100644 --- a/internal/adapters/terraform/google/iam/folders.go +++ b/internal/adapters/terraform/google/iam/folders.go @@ -4,6 +4,9 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/google/iam" ) +const GoogleOrganization = "google_organization" +const GoogleFolder = "google_folder" + type parentedFolder struct { blockID string parentBlockID string @@ -12,7 +15,7 @@ type parentedFolder struct { } func (a *adapter) adaptFolders() { - for _, folderBlock := range a.modules.GetResourcesByType("google_folder") { + for _, folderBlock := range a.modules.GetResourcesByType(GoogleFolder) { var folder parentedFolder parentAttr := folderBlock.GetAttribute("parent") if parentAttr.IsNil() { @@ -26,10 +29,10 @@ func (a *adapter) adaptFolders() { } if referencedBlock, err := a.modules.GetReferencedBlock(parentAttr, folderBlock); err == nil { - if referencedBlock.TypeLabel() == "google_folder" { + if referencedBlock.TypeLabel() == GoogleFolder { folder.parentBlockID = referencedBlock.ID() } - if referencedBlock.TypeLabel() == "google_organization" { + if referencedBlock.TypeLabel() == GoogleOrganization { folder.parentBlockID = referencedBlock.ID() a.addOrg(folder.parentBlockID) } diff --git a/internal/adapters/terraform/google/iam/org_iam.go b/internal/adapters/terraform/google/iam/org_iam.go index bf56dabd3866..c2f97a01905e 100644 --- a/internal/adapters/terraform/google/iam/org_iam.go +++ b/internal/adapters/terraform/google/iam/org_iam.go @@ -1,9 +1,10 @@ package iam import ( + 
"github.com/google/uuid" + "github.com/aquasecurity/defsec/pkg/providers/google/iam" "github.com/aquasecurity/defsec/pkg/types" - "github.com/google/uuid" ) // see https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/google_organization_iam @@ -22,7 +23,7 @@ func (a *adapter) adaptOrganizationMembers() { } if refBlock, err := a.modules.GetReferencedBlock(organizationAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_organization" { + if refBlock.TypeLabel() == GoogleOrganization { a.addOrg(refBlock.ID()) org, ok := a.orgs[refBlock.ID()] if !ok { @@ -67,7 +68,7 @@ func (a *adapter) adaptOrganizationBindings() { orgAttr := iamBlock.GetAttribute("organization") if refBlock, err := a.modules.GetReferencedBlock(orgAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_organization" { + if refBlock.TypeLabel() == GoogleOrganization { if org, ok := a.orgs[refBlock.ID()]; ok { org.Bindings = append(org.Bindings, bindings...) 
a.orgs[refBlock.ID()] = org @@ -93,7 +94,7 @@ func (a *adapter) adaptOrganizationBindings() { } if refBlock, err := a.modules.GetReferencedBlock(organizationAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_organization" { + if refBlock.TypeLabel() == GoogleOrganization { a.addOrg(refBlock.ID()) org := a.orgs[refBlock.ID()] org.Bindings = append(org.Bindings, binding) diff --git a/internal/adapters/terraform/google/iam/project_iam.go b/internal/adapters/terraform/google/iam/project_iam.go index bac596af7569..bc2941904aa2 100644 --- a/internal/adapters/terraform/google/iam/project_iam.go +++ b/internal/adapters/terraform/google/iam/project_iam.go @@ -3,15 +3,15 @@ package iam import ( "strings" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/defsec/pkg/providers/google/iam" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) // see https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/google_project_iam +const GoogleProject = "google_project" + func (a *adapter) adaptProjectIAM() { a.adaptProjectMembers() a.adaptProjectBindings() @@ -77,7 +77,7 @@ func (a *adapter) adaptProjectMembers() { } if refBlock, err := a.modules.GetReferencedBlock(projectAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_project" { + if refBlock.TypeLabel() == GoogleProject { var foundProject bool for i, project := range a.projects { if project.blockID == refBlock.ID() { @@ -189,7 +189,7 @@ func (a *adapter) adaptProjectDataBindings() { } if refBlock, err := a.modules.GetReferencedBlock(projectAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_project" { + if refBlock.TypeLabel() == GoogleProject { var foundProject bool for i, project := range a.projects { if project.blockID == refBlock.ID() { @@ -244,7 +244,7 @@ func (a *adapter) 
adaptProjectBindings() { } if refBlock, err := a.modules.GetReferencedBlock(projectAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_project" { + if refBlock.TypeLabel() == GoogleProject { var foundProject bool for i, project := range a.projects { if project.blockID == refBlock.ID() { diff --git a/internal/adapters/terraform/google/iam/projects.go b/internal/adapters/terraform/google/iam/projects.go index e064dc8d0bd8..86e2fdbc2287 100644 --- a/internal/adapters/terraform/google/iam/projects.go +++ b/internal/adapters/terraform/google/iam/projects.go @@ -15,7 +15,7 @@ type parentedProject struct { } func (a *adapter) adaptProjects() { - for _, projectBlock := range a.modules.GetResourcesByType("google_project") { + for _, projectBlock := range a.modules.GetResourcesByType(GoogleProject) { var project parentedProject project.project.Metadata = projectBlock.GetMetadata() idAttr := projectBlock.GetAttribute("project_id") @@ -40,7 +40,7 @@ func (a *adapter) adaptProjects() { if orgAttr.IsNotNil() { if referencedBlock, err := a.modules.GetReferencedBlock(orgAttr, projectBlock); err == nil { - if referencedBlock.TypeLabel() == "google_organization" { + if referencedBlock.TypeLabel() == GoogleOrganization { project.orgBlockID = referencedBlock.ID() a.addOrg(project.orgBlockID) } @@ -48,7 +48,7 @@ func (a *adapter) adaptProjects() { } if folderAttr.IsNotNil() { if referencedBlock, err := a.modules.GetReferencedBlock(folderAttr, projectBlock); err == nil { - if referencedBlock.TypeLabel() == "google_folder" { + if referencedBlock.TypeLabel() == GoogleFolder { project.folderBlockID = referencedBlock.ID() } } diff --git a/internal/adapters/terraform/google/kms/adapt.go b/internal/adapters/terraform/google/kms/adapt.go index 1b76b7d8b501..10d46ecf1a48 100644 --- a/internal/adapters/terraform/google/kms/adapt.go +++ b/internal/adapters/terraform/google/kms/adapt.go @@ -3,11 +3,9 @@ package kms import ( "strconv" - 
"github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/defsec/pkg/providers/google/kms" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" ) func Adapt(modules terraform.Modules) kms.KMS { diff --git a/internal/adapters/terraform/google/sql/adapt.go b/internal/adapters/terraform/google/sql/adapt.go index b6cb39f39c3c..7f00b717c266 100644 --- a/internal/adapters/terraform/google/sql/adapt.go +++ b/internal/adapters/terraform/google/sql/adapt.go @@ -3,11 +3,9 @@ package sql import ( "strconv" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/defsec/pkg/providers/google/sql" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) func Adapt(modules terraform.Modules) sql.SQL { diff --git a/internal/adapters/terraform/google/storage/adapt.go b/internal/adapters/terraform/google/storage/adapt.go index 36aff1b0e2a9..a927a0f6cd78 100644 --- a/internal/adapters/terraform/google/storage/adapt.go +++ b/internal/adapters/terraform/google/storage/adapt.go @@ -30,7 +30,7 @@ func (a *adapter) adaptBuckets() []storage.Bucket { var buckets []storage.Bucket for _, module := range a.modules { - for _, resource := range module.GetResourcesByType("google_storage_bucket") { + for _, resource := range module.GetResourcesByType(GoogleStorageBucket) { buckets = append(buckets, a.adaptBucketResource(resource)) } } diff --git a/internal/adapters/terraform/google/storage/iam.go b/internal/adapters/terraform/google/storage/iam.go index 24b88a657f94..e8c13a4b82cc 100644 --- a/internal/adapters/terraform/google/storage/iam.go +++ b/internal/adapters/terraform/google/storage/iam.go @@ -12,6 +12,8 @@ type parentedBinding struct { bindings []iamTypes.Binding } +const 
GoogleStorageBucket = "google_storage_bucket" + type parentedMember struct { blockID string bucketID string @@ -31,7 +33,7 @@ func (a *adapter) adaptBindings() { } if refBlock, err := a.modules.GetReferencedBlock(bucketAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_storage_bucket" { + if refBlock.TypeLabel() == GoogleStorageBucket { parented.bucketBlockID = refBlock.ID() } } @@ -62,7 +64,7 @@ func (a *adapter) adaptBindings() { } if refBlock, err := a.modules.GetReferencedBlock(bucketAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_storage_bucket" { + if refBlock.TypeLabel() == GoogleStorageBucket { parented.bucketBlockID = refBlock.ID() } } @@ -85,7 +87,7 @@ func (a *adapter) adaptMembers() { } if refBlock, err := a.modules.GetReferencedBlock(bucketAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_storage_bucket" { + if refBlock.TypeLabel() == GoogleStorageBucket { parented.bucketBlockID = refBlock.ID() } } diff --git a/internal/adapters/terraform/kubernetes/adapt.go b/internal/adapters/terraform/kubernetes/adapt.go index 459a4d823e4d..ffe03ee5656c 100644 --- a/internal/adapters/terraform/kubernetes/adapt.go +++ b/internal/adapters/terraform/kubernetes/adapt.go @@ -99,7 +99,7 @@ func adaptNetworkPolicy(resourceBlock *terraform.Block) kubernetes.NetworkPolicy } // https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/guides/versioned-resources -func getBlocksIgnoreVersion(module *terraform.Module, blockType string, resourceType string) terraform.Blocks { +func getBlocksIgnoreVersion(module *terraform.Module, blockType, resourceType string) terraform.Blocks { var res terraform.Blocks for _, block := range module.GetBlocks().OfType(blockType) { if isMatchingTypeLabel(block.TypeLabel(), resourceType) { @@ -109,7 +109,7 @@ func getBlocksIgnoreVersion(module *terraform.Module, blockType string, resource return res } -func isMatchingTypeLabel(typeLabel string, resourceType string) bool { +func 
isMatchingTypeLabel(typeLabel, resourceType string) bool { if typeLabel == resourceType { return true } diff --git a/internal/adapters/terraform/openstack/networking.go b/internal/adapters/terraform/openstack/networking.go index c77029b4fb9f..dd56a82b2d1d 100644 --- a/internal/adapters/terraform/openstack/networking.go +++ b/internal/adapters/terraform/openstack/networking.go @@ -1,10 +1,11 @@ package openstack import ( + "github.com/google/uuid" + "github.com/aquasecurity/defsec/pkg/providers/openstack" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/google/uuid" ) func adaptNetworking(modules terraform.Modules) openstack.Networking { diff --git a/internal/adapters/terraform/tftestutil/testutil.go b/internal/adapters/terraform/tftestutil/testutil.go index 8542e12095eb..251a795b049c 100644 --- a/internal/adapters/terraform/tftestutil/testutil.go +++ b/internal/adapters/terraform/tftestutil/testutil.go @@ -5,12 +5,11 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" ) -func CreateModulesFromSource(t *testing.T, source string, ext string) terraform.Modules { +func CreateModulesFromSource(t *testing.T, source, ext string) terraform.Modules { fs := testutil.CreateFS(t, map[string]string{ "source" + ext: source, }) diff --git a/pkg/detection/detect.go b/pkg/detection/detect.go index ecc29c771896..db1cd77d235d 100644 --- a/pkg/detection/detect.go +++ b/pkg/detection/detect.go @@ -29,7 +29,7 @@ const ( FileTypeAzureARM FileType = "azure-arm" ) -var matchers = map[FileType]func(name string, r io.ReadSeeker) bool{} +var matchers = make(map[FileType]func(name string, r io.ReadSeeker) bool) // nolint func init() { diff --git 
a/pkg/scanners/azure/arm/parser/armjson/decode_object.go b/pkg/scanners/azure/arm/parser/armjson/decode_object.go index 516029b55deb..57b611065242 100644 --- a/pkg/scanners/azure/arm/parser/armjson/decode_object.go +++ b/pkg/scanners/azure/arm/parser/armjson/decode_object.go @@ -105,7 +105,7 @@ func (n *node) decodeObjectToStruct(v reflect.Value) error { subject := v.Field(i) - // if fields are nil pointers, initialise them with values of the correct type + // if fields are nil pointers, initialize them with values of the correct type if subject.Kind() == reflect.Ptr { if subject.IsNil() { subject.Set(reflect.New(subject.Type().Elem())) diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_object.go b/pkg/scanners/azure/arm/parser/armjson/parse_object.go index f87ffc439ac9..9d9dc53472c7 100644 --- a/pkg/scanners/azure/arm/parser/armjson/parse_object.go +++ b/pkg/scanners/azure/arm/parser/armjson/parse_object.go @@ -32,7 +32,7 @@ func (p *parser) parseObject(parentMetadata *types.Metadata) (Node, error) { } -// nolint: cyclop +// nolint: gocyclo func (p *parser) iterateObject(nextComments []Node, metadata *types.Metadata, n *node) (Node, error) { for { diff --git a/pkg/scanners/azure/arm/parser/parser.go b/pkg/scanners/azure/arm/parser/parser.go index a692fd93b438..b1dfd79dbf02 100644 --- a/pkg/scanners/azure/arm/parser/parser.go +++ b/pkg/scanners/azure/arm/parser/parser.go @@ -9,9 +9,8 @@ import ( "strings" "github.com/aquasecurity/defsec/pkg/debug" - "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/scanners/azure" "github.com/aquasecurity/trivy/pkg/scanners/azure/arm/parser/armjson" "github.com/aquasecurity/trivy/pkg/scanners/azure/resolver" diff --git a/pkg/scanners/azure/arm/parser/template.go b/pkg/scanners/azure/arm/parser/template.go index 9ece4297149e..0f7ca8e75b28 100644 
--- a/pkg/scanners/azure/arm/parser/template.go +++ b/pkg/scanners/azure/arm/parser/template.go @@ -70,7 +70,7 @@ func (v *Resource) UnmarshalJSONWithMetadata(node armjson.Node) error { if err := comment.Decode(&str); err != nil { return err } - // TODO + // TODO(someone): add support for metadata comments // v.Metadata.Comments = append(v.Metadata.Comments, str) } diff --git a/pkg/scanners/azure/arm/scanner.go b/pkg/scanners/azure/arm/scanner.go index c988ba9311c3..54045082c044 100644 --- a/pkg/scanners/azure/arm/scanner.go +++ b/pkg/scanners/azure/arm/scanner.go @@ -3,7 +3,6 @@ package arm import ( "context" "fmt" - "io" "io/fs" "sync" @@ -16,7 +15,6 @@ import ( "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/state" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/internal/adapters/arm" "github.com/aquasecurity/trivy/pkg/scanners" "github.com/aquasecurity/trivy/pkg/scanners/azure" @@ -26,7 +24,7 @@ import ( var _ scanners.FSScanner = (*Scanner)(nil) var _ options.ConfigurableScanner = (*Scanner)(nil) -type Scanner struct { +type Scanner struct { // nolint: gocritic scannerOptions []options.ScannerOption parserOptions []options.ParserOption debug debug.Logger @@ -120,17 +118,17 @@ func (s *Scanner) initRegoScanner(srcFS fs.FS) error { return nil } -func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, dir string) (scan.Results, error) { - p := parser.New(fs, s.parserOptions...) +func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, dir string) (scan.Results, error) { + p := parser.New(fsys, s.parserOptions...) 
deployments, err := p.ParseFS(ctx, dir) if err != nil { return nil, err } - if err := s.initRegoScanner(fs); err != nil { + if err := s.initRegoScanner(fsys); err != nil { return nil, err } - return s.scanDeployments(ctx, deployments, fs) + return s.scanDeployments(ctx, deployments, fsys) } func (s *Scanner) scanDeployments(ctx context.Context, deployments []azure.Deployment, f fs.FS) (scan.Results, error) { @@ -149,7 +147,7 @@ func (s *Scanner) scanDeployments(ctx context.Context, deployments []azure.Deplo return results, nil } -func (s *Scanner) scanDeployment(ctx context.Context, deployment azure.Deployment, fs fs.FS) (scan.Results, error) { +func (s *Scanner) scanDeployment(ctx context.Context, deployment azure.Deployment, fsys fs.FS) (scan.Results, error) { var results scan.Results deploymentState := s.adaptDeployment(ctx, deployment) if !s.regoOnly { @@ -172,7 +170,7 @@ func (s *Scanner) scanDeployment(ctx context.Context, deployment azure.Deploymen regoResults, err := s.regoScanner.ScanInput(ctx, rego.Input{ Path: deployment.Metadata.Range().GetFilename(), - FS: fs, + FS: fsys, Contents: deploymentState.ToRego(), }) if err != nil { diff --git a/pkg/scanners/azure/functions/copy_index.go b/pkg/scanners/azure/functions/copy_index.go index aee090e79466..d1289cc0a20d 100644 --- a/pkg/scanners/azure/functions/copy_index.go +++ b/pkg/scanners/azure/functions/copy_index.go @@ -1,6 +1,6 @@ package functions -var loopCounter = map[string]int{} +var loopCounter = make(map[string]int) func CopyIndex(args ...interface{}) interface{} { loopName := "default" diff --git a/pkg/scanners/azure/functions/create_object.go b/pkg/scanners/azure/functions/create_object.go index b9fe2d29f6d2..30dc239847f8 100644 --- a/pkg/scanners/azure/functions/create_object.go +++ b/pkg/scanners/azure/functions/create_object.go @@ -1,7 +1,7 @@ package functions func CreateObject(args ...interface{}) interface{} { - obj := map[string]interface{}{} + obj := make(map[string]interface{}) if 
len(args) == 0 { return obj } diff --git a/pkg/scanners/azure/functions/intersection.go b/pkg/scanners/azure/functions/intersection.go index 5eace2fe0bc7..d137a7c2aec8 100644 --- a/pkg/scanners/azure/functions/intersection.go +++ b/pkg/scanners/azure/functions/intersection.go @@ -19,7 +19,7 @@ func Intersection(args ...interface{}) interface{} { } func intersectionArray(args ...interface{}) interface{} { - result := []interface{}{} + var result []interface{} hash := make(map[interface{}]bool) for _, arg := range args[0].([]interface{}) { diff --git a/pkg/scanners/azure/functions/max.go b/pkg/scanners/azure/functions/max.go index 6cbfd5ba25bc..eb0338a4f894 100644 --- a/pkg/scanners/azure/functions/max.go +++ b/pkg/scanners/azure/functions/max.go @@ -9,8 +9,7 @@ func Max(args ...interface{}) interface{} { } return maxInt(ints) case interface{}: - switch iType := args[0].(type) { - case []int: + if iType, ok := args[0].([]int); ok { return maxInt(iType) } } diff --git a/pkg/scanners/azure/functions/min.go b/pkg/scanners/azure/functions/min.go index 35900e26d33d..5147c3bb2769 100644 --- a/pkg/scanners/azure/functions/min.go +++ b/pkg/scanners/azure/functions/min.go @@ -9,8 +9,7 @@ func Min(args ...interface{}) interface{} { } return minInt(ints) case interface{}: - switch iType := args[0].(type) { - case []int: + if iType, ok := args[0].([]int); ok { return minInt(iType) } } diff --git a/pkg/scanners/azure/functions/replace.go b/pkg/scanners/azure/functions/replace.go index 00a7a8a4560f..09f829db2c50 100644 --- a/pkg/scanners/azure/functions/replace.go +++ b/pkg/scanners/azure/functions/replace.go @@ -12,15 +12,15 @@ func Replace(args ...interface{}) interface{} { return "" } - old, ok := args[1].(string) + purana, ok := args[1].(string) if !ok { return "" } - new, ok := args[2].(string) + nava, ok := args[2].(string) if !ok { return "" } - return strings.ReplaceAll(input, old, new) + return strings.ReplaceAll(input, purana, nava) } diff --git 
a/pkg/scanners/azure/functions/split.go b/pkg/scanners/azure/functions/split.go index 04b7f5779d33..47e62e96034a 100644 --- a/pkg/scanners/azure/functions/split.go +++ b/pkg/scanners/azure/functions/split.go @@ -16,8 +16,7 @@ func Split(args ...interface{}) interface{} { case string: return strings.Split(input, separator) case interface{}: - switch separator := separator.(type) { - case []string: + if separator, ok := separator.([]string); ok { m := make(map[rune]int) for _, r := range separator { r := rune(r[0]) diff --git a/pkg/scanners/azure/functions/union.go b/pkg/scanners/azure/functions/union.go index 07bb98f28eeb..b0db1c3d6ed0 100644 --- a/pkg/scanners/azure/functions/union.go +++ b/pkg/scanners/azure/functions/union.go @@ -25,8 +25,7 @@ func unionMap(args ...interface{}) interface{} { result := make(map[string]interface{}) for _, arg := range args { - switch iType := arg.(type) { - case map[string]interface{}: + if iType, ok := arg.(map[string]interface{}); ok { for k, v := range iType { result[k] = v } @@ -37,12 +36,11 @@ func unionMap(args ...interface{}) interface{} { } func unionArray(args ...interface{}) interface{} { - result := []interface{}{} + var result []interface{} union := make(map[interface{}]bool) for _, arg := range args { - switch iType := arg.(type) { - case []interface{}: + if iType, ok := arg.([]interface{}); ok { for _, item := range iType { union[item] = true } diff --git a/pkg/scanners/cloudformation/parser/fn_builtin.go b/pkg/scanners/cloudformation/parser/fn_builtin.go index f20011618889..577a3b25c216 100644 --- a/pkg/scanners/cloudformation/parser/fn_builtin.go +++ b/pkg/scanners/cloudformation/parser/fn_builtin.go @@ -4,9 +4,9 @@ import ( "fmt" "net" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" - "github.com/apparentlymart/go-cidr/cidr" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" ) func GetAzs(property *Property) (*Property, bool) { @@ -43,7 +43,7 @@ 
func GetCidr(property *Property) (*Property, bool) { return property.deriveResolved(cftypes.List, ranges), true } -func calculateCidrs(ipaddress string, count int, bit int, original *Property) ([]*Property, error) { +func calculateCidrs(ipaddress string, count, bit int, original *Property) ([]*Property, error) { var cidrProperties []*Property diff --git a/pkg/scanners/cloudformation/parser/fn_ref.go b/pkg/scanners/cloudformation/parser/fn_ref.go index d2f2ed6eeca4..e57e3bf21eb0 100644 --- a/pkg/scanners/cloudformation/parser/fn_ref.go +++ b/pkg/scanners/cloudformation/parser/fn_ref.go @@ -25,22 +25,23 @@ func ResolveReference(property *Property) (resolved *Property, success bool) { var param *Parameter for k := range property.ctx.Parameters { - if k == refValue { - param = property.ctx.Parameters[k] - resolvedType := param.Type() - - switch param.Default().(type) { - case bool: - resolvedType = cftypes.Bool - case string: - resolvedType = cftypes.String - case int: - resolvedType = cftypes.Int - } - - resolved = property.deriveResolved(resolvedType, param.Default()) - return resolved, true + if k != refValue { + continue + } + param = property.ctx.Parameters[k] + resolvedType := param.Type() + + switch param.Default().(type) { + case bool: + resolvedType = cftypes.Bool + case string: + resolvedType = cftypes.String + case int: + resolvedType = cftypes.Int } + + resolved = property.deriveResolved(resolvedType, param.Default()) + return resolved, true } for k := range property.ctx.Resources { diff --git a/pkg/scanners/cloudformation/parser/fn_split.go b/pkg/scanners/cloudformation/parser/fn_split.go index 6facab992ea7..453de5a5b191 100644 --- a/pkg/scanners/cloudformation/parser/fn_split.go +++ b/pkg/scanners/cloudformation/parser/fn_split.go @@ -30,7 +30,7 @@ func ResolveSplit(property *Property) (resolved *Property, success bool) { return property.deriveResolved(cftypes.List, propertyList), true } -func createPropertyList(splitProp *Property, delimiterProp 
*Property, parent *Property) []*Property { +func createPropertyList(splitProp, delimiterProp, parent *Property) []*Property { splitString := splitProp.AsString() delimiter := delimiterProp.AsString() diff --git a/pkg/scanners/cloudformation/parser/fn_sub.go b/pkg/scanners/cloudformation/parser/fn_sub.go index 81e8401bcfa7..0dc2012daa68 100644 --- a/pkg/scanners/cloudformation/parser/fn_sub.go +++ b/pkg/scanners/cloudformation/parser/fn_sub.go @@ -26,7 +26,7 @@ func ResolveSub(property *Property) (resolved *Property, success bool) { return property, false } -func resolveMapSub(refValue *Property, original *Property) (*Property, bool) { +func resolveMapSub(refValue, original *Property) (*Property, bool) { refValues := refValue.AsList() if len(refValues) != 2 { return abortIntrinsic(original, "Fn::Sub with list expects 2 values, returning original property") @@ -60,7 +60,7 @@ func resolveMapSub(refValue *Property, original *Property) (*Property, bool) { return original.deriveResolved(cftypes.String, workingString), true } -func resolveStringSub(refValue *Property, original *Property) *Property { +func resolveStringSub(refValue, original *Property) *Property { workingString := refValue.AsString() for k, param := range pseudoParameters { diff --git a/pkg/scanners/cloudformation/parser/parameter.go b/pkg/scanners/cloudformation/parser/parameter.go index 493dea756168..8c833c7f7367 100644 --- a/pkg/scanners/cloudformation/parser/parameter.go +++ b/pkg/scanners/cloudformation/parser/parameter.go @@ -90,36 +90,34 @@ func (p *Parameters) UnmarshalJSON(data []byte) error { (*p) = params.Params case data[0] == '[' && data[len(data)-1] == ']': // array - { - // Original format - var params []string - - if err := json.Unmarshal(data, ¶ms); err == nil { - for _, param := range params { - parts := strings.Split(param, "=") - if len(parts) != 2 { - return fmt.Errorf("invalid key-value parameter: %q", param) - } - (*p)[parts[0]] = parts[1] + // Original format + var params []string + 
+ if err := json.Unmarshal(data, ¶ms); err == nil { + for _, param := range params { + parts := strings.Split(param, "=") + if len(parts) != 2 { + return fmt.Errorf("invalid key-value parameter: %q", param) } - return nil + (*p)[parts[0]] = parts[1] } + return nil + } - // CloudFormation like format - var cfparams []struct { - ParameterKey string `json:"ParameterKey"` - ParameterValue string `json:"ParameterValue"` - } + // CloudFormation like format + var cfparams []struct { + ParameterKey string `json:"ParameterKey"` + ParameterValue string `json:"ParameterValue"` + } - d := json.NewDecoder(bytes.NewReader(data)) - d.DisallowUnknownFields() - if err := d.Decode(&cfparams); err != nil { - return err - } + d := json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&cfparams); err != nil { + return err + } - for _, param := range cfparams { - (*p)[param.ParameterKey] = param.ParameterValue - } + for _, param := range cfparams { + (*p)[param.ParameterKey] = param.ParameterValue } default: return fmt.Errorf("unsupported parameters format") diff --git a/pkg/scanners/cloudformation/parser/parser.go b/pkg/scanners/cloudformation/parser/parser.go index 21aa0004304b..6e6ca1e9e453 100644 --- a/pkg/scanners/cloudformation/parser/parser.go +++ b/pkg/scanners/cloudformation/parser/parser.go @@ -11,11 +11,11 @@ import ( "path/filepath" "strings" - "github.com/aquasecurity/defsec/pkg/debug" - "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/liamg/jfather" "gopkg.in/yaml.v3" + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/trivy/pkg/detection" ) @@ -62,9 +62,9 @@ func (p *Parser) SetSkipRequiredCheck(b bool) { p.skipRequired = b } -func New(options ...options.ParserOption) *Parser { +func New(opts ...options.ParserOption) *Parser { p := &Parser{} - for _, option := range options { + for _, option := range 
opts { option(p) } return p @@ -103,12 +103,12 @@ func (p *Parser) ParseFS(ctx context.Context, fsys fs.FS, dir string) (FileConte return contexts, nil } -func (p *Parser) Required(fs fs.FS, path string) bool { +func (p *Parser) Required(fsys fs.FS, path string) bool { if p.skipRequired { return true } - f, err := fs.Open(filepath.ToSlash(path)) + f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return false } @@ -120,7 +120,7 @@ func (p *Parser) Required(fs fs.FS, path string) bool { } -func (p *Parser) ParseFile(ctx context.Context, fsys fs.FS, path string) (context *FileContext, err error) { +func (p *Parser) ParseFile(ctx context.Context, fsys fs.FS, path string) (fctx *FileContext, err error) { defer func() { if e := recover(); e != nil { err = fmt.Errorf("panic during parse: %s", e) @@ -159,40 +159,40 @@ func (p *Parser) ParseFile(ctx context.Context, fsys fs.FS, path string) (contex lines := strings.Split(string(content), "\n") - context = &FileContext{ + fctx = &FileContext{ filepath: path, lines: lines, SourceFormat: sourceFmt, } if strings.HasSuffix(strings.ToLower(path), ".json") { - if err := jfather.Unmarshal(content, context); err != nil { + if err := jfather.Unmarshal(content, fctx); err != nil { return nil, NewErrInvalidContent(path, err) } } else { - if err := yaml.Unmarshal(content, context); err != nil { + if err := yaml.Unmarshal(content, fctx); err != nil { return nil, NewErrInvalidContent(path, err) } } - context.OverrideParameters(p.overridedParameters) + fctx.OverrideParameters(p.overridedParameters) - context.lines = lines - context.SourceFormat = sourceFmt - context.filepath = path + fctx.lines = lines + fctx.SourceFormat = sourceFmt + fctx.filepath = path p.debug.Log("Context loaded from source %s", path) // the context must be set to conditions before resources - for _, c := range context.Conditions { - c.setContext(context) + for _, c := range fctx.Conditions { + c.setContext(fctx) } - for name, r := range context.Resources { 
- r.ConfigureResource(name, fsys, path, context) + for name, r := range fctx.Resources { + r.ConfigureResource(name, fsys, path, fctx) } - return context, nil + return fctx, nil } func (p *Parser) parseParams() error { diff --git a/pkg/scanners/cloudformation/parser/property.go b/pkg/scanners/cloudformation/parser/property.go index 33336f9a2cda..7090df4e9886 100644 --- a/pkg/scanners/cloudformation/parser/property.go +++ b/pkg/scanners/cloudformation/parser/property.go @@ -6,12 +6,11 @@ import ( "strconv" "strings" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" - "github.com/liamg/jfather" "gopkg.in/yaml.v3" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" ) type EqualityOptions = int @@ -414,7 +413,7 @@ func removeLeftMargin(lines []string) []string { func convert(input interface{}) interface{} { switch x := input.(type) { case map[interface{}]interface{}: - outpMap := map[string]interface{}{} + outpMap := make(map[string]interface{}) for k, v := range x { outpMap[k.(string)] = convert(v) } diff --git a/pkg/scanners/cloudformation/parser/property_helpers.go b/pkg/scanners/cloudformation/parser/property_helpers.go index 6883930ea4a6..f36bc77cba1c 100644 --- a/pkg/scanners/cloudformation/parser/property_helpers.go +++ b/pkg/scanners/cloudformation/parser/property_helpers.go @@ -5,7 +5,6 @@ import ( "strings" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" ) diff --git a/pkg/scanners/cloudformation/parser/resource.go b/pkg/scanners/cloudformation/parser/resource.go index 1258ac3fd5c0..d18bc2212d16 100644 --- a/pkg/scanners/cloudformation/parser/resource.go +++ b/pkg/scanners/cloudformation/parser/resource.go @@ -4,10 +4,10 @@ import ( "io/fs" "strings" - defsecTypes 
"github.com/aquasecurity/defsec/pkg/types" - "github.com/liamg/jfather" "gopkg.in/yaml.v3" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) type Resource struct { diff --git a/pkg/scanners/cloudformation/parser/util.go b/pkg/scanners/cloudformation/parser/util.go index a00a8ec8dd78..0836a1fc2413 100644 --- a/pkg/scanners/cloudformation/parser/util.go +++ b/pkg/scanners/cloudformation/parser/util.go @@ -3,10 +3,11 @@ package parser import ( "strconv" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" - "github.com/liamg/jfather" "gopkg.in/yaml.v3" + + "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/scanners/kubernetes/parser" ) func setPropertyValueFromJson(node jfather.Node, propertyData *PropertyInner) error { @@ -57,7 +58,7 @@ func setPropertyValueFromYaml(node *yaml.Node, propertyData *PropertyInner) erro newContent = createNode(node, newContent) - node.Tag = "!!map" + node.Tag = string(parser.TagMap) node.Kind = yaml.MappingNode node.Content = newContent } @@ -80,7 +81,7 @@ func setPropertyValueFromYaml(node *yaml.Node, propertyData *PropertyInner) erro } switch node.Tag { - case "!!map": + case string(parser.TagMap): var childData map[string]*Property if err := node.Decode(&childData); err != nil { return err @@ -119,7 +120,7 @@ func createNode(node *yaml.Node, newContent []*yaml.Node) []*yaml.Node { case yaml.SequenceNode: newNode.Tag = "!!seq" case yaml.MappingNode: - newNode.Tag = "!!map" + newNode.Tag = string(parser.TagMap) case yaml.ScalarNode: default: newNode.Tag = node.Tag diff --git a/pkg/scanners/cloudformation/scanner.go b/pkg/scanners/cloudformation/scanner.go index f68f89cf4285..615a37624d41 100644 --- a/pkg/scanners/cloudformation/scanner.go +++ b/pkg/scanners/cloudformation/scanner.go @@ -15,7 +15,6 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" 
"github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/types" - adapter "github.com/aquasecurity/trivy/internal/adapters/cloudformation" "github.com/aquasecurity/trivy/pkg/scanners" "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" @@ -48,7 +47,7 @@ func WithConfigsFS(fsys fs.FS) options.ScannerOption { var _ scanners.FSScanner = (*Scanner)(nil) var _ options.ConfigurableScanner = (*Scanner)(nil) -type Scanner struct { +type Scanner struct { // nolint: gocritic debug debug.Logger policyDirs []string policyReaders []io.Reader @@ -151,9 +150,9 @@ func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { return regoScanner, nil } -func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, dir string) (results scan.Results, err error) { +func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, dir string) (results scan.Results, err error) { - contexts, err := s.parser.ParseFS(ctx, fs, dir) + contexts, err := s.parser.ParseFS(ctx, fsys, dir) if err != nil { return nil, err } @@ -162,7 +161,7 @@ func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, dir string) (results sca return nil, nil } - regoScanner, err := s.initRegoScanner(fs) + regoScanner, err := s.initRegoScanner(fsys) if err != nil { return nil, err } @@ -171,7 +170,7 @@ func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, dir string) (results sca if cfCtx == nil { continue } - fileResults, err := s.scanFileContext(ctx, regoScanner, cfCtx, fs) + fileResults, err := s.scanFileContext(ctx, regoScanner, cfCtx, fsys) if err != nil { return nil, err } @@ -183,23 +182,23 @@ func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, dir string) (results sca return results, nil } -func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { +func (s *Scanner) ScanFile(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { - cfCtx, err := s.parser.ParseFile(ctx, fs, 
path) + cfCtx, err := s.parser.ParseFile(ctx, fsys, path) if err != nil { return nil, err } - regoScanner, err := s.initRegoScanner(fs) + regoScanner, err := s.initRegoScanner(fsys) if err != nil { return nil, err } - results, err := s.scanFileContext(ctx, regoScanner, cfCtx, fs) + results, err := s.scanFileContext(ctx, regoScanner, cfCtx, fsys) if err != nil { return nil, err } - results.SetSourceAndFilesystem("", fs, false) + results.SetSourceAndFilesystem("", fsys, false) sort.Slice(results, func(i, j int) bool { return results[i].Rule().AVDID < results[j].Rule().AVDID @@ -207,7 +206,7 @@ func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Res return results, nil } -func (s *Scanner) scanFileContext(ctx context.Context, regoScanner *rego.Scanner, cfCtx *parser.FileContext, fs fs.FS) (results scan.Results, err error) { +func (s *Scanner) scanFileContext(ctx context.Context, regoScanner *rego.Scanner, cfCtx *parser.FileContext, fsys fs.FS) (results scan.Results, err error) { state := adapter.Adapt(*cfCtx) if state == nil { return nil, nil @@ -242,7 +241,7 @@ func (s *Scanner) scanFileContext(ctx context.Context, regoScanner *rego.Scanner } regoResults, err := regoScanner.ScanInput(ctx, rego.Input{ Path: cfCtx.Metadata().Range().GetFilename(), - FS: fs, + FS: fsys, Contents: state.ToRego(), }) if err != nil { diff --git a/pkg/scanners/dockerfile/parser/parser.go b/pkg/scanners/dockerfile/parser/parser.go index b45c97adc80f..18ed130dccde 100644 --- a/pkg/scanners/dockerfile/parser/parser.go +++ b/pkg/scanners/dockerfile/parser/parser.go @@ -8,11 +8,11 @@ import ( "path/filepath" "strings" - "github.com/aquasecurity/defsec/pkg/debug" - "github.com/aquasecurity/defsec/pkg/providers/dockerfile" "github.com/moby/buildkit/frontend/dockerfile/instructions" "github.com/moby/buildkit/frontend/dockerfile/parser" + "github.com/aquasecurity/defsec/pkg/debug" + 
"github.com/aquasecurity/defsec/pkg/providers/dockerfile" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/trivy/pkg/detection" ) @@ -33,9 +33,9 @@ func (p *Parser) SetSkipRequiredCheck(b bool) { } // New creates a new Dockerfile parser -func New(options ...options.ParserOption) *Parser { +func New(opts ...options.ParserOption) *Parser { p := &Parser{} - for _, option := range options { + for _, option := range opts { option(p) } return p @@ -73,8 +73,8 @@ func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[st } // ParseFile parses Dockerfile content from the provided filesystem path. -func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) (*dockerfile.Dockerfile, error) { - f, err := fs.Open(filepath.ToSlash(path)) +func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) (*dockerfile.Dockerfile, error) { + f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return nil, err } diff --git a/pkg/scanners/dockerfile/scanner.go b/pkg/scanners/dockerfile/scanner.go index a0d3e60f47a4..10583fac0c53 100644 --- a/pkg/scanners/dockerfile/scanner.go +++ b/pkg/scanners/dockerfile/scanner.go @@ -8,11 +8,10 @@ import ( "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/framework" - "github.com/aquasecurity/defsec/pkg/scan" - "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/defsec/pkg/rego" + "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/scanners" "github.com/aquasecurity/trivy/pkg/scanners/dockerfile/parser" ) @@ -20,7 +19,7 @@ import ( var _ scanners.FSScanner = (*Scanner)(nil) var _ options.ConfigurableScanner = (*Scanner)(nil) -type Scanner struct { +type Scanner struct { // nolint: gocritic debug 
debug.Logger policyDirs []string policyReaders []io.Reader @@ -113,9 +112,9 @@ func NewScanner(opts ...options.ScannerOption) *Scanner { return s } -func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { +func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { - files, err := s.parser.ParseFS(ctx, fs, path) + files, err := s.parser.ParseFS(ctx, fsys, path) if err != nil { return nil, err } @@ -128,25 +127,25 @@ func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Resul for path, dfile := range files { inputs = append(inputs, rego.Input{ Path: path, - FS: fs, + FS: fsys, Contents: dfile.ToRego(), }) } - results, err := s.scanRego(ctx, fs, inputs...) + results, err := s.scanRego(ctx, fsys, inputs...) if err != nil { return nil, err } return results, nil } -func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { - dockerfile, err := s.parser.ParseFile(ctx, fs, path) +func (s *Scanner) ScanFile(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { + dockerfile, err := s.parser.ParseFile(ctx, fsys, path) if err != nil { return nil, err } s.debug.Log("Scanning %s...", path) - return s.scanRego(ctx, fs, rego.Input{ + return s.scanRego(ctx, fsys, rego.Input{ Path: path, Contents: dockerfile.ToRego(), }) diff --git a/pkg/scanners/helm/parser/parser.go b/pkg/scanners/helm/parser/parser.go index 408f43069aff..6ec20577f18b 100644 --- a/pkg/scanners/helm/parser/parser.go +++ b/pkg/scanners/helm/parser/parser.go @@ -13,16 +13,15 @@ import ( "sort" "strings" - "gopkg.in/yaml.v3" - - "github.com/aquasecurity/defsec/pkg/debug" "github.com/google/uuid" + "gopkg.in/yaml.v3" "helm.sh/helm/v3/pkg/action" "helm.sh/helm/v3/pkg/chart" "helm.sh/helm/v3/pkg/chart/loader" "helm.sh/helm/v3/pkg/release" "helm.sh/helm/v3/pkg/releaseutil" + "github.com/aquasecurity/defsec/pkg/debug" 
"github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/trivy/pkg/detection" ) @@ -77,7 +76,7 @@ func (p *Parser) SetAPIVersions(values ...string) { p.apiVersions = values } -func New(path string, options ...options.ParserOption) *Parser { +func New(path string, opts ...options.ParserOption) *Parser { client := action.NewInstall(&action.Configuration{}) client.DryRun = true // don't do anything @@ -89,7 +88,7 @@ func New(path string, options ...options.ParserOption) *Parser { ChartSource: path, } - for _, option := range options { + for _, option := range opts { option(p) } @@ -167,14 +166,14 @@ func (p *Parser) addPaths(paths ...string) error { func (p *Parser) extractChartName(chartPath string) error { - chart, err := p.workingFS.Open(chartPath) + chrt, err := p.workingFS.Open(chartPath) if err != nil { return err } - defer func() { _ = chart.Close() }() + defer func() { _ = chrt.Close() }() var chartContent map[string]interface{} - if err := yaml.NewDecoder(chart).Decode(&chartContent); err != nil { + if err := yaml.NewDecoder(chrt).Decode(&chartContent); err != nil { // the chart likely has the name templated and so cannot be parsed as yaml - use a temporary name if dir := filepath.Dir(chartPath); dir != "" && dir != "." 
{ p.helmClient.ReleaseName = dir @@ -224,7 +223,7 @@ func (p *Parser) RenderedChartFiles() ([]ChartFile, error) { return p.getRenderedManifests(manifestsKeys, splitManifests), nil } -func (p *Parser) getRelease(chart *chart.Chart) (*release.Release, error) { +func (p *Parser) getRelease(chrt *chart.Chart) (*release.Release, error) { opts := &ValueOptions{ ValueFiles: p.valuesFiles, Values: p.values, @@ -236,7 +235,7 @@ func (p *Parser) getRelease(chart *chart.Chart) (*release.Release, error) { if err != nil { return nil, err } - r, err := p.helmClient.RunWithContext(context.Background(), chart, vals) + r, err := p.helmClient.RunWithContext(context.Background(), chrt, vals) if err != nil { return nil, err } diff --git a/pkg/scanners/helm/parser/parser_tar.go b/pkg/scanners/helm/parser/parser_tar.go index 0c77408ea7d6..4a9502c0e599 100644 --- a/pkg/scanners/helm/parser/parser_tar.go +++ b/pkg/scanners/helm/parser/parser_tar.go @@ -11,8 +11,9 @@ import ( "os" "path/filepath" - "github.com/aquasecurity/trivy/pkg/detection" "github.com/liamg/memoryfs" + + "github.com/aquasecurity/trivy/pkg/detection" ) var errSkipFS = errors.New("skip parse FS") diff --git a/pkg/scanners/helm/parser/vals.go b/pkg/scanners/helm/parser/vals.go index 300dad819730..b54cd7c3a521 100644 --- a/pkg/scanners/helm/parser/vals.go +++ b/pkg/scanners/helm/parser/vals.go @@ -22,11 +22,11 @@ type ValueOptions struct { // MergeValues merges values from files specified via -f/--values and directly // via --set, --set-string, or --set-file, marshaling them to YAML func (opts *ValueOptions) MergeValues() (map[string]interface{}, error) { - base := map[string]interface{}{} + base := make(map[string]interface{}) // User specified a values files via -f/--values for _, filePath := range opts.ValueFiles { - currentMap := map[string]interface{}{} + currentMap := make(map[string]interface{}) bytes, err := readFile(filePath) if err != nil { diff --git a/pkg/scanners/helm/scanner.go 
b/pkg/scanners/helm/scanner.go index 81c386c43213..e9f8c9f68741 100644 --- a/pkg/scanners/helm/scanner.go +++ b/pkg/scanners/helm/scanner.go @@ -8,14 +8,14 @@ import ( "path/filepath" "strings" - "github.com/aquasecurity/defsec/pkg/debug" - "github.com/aquasecurity/defsec/pkg/framework" - "github.com/aquasecurity/defsec/pkg/scan" - "github.com/aquasecurity/defsec/pkg/types" "github.com/liamg/memoryfs" + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/rego" + "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/detection" "github.com/aquasecurity/trivy/pkg/scanners" "github.com/aquasecurity/trivy/pkg/scanners/helm/parser" @@ -52,19 +52,19 @@ func (s *Scanner) SetFrameworks(frameworks []framework.Framework) { } // New creates a new Scanner -func New(options ...options.ScannerOption) *Scanner { +func New(opts ...options.ScannerOption) *Scanner { s := &Scanner{ - options: options, + options: opts, } - for _, option := range options { + for _, option := range opts { option(s) } return s } -func (s *Scanner) AddParserOptions(options ...options.ParserOption) { - s.parserOptions = append(s.parserOptions, options...) +func (s *Scanner) AddParserOptions(opts ...options.ParserOption) { + s.parserOptions = append(s.parserOptions, opts...) } func (s *Scanner) SetUseEmbeddedPolicies(b bool) { diff --git a/pkg/scanners/json/parser/parser.go b/pkg/scanners/json/parser/parser.go index 3489f0dc661c..1ff75eae3a30 100644 --- a/pkg/scanners/json/parser/parser.go +++ b/pkg/scanners/json/parser/parser.go @@ -68,8 +68,8 @@ func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[st } // ParseFile parses Dockerfile content from the provided filesystem path. 
-func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) (interface{}, error) { - f, err := fs.Open(filepath.ToSlash(path)) +func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) (interface{}, error) { + f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return nil, err } diff --git a/pkg/scanners/json/scanner.go b/pkg/scanners/json/scanner.go index 6612b6d8f477..8276bcfeb55a 100644 --- a/pkg/scanners/json/scanner.go +++ b/pkg/scanners/json/scanner.go @@ -19,7 +19,7 @@ import ( var _ scanners.FSScanner = (*Scanner)(nil) var _ options.ConfigurableScanner = (*Scanner)(nil) -type Scanner struct { +type Scanner struct { // nolint: gocritic debug debug.Logger policyDirs []string policyReaders []io.Reader @@ -102,9 +102,9 @@ func (s *Scanner) Name() string { return "JSON" } -func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { +func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { - files, err := s.parser.ParseFS(ctx, fs, path) + files, err := s.parser.ParseFS(ctx, fsys, path) if err != nil { return nil, err } @@ -117,25 +117,25 @@ func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Resul for path, file := range files { inputs = append(inputs, rego.Input{ Path: path, - FS: fs, + FS: fsys, Contents: file, }) } - results, err := s.scanRego(ctx, fs, inputs...) + results, err := s.scanRego(ctx, fsys, inputs...) 
if err != nil { return nil, err } return results, nil } -func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { - parsed, err := s.parser.ParseFile(ctx, fs, path) +func (s *Scanner) ScanFile(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { + parsed, err := s.parser.ParseFile(ctx, fsys, path) if err != nil { return nil, err } s.debug.Log("Scanning %s...", path) - return s.scanRego(ctx, fs, rego.Input{ + return s.scanRego(ctx, fsys, rego.Input{ Path: path, Contents: parsed, }) diff --git a/pkg/scanners/kubernetes/parser/manifest.go b/pkg/scanners/kubernetes/parser/manifest.go index 3f809a6f9145..82da971b3e30 100644 --- a/pkg/scanners/kubernetes/parser/manifest.go +++ b/pkg/scanners/kubernetes/parser/manifest.go @@ -14,7 +14,7 @@ type Manifest struct { func (m *Manifest) UnmarshalYAML(value *yaml.Node) error { switch value.Tag { - case "!!map": + case string(TagMap): node := new(ManifestNode) node.Path = m.Path if err := value.Decode(node); err != nil { diff --git a/pkg/scanners/kubernetes/parser/parser.go b/pkg/scanners/kubernetes/parser/parser.go index 57b3a663283d..a1847686cef6 100644 --- a/pkg/scanners/kubernetes/parser/parser.go +++ b/pkg/scanners/kubernetes/parser/parser.go @@ -34,9 +34,9 @@ func (p *Parser) SetSkipRequiredCheck(b bool) { } // New creates a new K8s parser -func New(options ...options.ParserOption) *Parser { +func New(opts ...options.ParserOption) *Parser { p := &Parser{} - for _, option := range options { + for _, option := range opts { option(p) } return p @@ -73,8 +73,8 @@ func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[st } // ParseFile parses Kubernetes manifest from the provided filesystem path. 
-func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) ([]interface{}, error) { - f, err := fs.Open(filepath.ToSlash(path)) +func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) ([]interface{}, error) { + f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return nil, err } @@ -82,11 +82,11 @@ func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) ([]interfac return p.Parse(f, path) } -func (p *Parser) required(fs fs.FS, path string) bool { +func (p *Parser) required(fsys fs.FS, path string) bool { if p.skipRequired { return true } - f, err := fs.Open(filepath.ToSlash(path)) + f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return false } diff --git a/pkg/scanners/kubernetes/scanner.go b/pkg/scanners/kubernetes/scanner.go index a50c9db30c1d..6c5c3b7ff2ab 100644 --- a/pkg/scanners/kubernetes/scanner.go +++ b/pkg/scanners/kubernetes/scanner.go @@ -8,6 +8,8 @@ import ( "sort" "sync" + "github.com/liamg/memoryfs" + "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/rego" @@ -16,13 +18,12 @@ import ( "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/scanners" "github.com/aquasecurity/trivy/pkg/scanners/kubernetes/parser" - "github.com/liamg/memoryfs" ) var _ scanners.FSScanner = (*Scanner)(nil) var _ options.ConfigurableScanner = (*Scanner)(nil) -type Scanner struct { +type Scanner struct { // nolint: gocritic debug debug.Logger options []options.ScannerOption policyDirs []string diff --git a/pkg/scanners/terraform/executor/executor.go b/pkg/scanners/terraform/executor/executor.go index 5a22e042d585..943283652a0e 100644 --- a/pkg/scanners/terraform/executor/executor.go +++ b/pkg/scanners/terraform/executor/executor.go @@ -8,13 +8,12 @@ import ( "github.com/aquasecurity/defsec/pkg/debug" 
"github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/rego" "github.com/aquasecurity/defsec/pkg/rules" "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/severity" "github.com/aquasecurity/defsec/pkg/state" "github.com/aquasecurity/defsec/pkg/terraform" - - "github.com/aquasecurity/defsec/pkg/rego" adapter "github.com/aquasecurity/trivy/internal/adapters/terraform" ) @@ -67,7 +66,7 @@ func New(options ...Option) *Executor { } // Find element in list -func checkInList(id string, altIDs []string, list []string) bool { +func checkInList(id string, altIDs, list []string) bool { for _, codeIgnored := range list { if codeIgnored == id { return true @@ -107,7 +106,7 @@ func (e *Executor) Execute(modules terraform.Modules) (scan.Results, Metrics, er checksTime := time.Now() registeredRules := rules.GetRegistered(e.frameworks...) - e.debug.Log("Initialised %d rule(s).", len(registeredRules)) + e.debug.Log("Initialized %d rule(s).", len(registeredRules)) pool := NewPool(threads, registeredRules, modules, infra, e.ignoreCheckErrors, e.regoScanner, e.regoOnly) e.debug.Log("Created pool with %d worker(s) to apply rules.", threads) diff --git a/pkg/scanners/terraform/executor/pool.go b/pkg/scanners/terraform/executor/pool.go index 9ea1f6907468..39a277c5480d 100644 --- a/pkg/scanners/terraform/executor/pool.go +++ b/pkg/scanners/terraform/executor/pool.go @@ -26,11 +26,11 @@ type Pool struct { regoOnly bool } -func NewPool(size int, rules []types.RegisteredRule, modules terraform.Modules, state *state.State, ignoreErrors bool, regoScanner *rego.Scanner, regoOnly bool) *Pool { +func NewPool(size int, rules []types.RegisteredRule, modules terraform.Modules, st *state.State, ignoreErrors bool, regoScanner *rego.Scanner, regoOnly bool) *Pool { return &Pool{ size: size, rules: rules, - state: state, + state: st, modules: modules, ignoreErrors: ignoreErrors, 
rs: regoScanner, @@ -227,7 +227,7 @@ func cleanPathRelativeToWorkingDir(dir, path string) string { return relPath } -func wildcardMatch(pattern string, subject string) bool { +func wildcardMatch(pattern, subject string) bool { if pattern == "" { return false } diff --git a/pkg/scanners/terraform/executor/statistics.go b/pkg/scanners/terraform/executor/statistics.go index 5c2dd1784ea2..6bdaab0fd2a4 100644 --- a/pkg/scanners/terraform/executor/statistics.go +++ b/pkg/scanners/terraform/executor/statistics.go @@ -8,8 +8,9 @@ import ( "strconv" "strings" - "github.com/aquasecurity/defsec/pkg/scan" "github.com/olekukonko/tablewriter" + + "github.com/aquasecurity/defsec/pkg/scan" ) type StatisticsItem struct { diff --git a/pkg/scanners/terraform/options.go b/pkg/scanners/terraform/options.go index 93c50d6ddd60..9c57af3400f6 100644 --- a/pkg/scanners/terraform/options.go +++ b/pkg/scanners/terraform/options.go @@ -8,7 +8,6 @@ import ( "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/severity" "github.com/aquasecurity/defsec/pkg/state" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" ) diff --git a/pkg/scanners/terraform/parser/evaluator.go b/pkg/scanners/terraform/parser/evaluator.go index cc3b24557e3d..e83ea9301db9 100644 --- a/pkg/scanners/terraform/parser/evaluator.go +++ b/pkg/scanners/terraform/parser/evaluator.go @@ -8,16 +8,16 @@ import ( "reflect" "time" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/ext/typeexpr" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/convert" "golang.org/x/exp/slices" "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/terraform" tfcontext "github.com/aquasecurity/defsec/pkg/terraform/context" 
"github.com/aquasecurity/defsec/pkg/types" - "github.com/hashicorp/hcl/v2" - "github.com/hashicorp/hcl/v2/ext/typeexpr" - "github.com/zclconf/go-cty/cty" - "github.com/zclconf/go-cty/cty/convert" ) const ( diff --git a/pkg/scanners/terraform/parser/funcs/collection.go b/pkg/scanners/terraform/parser/funcs/collection.go index 693b8912f618..f68af2ce36af 100644 --- a/pkg/scanners/terraform/parser/funcs/collection.go +++ b/pkg/scanners/terraform/parser/funcs/collection.go @@ -353,7 +353,7 @@ var MatchkeysFunc = function.New(&function.Spec{ return cty.ListValEmpty(retType.ElementType()), errors.New("length of keys and values should be equal") } - output := make([]cty.Value, 0) + var output []cty.Value values := args[0] // Keys and searchset must be the same type. @@ -582,7 +582,7 @@ var TransposeFunc = function.New(&function.Spec{ } for outKey, outVal := range tmpMap { - values := make([]cty.Value, 0) + var values []cty.Value for _, v := range outVal { values = append(values, cty.StringVal(v)) } @@ -600,7 +600,7 @@ var TransposeFunc = function.New(&function.Spec{ // ListFunc constructs a function that takes an arbitrary number of arguments // and returns a list containing those values in the same order. // -// This function is deprecated in Terraform v0.12 +// Deprecated: This function is deprecated in Terraform v0.12 var ListFunc = function.New(&function.Spec{ Params: []function.Parameter{}, VarParam: &function.Parameter{ @@ -621,7 +621,7 @@ var ListFunc = function.New(&function.Spec{ // MapFunc constructs a function that takes an even number of arguments and // returns a map whose elements are constructed from consecutive pairs of arguments. 
// -// This function is deprecated in Terraform v0.12 +// Deprecated: This function is deprecated in Terraform v0.12 var MapFunc = function.New(&function.Spec{ Params: []function.Parameter{}, VarParam: &function.Parameter{ diff --git a/pkg/scanners/terraform/parser/funcs/crypto.go b/pkg/scanners/terraform/parser/funcs/crypto.go index 424c4c184763..4bc3bab99744 100644 --- a/pkg/scanners/terraform/parser/funcs/crypto.go +++ b/pkg/scanners/terraform/parser/funcs/crypto.go @@ -2,9 +2,9 @@ package funcs import ( - "crypto/md5" + "crypto/md5" // nolint: gosec "crypto/rsa" - "crypto/sha1" + "crypto/sha1" // nolint: gosec "crypto/sha256" "crypto/sha512" "encoding/asn1" @@ -274,7 +274,7 @@ func UUID() (cty.Value, error) { // UUIDV5 generates and returns a Type-5 UUID in the standard hexadecimal string // format. -func UUIDV5(namespace cty.Value, name cty.Value) (cty.Value, error) { +func UUIDV5(namespace, name cty.Value) (cty.Value, error) { return UUIDV5Func.Call([]cty.Value{namespace, name}) } diff --git a/pkg/scanners/terraform/parser/funcs/datetime.go b/pkg/scanners/terraform/parser/funcs/datetime.go index 253e59eef018..b09da879da99 100644 --- a/pkg/scanners/terraform/parser/funcs/datetime.go +++ b/pkg/scanners/terraform/parser/funcs/datetime.go @@ -66,6 +66,6 @@ func Timestamp() (cty.Value, error) { // // The result is a string, also in RFC 3339 format, representing the result // of adding the given direction to the given timestamp. 
-func TimeAdd(timestamp cty.Value, duration cty.Value) (cty.Value, error) { +func TimeAdd(timestamp, duration cty.Value) (cty.Value, error) { return TimeAddFunc.Call([]cty.Value{timestamp, duration}) } diff --git a/pkg/scanners/terraform/parser/funcs/defaults.go b/pkg/scanners/terraform/parser/funcs/defaults.go index 4467b81e35ce..1e5c0913adbd 100644 --- a/pkg/scanners/terraform/parser/funcs/defaults.go +++ b/pkg/scanners/terraform/parser/funcs/defaults.go @@ -69,6 +69,7 @@ var DefaultsFunc = function.New(&function.Spec{ }, }) +// nolint: gocyclo func defaultsApply(input, fallback cty.Value) cty.Value { wantTy := input.Type() @@ -113,7 +114,7 @@ func defaultsApply(input, fallback cty.Value) cty.Value { return input } atys := wantTy.AttributeTypes() - ret := map[string]cty.Value{} + ret := make(map[string]cty.Value) for attr, aty := range atys { inputSub := umInput.GetAttr(attr) fallbackSub := cty.NullVal(aty) @@ -154,7 +155,7 @@ func defaultsApply(input, fallback cty.Value) cty.Value { ety := wantTy.ElementType() switch { case wantTy.IsMapType(): - newVals := map[string]cty.Value{} + newVals := make(map[string]cty.Value) if !umInput.IsNull() { for it := umInput.ElementIterator(); it.Next(); { diff --git a/pkg/scanners/terraform/parser/funcs/encoding.go b/pkg/scanners/terraform/parser/funcs/encoding.go index f74a508fb7ed..778367fb8fce 100644 --- a/pkg/scanners/terraform/parser/funcs/encoding.go +++ b/pkg/scanners/terraform/parser/funcs/encoding.go @@ -30,7 +30,7 @@ var Base64DecodeFunc = function.New(&function.Spec{ if err != nil { return cty.UnknownVal(cty.String), fmt.Errorf("failed to decode base64 data '%s'", s) } - if !utf8.Valid([]byte(sDec)) { + if !utf8.Valid(sDec) { log.Printf("[DEBUG] the result of decoding the provided string is not valid UTF-8: %s", sDec) return cty.UnknownVal(cty.String), fmt.Errorf("the result of decoding the provided string is not valid UTF-8") } diff --git a/pkg/scanners/terraform/parser/funcs/number.go 
b/pkg/scanners/terraform/parser/funcs/number.go index 6c8f5dc3b6d9..012455eb7737 100644 --- a/pkg/scanners/terraform/parser/funcs/number.go +++ b/pkg/scanners/terraform/parser/funcs/number.go @@ -165,6 +165,6 @@ func Signum(num cty.Value) (cty.Value, error) { } // ParseInt parses a string argument and returns an integer of the specified base. -func ParseInt(num cty.Value, base cty.Value) (cty.Value, error) { +func ParseInt(num, base cty.Value) (cty.Value, error) { return ParseIntFunc.Call([]cty.Value{num, base}) } diff --git a/pkg/scanners/terraform/parser/funcs/string.go b/pkg/scanners/terraform/parser/funcs/string.go index 49696784e872..6fe077c1f586 100644 --- a/pkg/scanners/terraform/parser/funcs/string.go +++ b/pkg/scanners/terraform/parser/funcs/string.go @@ -43,7 +43,7 @@ var ReplaceFunc = function.New(&function.Spec{ return cty.StringVal(re.ReplaceAllString(str, replace)), nil } - return cty.StringVal(strings.Replace(str, substr, replace, -1)), nil + return cty.StringVal(strings.ReplaceAll(str, substr, replace)), nil }, }) diff --git a/pkg/scanners/terraform/parser/functions.go b/pkg/scanners/terraform/parser/functions.go index 72cb74e0246b..f6e9cd62664f 100644 --- a/pkg/scanners/terraform/parser/functions.go +++ b/pkg/scanners/terraform/parser/functions.go @@ -3,12 +3,13 @@ package parser import ( "io/fs" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser/funcs" "github.com/hashicorp/hcl/v2/ext/tryfunc" ctyyaml "github.com/zclconf/go-cty-yaml" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/function" "github.com/zclconf/go-cty/cty/function/stdlib" + + "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser/funcs" ) // Functions returns the set of functions that should be used to when evaluating diff --git a/pkg/scanners/terraform/parser/load_blocks.go b/pkg/scanners/terraform/parser/load_blocks.go index 88bd9de47f3b..34b303e3a91f 100644 --- 
a/pkg/scanners/terraform/parser/load_blocks.go +++ b/pkg/scanners/terraform/parser/load_blocks.go @@ -6,9 +6,10 @@ import ( "strings" "time" + "github.com/hashicorp/hcl/v2" + "github.com/aquasecurity/defsec/pkg/terraform" "github.com/aquasecurity/defsec/pkg/types" - "github.com/hashicorp/hcl/v2" ) func loadBlocksFromFile(file sourceFile, moduleSource string) (hcl.Blocks, []terraform.Ignore, error) { @@ -23,7 +24,7 @@ func loadBlocksFromFile(file sourceFile, moduleSource string) (hcl.Blocks, []ter return contents.Blocks, ignores, nil } -func parseIgnores(data []byte, path string, moduleSource string) []terraform.Ignore { +func parseIgnores(data []byte, path, moduleSource string) []terraform.Ignore { var ignores []terraform.Ignore for i, line := range strings.Split(string(data), "\n") { line = strings.TrimSpace(line) diff --git a/pkg/scanners/terraform/parser/load_module.go b/pkg/scanners/terraform/parser/load_module.go index c8d0141bfd01..7abb8ba18455 100644 --- a/pkg/scanners/terraform/parser/load_module.go +++ b/pkg/scanners/terraform/parser/load_module.go @@ -8,10 +8,10 @@ import ( "path/filepath" "strings" + "github.com/zclconf/go-cty/cty" + "github.com/aquasecurity/defsec/pkg/terraform" "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser/resolvers" - - "github.com/zclconf/go-cty/cty" ) type moduleLoadError struct { @@ -146,7 +146,7 @@ func (e *evaluator) loadModuleFromTerraformCache(ctx context.Context, b *terrafo func (e *evaluator) loadExternalModule(ctx context.Context, b *terraform.Block, source string) (*ModuleDefinition, error) { - e.debug.Log("locating non-initialised module '%s'...", source) + e.debug.Log("locating non-initialized module '%s'...", source) version := b.GetAttribute("version").AsStringValueOrDefault("", b).Value() opt := resolvers.Options{ diff --git a/pkg/scanners/terraform/parser/load_module_metadata.go b/pkg/scanners/terraform/parser/load_module_metadata.go index 
9d06402a76fc..7b316f8d66e1 100644 --- a/pkg/scanners/terraform/parser/load_module_metadata.go +++ b/pkg/scanners/terraform/parser/load_module_metadata.go @@ -16,7 +16,7 @@ type modulesMetadata struct { } func loadModuleMetadata(target fs.FS, fullPath string) (*modulesMetadata, string, error) { - metadataPath := filepath.Join(fullPath, ".terraform/modules/modules.json") + metadataPath := filepath.Join(fullPath, ".terraform/modules/modules.json") // nolint: gocritic f, err := target.Open(metadataPath) if err != nil { diff --git a/pkg/scanners/terraform/parser/module_retrieval.go b/pkg/scanners/terraform/parser/module_retrieval.go index 66127715f513..57248613f651 100644 --- a/pkg/scanners/terraform/parser/module_retrieval.go +++ b/pkg/scanners/terraform/parser/module_retrieval.go @@ -19,7 +19,7 @@ var defaultResolvers = []ModuleResolver{ resolvers.Registry, } -func resolveModule(ctx context.Context, current fs.FS, opt resolvers.Options) (filesystem fs.FS, sourcePrefix string, downloadPath string, err error) { +func resolveModule(ctx context.Context, current fs.FS, opt resolvers.Options) (filesystem fs.FS, sourcePrefix, downloadPath string, err error) { opt.Debug("Resolving module '%s' with source: '%s'...", opt.Name, opt.Source) for _, resolver := range defaultResolvers { if filesystem, prefix, path, applies, err := resolver.Resolve(ctx, current, opt); err != nil { diff --git a/pkg/scanners/terraform/parser/parser.go b/pkg/scanners/terraform/parser/parser.go index 2521fb5dbcb6..7f3fa52b2307 100644 --- a/pkg/scanners/terraform/parser/parser.go +++ b/pkg/scanners/terraform/parser/parser.go @@ -10,14 +10,14 @@ import ( "strings" "time" - "github.com/aquasecurity/defsec/pkg/debug" - "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/defsec/pkg/terraform" - tfcontext "github.com/aquasecurity/defsec/pkg/terraform/context" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclparse" 
"github.com/zclconf/go-cty/cty" + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/terraform" + tfcontext "github.com/aquasecurity/defsec/pkg/terraform/context" "github.com/aquasecurity/trivy/pkg/extrafs" ) diff --git a/pkg/scanners/terraform/parser/resolvers/cache.go b/pkg/scanners/terraform/parser/resolvers/cache.go index 1314d538a60a..6efc15f72dbb 100644 --- a/pkg/scanners/terraform/parser/resolvers/cache.go +++ b/pkg/scanners/terraform/parser/resolvers/cache.go @@ -25,7 +25,7 @@ func locateCacheFS() (fs.FS, error) { func locateCacheDir() (string, error) { cacheDir := filepath.Join(os.TempDir(), tempDirName, "cache") - if err := os.MkdirAll(cacheDir, 0o755); err != nil { + if err := os.MkdirAll(cacheDir, 0o750); err != nil { return "", err } if !isWritable(cacheDir) { @@ -34,7 +34,7 @@ func locateCacheDir() (string, error) { return cacheDir, nil } -func (r *cacheResolver) Resolve(_ context.Context, _ fs.FS, opt Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) { +func (r *cacheResolver) Resolve(_ context.Context, _ fs.FS, opt Options) (filesystem fs.FS, prefix, downloadPath string, applies bool, err error) { if opt.SkipCache { opt.Debug("Cache is disabled.") return nil, "", "", false, nil diff --git a/pkg/scanners/terraform/parser/resolvers/local.go b/pkg/scanners/terraform/parser/resolvers/local.go index 94d92099b6c3..58fe5b9cd084 100644 --- a/pkg/scanners/terraform/parser/resolvers/local.go +++ b/pkg/scanners/terraform/parser/resolvers/local.go @@ -10,7 +10,7 @@ type localResolver struct{} var Local = &localResolver{} -func (r *localResolver) Resolve(_ context.Context, target fs.FS, opt Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) { +func (r *localResolver) Resolve(_ context.Context, target fs.FS, opt Options) (filesystem fs.FS, prefix, 
downloadPath string, applies bool, err error) { if !opt.hasPrefix(".", "..") { return nil, "", "", false, nil } diff --git a/pkg/scanners/terraform/parser/resolvers/registry.go b/pkg/scanners/terraform/parser/resolvers/registry.go index 5623e9064e06..4bc629d35af4 100644 --- a/pkg/scanners/terraform/parser/resolvers/registry.go +++ b/pkg/scanners/terraform/parser/resolvers/registry.go @@ -143,6 +143,7 @@ func resolveVersion(input string, versions moduleVersions) (string, error) { if len(versions.Modules[0].Versions) == 0 { return "", fmt.Errorf("no available versions for module") } + constraints, err := semver.NewConstraint(input) if err != nil { return "", err diff --git a/pkg/scanners/terraform/parser/resolvers/remote.go b/pkg/scanners/terraform/parser/resolvers/remote.go index 4c1a96437e65..4a6a26798a8a 100644 --- a/pkg/scanners/terraform/parser/resolvers/remote.go +++ b/pkg/scanners/terraform/parser/resolvers/remote.go @@ -29,7 +29,7 @@ func (r *remoteResolver) GetDownloadCount() int { return int(atomic.LoadInt32(&r.count)) } -func (r *remoteResolver) Resolve(ctx context.Context, _ fs.FS, opt Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) { +func (r *remoteResolver) Resolve(ctx context.Context, _ fs.FS, opt Options) (filesystem fs.FS, prefix, downloadPath string, applies bool, err error) { if !opt.hasPrefix("github.com/", "bitbucket.org/", "s3:", "git@", "git:", "hg:", "https:", "gcs:") { return nil, "", "", false, nil } @@ -58,7 +58,7 @@ func (r *remoteResolver) Resolve(ctx context.Context, _ fs.FS, opt Options) (fil func (r *remoteResolver) download(ctx context.Context, opt Options, dst string) error { _ = os.RemoveAll(dst) - if err := os.MkdirAll(filepath.Dir(dst), 0o755); err != nil { + if err := os.MkdirAll(filepath.Dir(dst), 0o750); err != nil { return err } diff --git a/pkg/scanners/terraform/scanner.go b/pkg/scanners/terraform/scanner.go index 9569848b35a4..b6636460723f 100644 --- 
a/pkg/scanners/terraform/scanner.go +++ b/pkg/scanners/terraform/scanner.go @@ -10,15 +10,15 @@ import ( "sync" "time" + "golang.org/x/exp/slices" + "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/rego" "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/terraform" "github.com/aquasecurity/defsec/pkg/types" - "golang.org/x/exp/slices" - - "github.com/aquasecurity/defsec/pkg/rego" "github.com/aquasecurity/trivy/pkg/extrafs" "github.com/aquasecurity/trivy/pkg/scanners" "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" @@ -30,7 +30,7 @@ var _ scanners.FSScanner = (*Scanner)(nil) var _ options.ConfigurableScanner = (*Scanner)(nil) var _ ConfigurableTerraformScanner = (*Scanner)(nil) -type Scanner struct { +type Scanner struct { // nolint: gocritic sync.Mutex options []options.ScannerOption parserOpt []options.ParserOption @@ -76,12 +76,12 @@ func (s *Scanner) SetForceAllDirs(b bool) { s.forceAllDirs = b } -func (s *Scanner) AddParserOptions(options ...options.ParserOption) { - s.parserOpt = append(s.parserOpt, options...) +func (s *Scanner) AddParserOptions(opts ...options.ParserOption) { + s.parserOpt = append(s.parserOpt, opts...) } -func (s *Scanner) AddExecutorOptions(options ...executor.Option) { - s.executorOpt = append(s.executorOpt, options...) +func (s *Scanner) AddExecutorOptions(opts ...executor.Option) { + s.executorOpt = append(s.executorOpt, opts...) 
} func (s *Scanner) SetPolicyReaders(readers []io.Reader) { @@ -128,12 +128,12 @@ type Metrics struct { } } -func New(options ...options.ScannerOption) *Scanner { +func New(opts ...options.ScannerOption) *Scanner { s := &Scanner{ dirs: make(map[string]struct{}), - options: options, + options: opts, } - for _, opt := range options { + for _, opt := range opts { opt(s) } return s diff --git a/pkg/scanners/terraformplan/parser/parser.go b/pkg/scanners/terraformplan/parser/parser.go index a14eea339ac2..583e539f1b66 100644 --- a/pkg/scanners/terraformplan/parser/parser.go +++ b/pkg/scanners/terraformplan/parser/parser.go @@ -8,8 +8,9 @@ import ( "os" "strings" - "github.com/aquasecurity/defsec/pkg/terraform" "github.com/liamg/memoryfs" + + "github.com/aquasecurity/defsec/pkg/terraform" ) type Parser struct { @@ -141,13 +142,12 @@ func getResources(module Module, resourceChanges []ResourceChange, configuration return resources, nil } -func unpackConfigurationValue(val interface{}, r Resource) (interface{}, bool) { - switch t := val.(type) { - case map[string]interface{}: +func unpackConfigurationValue(val any, r Resource) (any, bool) { + if t, ok := val.(map[string]any); ok { for k, v := range t { switch k { case "references": - reference := v.([]interface{})[0].(string) + reference := v.([]any)[0].(string) if strings.HasPrefix(r.Address, "module.") { hashable := strings.TrimSuffix(strings.Split(r.Address, fmt.Sprintf(".%s.", r.Type))[0], ".data") /* #nosec */ @@ -171,9 +171,7 @@ func unpackConfigurationValue(val interface{}, r Resource) (interface{}, bool) { reference = strings.Join(rejoin, ".") } - - shouldReplace := false - return terraform.PlanReference{Value: reference}, shouldReplace + return terraform.PlanReference{Value: reference}, false case "constant_value": return v, false } diff --git a/pkg/scanners/terraformplan/scanner.go b/pkg/scanners/terraformplan/scanner.go index 101086d353b8..aee33c256369 100644 --- 
a/pkg/scanners/terraformplan/scanner.go +++ b/pkg/scanners/terraformplan/scanner.go @@ -6,6 +6,8 @@ import ( "io" "io/fs" + "github.com/bmatcuk/doublestar/v4" + "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/scan" @@ -13,7 +15,6 @@ import ( terraformScanner "github.com/aquasecurity/trivy/pkg/scanners/terraform" "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" "github.com/aquasecurity/trivy/pkg/scanners/terraformplan/parser" - "github.com/bmatcuk/doublestar/v4" ) var tfPlanExts = []string{ @@ -123,21 +124,21 @@ func (s *Scanner) ScanFS(ctx context.Context, inputFS fs.FS, dir string) (scan.R return results, nil } -func New(options ...options.ScannerOption) *Scanner { +func New(opts ...options.ScannerOption) *Scanner { scanner := &Scanner{ parser: *parser.New(), - options: options, + options: opts, } - for _, o := range options { + for _, o := range opts { o(scanner) } return scanner } -func (s *Scanner) ScanFile(filepath string, fs fs.FS) (scan.Results, error) { +func (s *Scanner) ScanFile(filepath string, fsys fs.FS) (scan.Results, error) { s.debug.Log("Scanning file %s", filepath) - file, err := fs.Open(filepath) + file, err := fsys.Open(filepath) if err != nil { return nil, err } diff --git a/pkg/scanners/toml/parser/parser.go b/pkg/scanners/toml/parser/parser.go index 736d6bd64915..3af651b9d2ab 100644 --- a/pkg/scanners/toml/parser/parser.go +++ b/pkg/scanners/toml/parser/parser.go @@ -7,6 +7,7 @@ import ( "path/filepath" "github.com/BurntSushi/toml" + "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/trivy/pkg/detection" @@ -68,8 +69,8 @@ func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[st } // ParseFile parses toml content from the provided filesystem path. 
-func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) (interface{}, error) { - f, err := fs.Open(filepath.ToSlash(path)) +func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) (interface{}, error) { + f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return nil, err } diff --git a/pkg/scanners/toml/scanner.go b/pkg/scanners/toml/scanner.go index 46df8c54e429..9af56eb73ac3 100644 --- a/pkg/scanners/toml/scanner.go +++ b/pkg/scanners/toml/scanner.go @@ -17,7 +17,7 @@ import ( var _ options.ConfigurableScanner = (*Scanner)(nil) -type Scanner struct { +type Scanner struct { // nolint: gocritic debug debug.Logger options []options.ScannerOption policyDirs []string @@ -96,9 +96,9 @@ func NewScanner(opts ...options.ScannerOption) *Scanner { return s } -func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { +func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { - files, err := s.parser.ParseFS(ctx, fs, path) + files, err := s.parser.ParseFS(ctx, fsys, path) if err != nil { return nil, err } @@ -112,24 +112,24 @@ func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Resul inputs = append(inputs, rego.Input{ Path: path, Contents: file, - FS: fs, + FS: fsys, }) } - results, err := s.scanRego(ctx, fs, inputs...) + results, err := s.scanRego(ctx, fsys, inputs...) 
if err != nil { return nil, err } return results, nil } -func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { - parsed, err := s.parser.ParseFile(ctx, fs, path) +func (s *Scanner) ScanFile(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { + parsed, err := s.parser.ParseFile(ctx, fsys, path) if err != nil { return nil, err } s.debug.Log("Scanning %s...", path) - return s.scanRego(ctx, fs, rego.Input{ + return s.scanRego(ctx, fsys, rego.Input{ Path: path, Contents: parsed, }) diff --git a/pkg/scanners/universal/scanner.go b/pkg/scanners/universal/scanner.go index f08904216ea0..7040bd43054b 100644 --- a/pkg/scanners/universal/scanner.go +++ b/pkg/scanners/universal/scanner.go @@ -50,10 +50,10 @@ func (s *Scanner) Name() string { return "Universal" } -func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, dir string) (scan.Results, error) { +func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, dir string) (scan.Results, error) { var results scan.Results for _, inner := range s.fsScanners { - innerResults, err := inner.ScanFS(ctx, fs, dir) + innerResults, err := inner.ScanFS(ctx, fsys, dir) if err != nil { return nil, err } diff --git a/pkg/scanners/yaml/parser/parser.go b/pkg/scanners/yaml/parser/parser.go index 1fd56063247b..783d6e693b30 100644 --- a/pkg/scanners/yaml/parser/parser.go +++ b/pkg/scanners/yaml/parser/parser.go @@ -8,10 +8,11 @@ import ( "path/filepath" "strings" + "gopkg.in/yaml.v3" + "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/trivy/pkg/detection" - "gopkg.in/yaml.v3" ) var _ options.ConfigurableParser = (*Parser)(nil) @@ -70,8 +71,8 @@ func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[st } // ParseFile parses yaml content from the provided filesystem path. 
-func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) ([]interface{}, error) { - f, err := fs.Open(filepath.ToSlash(path)) +func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) ([]interface{}, error) { + f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return nil, err } diff --git a/pkg/scanners/yaml/scanner.go b/pkg/scanners/yaml/scanner.go index d6e32df005e0..607462061fa6 100644 --- a/pkg/scanners/yaml/scanner.go +++ b/pkg/scanners/yaml/scanner.go @@ -17,7 +17,7 @@ import ( var _ options.ConfigurableScanner = (*Scanner)(nil) -type Scanner struct { +type Scanner struct { // nolint: gocritic options []options.ScannerOption debug debug.Logger policyDirs []string @@ -95,9 +95,9 @@ func NewScanner(opts ...options.ScannerOption) *Scanner { return s } -func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { +func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { - fileset, err := s.parser.ParseFS(ctx, fs, path) + fileset, err := s.parser.ParseFS(ctx, fsys, path) if err != nil { return nil, err } @@ -112,25 +112,25 @@ func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Resul inputs = append(inputs, rego.Input{ Path: path, Contents: file, - FS: fs, + FS: fsys, }) } } - results, err := s.scanRego(ctx, fs, inputs...) + results, err := s.scanRego(ctx, fsys, inputs...) 
if err != nil { return nil, err } return results, nil } -func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { - parsed, err := s.parser.ParseFile(ctx, fs, path) +func (s *Scanner) ScanFile(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { + parsed, err := s.parser.ParseFile(ctx, fsys, path) if err != nil { return nil, err } s.debug.Log("Scanning %s...", path) - return s.scanRego(ctx, fs, rego.Input{ + return s.scanRego(ctx, fsys, rego.Input{ Path: path, Contents: parsed, }) diff --git a/test/testutil/util.go b/test/testutil/util.go index ea29df78a9ae..706c99c40b14 100644 --- a/test/testutil/util.go +++ b/test/testutil/util.go @@ -7,10 +7,11 @@ import ( "strings" "testing" - "github.com/aquasecurity/defsec/pkg/scan" "github.com/liamg/memoryfs" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + + "github.com/aquasecurity/defsec/pkg/scan" ) func AssertRuleFound(t *testing.T, ruleID string, results scan.Results, message string, args ...interface{}) { @@ -56,7 +57,7 @@ func CreateFS(t *testing.T, files map[string]string) fs.FS { return memfs } -func AssertDefsecEqual(t *testing.T, expected interface{}, actual interface{}) { +func AssertDefsecEqual(t *testing.T, expected, actual interface{}) { expectedJson, err := json.MarshalIndent(expected, "", "\t") require.NoError(t, err) actualJson, err := json.MarshalIndent(actual, "", "\t") From 971bddbb28bab6b02a87b554441de89da3de5cf9 Mon Sep 17 00:00:00 2001 From: Simar Date: Thu, 25 Jan 2024 22:27:24 -0700 Subject: [PATCH 03/13] use go-version/pkg/semver --- go.mod | 1 - go.sum | 2 -- pkg/scanners/terraform/parser/resolvers/registry.go | 6 +++--- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/go.mod b/go.mod index b6ac31504872..fd17000843d2 100644 --- a/go.mod +++ b/go.mod @@ -118,7 +118,6 @@ require ( ) require ( - github.com/Masterminds/semver v1.5.0 
github.com/apparentlymart/go-cidr v1.1.0 github.com/aws/smithy-go v1.19.0 github.com/bitnami/go-version v0.0.0-20231130084017-bb00604d650c diff --git a/go.sum b/go.sum index 362a05d16330..eb45aeeaeaa6 100644 --- a/go.sum +++ b/go.sum @@ -252,8 +252,6 @@ github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE= github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= -github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= -github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0= github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= diff --git a/pkg/scanners/terraform/parser/resolvers/registry.go b/pkg/scanners/terraform/parser/resolvers/registry.go index 4bc629d35af4..a64fba4804da 100644 --- a/pkg/scanners/terraform/parser/resolvers/registry.go +++ b/pkg/scanners/terraform/parser/resolvers/registry.go @@ -11,7 +11,7 @@ import ( "strings" "time" - "github.com/Masterminds/semver" + "github.com/aquasecurity/go-version/pkg/semver" ) type registryResolver struct { @@ -144,13 +144,13 @@ func resolveVersion(input string, versions moduleVersions) (string, error) { return "", fmt.Errorf("no available versions for module") } - constraints, err := semver.NewConstraint(input) + constraints, err := semver.NewConstraints(input) if err != nil { return "", err } var realVersions semver.Collection for _, rawVersion := range versions.Modules[0].Versions { - 
realVersion, err := semver.NewVersion(rawVersion.Version) + realVersion, err := semver.Parse(rawVersion.Version) if err != nil { continue } From 5d6951130d569ccef786775581a00335ecfb2d79 Mon Sep 17 00:00:00 2001 From: Simar Date: Thu, 25 Jan 2024 22:40:30 -0700 Subject: [PATCH 04/13] fix unit tests --- pkg/scanners/azure/functions/intersection_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/scanners/azure/functions/intersection_test.go b/pkg/scanners/azure/functions/intersection_test.go index 98630fa9687c..42d23fee4bf7 100644 --- a/pkg/scanners/azure/functions/intersection_test.go +++ b/pkg/scanners/azure/functions/intersection_test.go @@ -36,7 +36,7 @@ func Test_Intersect(t *testing.T) { []interface{}{"a", "b", "c"}, []interface{}{}, }, - expected: []interface{}{}, + expected: []interface{}(nil), }, { name: "intersect two arrays with both empty", @@ -44,7 +44,7 @@ func Test_Intersect(t *testing.T) { []interface{}{}, []interface{}{}, }, - expected: []interface{}{}, + expected: []interface{}(nil), }, { name: "intersect two arrays with both nil", From 65e6aa5be381752a9a20d2fe9f088c60bb2cded3 Mon Sep 17 00:00:00 2001 From: Simar Date: Thu, 25 Jan 2024 22:50:33 -0700 Subject: [PATCH 05/13] fix unit test --- .../adapters/terraform/digitalocean/compute/adapt_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/adapters/terraform/digitalocean/compute/adapt_test.go b/internal/adapters/terraform/digitalocean/compute/adapt_test.go index 66b035c2392d..8530158ca0b1 100644 --- a/internal/adapters/terraform/digitalocean/compute/adapt_test.go +++ b/internal/adapters/terraform/digitalocean/compute/adapt_test.go @@ -139,8 +139,8 @@ func Test_adaptFirewalls(t *testing.T) { expected: []compute.Firewall{ { Metadata: defsecTypes.NewTestMetadata(), - OutboundRules: []compute.OutboundFirewallRule{}, - InboundRules: []compute.InboundFirewallRule{}, + OutboundRules: []compute.OutboundFirewallRule(nil), + InboundRules: 
[]compute.InboundFirewallRule(nil), }, }, }, From 81cdcc7f4ba8ab0fb754e27446897ab0bf7d03ec Mon Sep 17 00:00:00 2001 From: Simar Date: Thu, 25 Jan 2024 22:57:31 -0700 Subject: [PATCH 06/13] remove testutil dep --- go.mod | 1 - go.sum | 2 -- internal/adapters/cloudformation/aws/config/adapt_test.go | 2 +- internal/adapters/cloudformation/aws/ec2/adapt_test.go | 2 +- internal/adapters/cloudformation/aws/elb/adapt_test.go | 2 +- internal/adapters/cloudformation/aws/rds/adapt_test.go | 2 +- internal/adapters/terraform/aws/apigateway/adapt_test.go | 2 +- internal/adapters/terraform/aws/apigateway/apiv1_test.go | 2 +- internal/adapters/terraform/aws/apigateway/apiv2_test.go | 2 +- internal/adapters/terraform/aws/apigateway/namesv1_test.go | 2 +- internal/adapters/terraform/aws/apigateway/namesv2_test.go | 2 +- internal/adapters/terraform/aws/athena/adapt_test.go | 2 +- internal/adapters/terraform/aws/cloudfront/adapt_test.go | 2 +- internal/adapters/terraform/aws/cloudtrail/adapt_test.go | 2 +- internal/adapters/terraform/aws/cloudwatch/adapt_test.go | 2 +- internal/adapters/terraform/aws/codebuild/adapt_test.go | 2 +- internal/adapters/terraform/aws/config/adapt_test.go | 2 +- internal/adapters/terraform/aws/documentdb/adapt_test.go | 2 +- internal/adapters/terraform/aws/dynamodb/adapt_test.go | 2 +- internal/adapters/terraform/aws/ec2/adapt_test.go | 2 +- internal/adapters/terraform/aws/ec2/autoscaling_test.go | 2 +- internal/adapters/terraform/aws/ec2/subnet_test.go | 2 +- internal/adapters/terraform/aws/ec2/volume_test.go | 2 +- internal/adapters/terraform/aws/ec2/vpc_test.go | 2 +- internal/adapters/terraform/aws/ecr/adapt_test.go | 2 +- internal/adapters/terraform/aws/ecs/adapt_test.go | 2 +- internal/adapters/terraform/aws/efs/adapt_test.go | 2 +- internal/adapters/terraform/aws/eks/adapt_test.go | 2 +- internal/adapters/terraform/aws/elasticache/adapt_test.go | 2 +- internal/adapters/terraform/aws/elasticsearch/adapt_test.go | 2 +- 
internal/adapters/terraform/aws/elb/adapt_test.go | 2 +- internal/adapters/terraform/aws/emr/adapt_test.go | 2 +- internal/adapters/terraform/aws/iam/groups_test.go | 2 +- internal/adapters/terraform/aws/iam/passwords_test.go | 2 +- internal/adapters/terraform/aws/iam/policies_test.go | 2 +- internal/adapters/terraform/aws/iam/roles_test.go | 2 +- internal/adapters/terraform/aws/iam/users_test.go | 2 +- internal/adapters/terraform/aws/kinesis/adapt_test.go | 2 +- internal/adapters/terraform/aws/kms/adapt_test.go | 2 +- internal/adapters/terraform/aws/lambda/adapt_test.go | 2 +- internal/adapters/terraform/aws/mq/adapt_test.go | 2 +- internal/adapters/terraform/aws/msk/adapt_test.go | 2 +- internal/adapters/terraform/aws/neptune/adapt_test.go | 2 +- internal/adapters/terraform/aws/provider/adapt_test.go | 2 +- internal/adapters/terraform/aws/rds/adapt_test.go | 2 +- internal/adapters/terraform/aws/redshift/adapt_test.go | 2 +- internal/adapters/terraform/aws/s3/adapt_test.go | 2 +- internal/adapters/terraform/aws/sns/adapt_test.go | 2 +- internal/adapters/terraform/aws/sqs/adapt_test.go | 2 +- internal/adapters/terraform/aws/ssm/adapt_test.go | 2 +- internal/adapters/terraform/aws/workspaces/adapt_test.go | 2 +- internal/adapters/terraform/azure/appservice/adapt_test.go | 2 +- internal/adapters/terraform/azure/authorization/adapt_test.go | 2 +- internal/adapters/terraform/azure/compute/adapt_test.go | 2 +- internal/adapters/terraform/azure/container/adapt_test.go | 2 +- internal/adapters/terraform/azure/database/adapt_test.go | 2 +- internal/adapters/terraform/azure/datafactory/adapt_test.go | 2 +- internal/adapters/terraform/azure/datalake/adapt_test.go | 2 +- internal/adapters/terraform/azure/keyvault/adapt_test.go | 2 +- internal/adapters/terraform/azure/monitor/adapt_test.go | 2 +- internal/adapters/terraform/azure/network/adapt_test.go | 2 +- internal/adapters/terraform/azure/securitycenter/adapt_test.go | 2 +- 
internal/adapters/terraform/azure/storage/adapt_test.go | 2 +- internal/adapters/terraform/azure/synapse/adapt_test.go | 2 +- internal/adapters/terraform/cloudstack/compute/adapt_test.go | 2 +- internal/adapters/terraform/digitalocean/compute/adapt_test.go | 2 +- internal/adapters/terraform/digitalocean/spaces/adapt_test.go | 2 +- internal/adapters/terraform/github/secrets/adapt_test.go | 2 +- internal/adapters/terraform/google/bigquery/adapt_test.go | 2 +- internal/adapters/terraform/google/compute/disks_test.go | 2 +- internal/adapters/terraform/google/compute/instances_test.go | 2 +- internal/adapters/terraform/google/compute/metadata_test.go | 2 +- internal/adapters/terraform/google/compute/networks_test.go | 2 +- internal/adapters/terraform/google/compute/ssl_test.go | 2 +- internal/adapters/terraform/google/dns/adapt_test.go | 2 +- internal/adapters/terraform/google/gke/adapt_test.go | 2 +- internal/adapters/terraform/google/iam/adapt_test.go | 2 +- internal/adapters/terraform/google/iam/project_iam_test.go | 2 +- internal/adapters/terraform/google/kms/adapt_test.go | 2 +- internal/adapters/terraform/google/sql/adapt_test.go | 2 +- internal/adapters/terraform/google/storage/adapt_test.go | 2 +- internal/adapters/terraform/nifcloud/computing/instance_test.go | 2 +- .../terraform/nifcloud/computing/security_group_test.go | 2 +- internal/adapters/terraform/nifcloud/dns/record_test.go | 2 +- internal/adapters/terraform/nifcloud/nas/nas_instance_test.go | 2 +- .../adapters/terraform/nifcloud/nas/nas_security_group_test.go | 2 +- .../terraform/nifcloud/network/elastic_load_balancer_test.go | 2 +- .../adapters/terraform/nifcloud/network/load_balancer_test.go | 2 +- internal/adapters/terraform/nifcloud/network/router_test.go | 2 +- .../adapters/terraform/nifcloud/network/vpn_gateway_test.go | 2 +- internal/adapters/terraform/nifcloud/rdb/db_instance_test.go | 2 +- .../adapters/terraform/nifcloud/rdb/db_security_group_test.go | 2 +- 
.../nifcloud/sslcertificate/server_certificate_test.go | 2 +- internal/adapters/terraform/openstack/adapt_test.go | 2 +- internal/adapters/terraform/tftestutil/testutil.go | 2 +- pkg/scanners/cloudformation/parser/parser_test.go | 2 +- pkg/scanners/cloudformation/scanner_test.go | 2 +- pkg/scanners/dockerfile/scanner_test.go | 2 +- pkg/scanners/json/scanner_test.go | 2 +- pkg/scanners/kubernetes/scanner_test.go | 2 +- pkg/scanners/terraform/executor/executor_test.go | 2 +- pkg/scanners/terraform/parser/load_vars_test.go | 2 +- pkg/scanners/terraform/parser/parser_integration_test.go | 2 +- pkg/scanners/terraform/scanner_integration_test.go | 2 +- pkg/scanners/terraform/scanner_test.go | 2 +- pkg/scanners/terraformplan/scanner_test.go | 2 +- pkg/scanners/toml/scanner_test.go | 2 +- pkg/scanners/yaml/scanner_test.go | 2 +- test/count_test.go | 2 +- test/deterministic_test.go | 2 +- test/json_test.go | 2 +- test/module_test.go | 2 +- test/performance_test.go | 2 +- test/setup_test.go | 2 +- test/wildcard_test.go | 2 +- 115 files changed, 113 insertions(+), 116 deletions(-) diff --git a/go.mod b/go.mod index fd17000843d2..62f0c54fe9b4 100644 --- a/go.mod +++ b/go.mod @@ -25,7 +25,6 @@ require ( github.com/aquasecurity/tml v0.6.1 github.com/aquasecurity/trivy-aws v0.7.1 github.com/aquasecurity/trivy-db v0.0.0-20231005141211-4fc651f7ac8d - github.com/aquasecurity/trivy-iac v0.8.0 github.com/aquasecurity/trivy-java-db v0.0.0-20240109071736-184bd7481d48 github.com/aquasecurity/trivy-kubernetes v0.6.3-0.20240118072219-c433b06f98e1 github.com/aquasecurity/trivy-policies v0.8.0 diff --git a/go.sum b/go.sum index eb45aeeaeaa6..570e1d31a49d 100644 --- a/go.sum +++ b/go.sum @@ -351,8 +351,6 @@ github.com/aquasecurity/trivy-aws v0.7.1 h1:XElKZsP9Hqe2JVekQgGCIkFtgRgVlP+80wKL github.com/aquasecurity/trivy-aws v0.7.1/go.mod h1:bJT7pzsqo9q5yi3arJSt789bAH0eDb7c+niFYMBNcMQ= github.com/aquasecurity/trivy-db 
v0.0.0-20231005141211-4fc651f7ac8d h1:fjI9mkoTUAkbGqpzt9nJsO24RAdfG+ZSiLFj0G2jO8c= github.com/aquasecurity/trivy-db v0.0.0-20231005141211-4fc651f7ac8d/go.mod h1:cj9/QmD9N3OZnKQMp+/DvdV+ym3HyIkd4e+F0ZM3ZGs= -github.com/aquasecurity/trivy-iac v0.8.0 h1:NKFhk/BTwQ0jIh4t74V8+6UIGUvPlaxO9HPlSMQi3fo= -github.com/aquasecurity/trivy-iac v0.8.0/go.mod h1:ARiMeNqcaVWOXJmp8hmtMnNm/Jd836IOmDBUW5r4KEk= github.com/aquasecurity/trivy-java-db v0.0.0-20240109071736-184bd7481d48 h1:JVgBIuIYbwG+ekC5lUHUpGJboPYiCcxiz06RCtz8neI= github.com/aquasecurity/trivy-java-db v0.0.0-20240109071736-184bd7481d48/go.mod h1:Ldya37FLi0e/5Cjq2T5Bty7cFkzUDwTcPeQua+2M8i8= github.com/aquasecurity/trivy-kubernetes v0.6.3-0.20240118072219-c433b06f98e1 h1:/LsIHMQJ4SOxZeib/bvLP7S3YDTXJVIsQyS4kIIP0GQ= diff --git a/internal/adapters/cloudformation/aws/config/adapt_test.go b/internal/adapters/cloudformation/aws/config/adapt_test.go index a7e21abe61d9..05972445eb12 100644 --- a/internal/adapters/cloudformation/aws/config/adapt_test.go +++ b/internal/adapters/cloudformation/aws/config/adapt_test.go @@ -8,8 +8,8 @@ import ( "github.com/aquasecurity/defsec/pkg/types" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/test/testutil" ) func TestAdapt(t *testing.T) { diff --git a/internal/adapters/cloudformation/aws/ec2/adapt_test.go b/internal/adapters/cloudformation/aws/ec2/adapt_test.go index 735b2fbca80f..3f89796f9601 100644 --- a/internal/adapters/cloudformation/aws/ec2/adapt_test.go +++ b/internal/adapters/cloudformation/aws/ec2/adapt_test.go @@ -8,8 +8,8 @@ import ( "github.com/aquasecurity/defsec/pkg/types" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" 
"github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/test/testutil" ) func TestAdapt(t *testing.T) { diff --git a/internal/adapters/cloudformation/aws/elb/adapt_test.go b/internal/adapters/cloudformation/aws/elb/adapt_test.go index c975145b8426..607e07f94238 100644 --- a/internal/adapters/cloudformation/aws/elb/adapt_test.go +++ b/internal/adapters/cloudformation/aws/elb/adapt_test.go @@ -8,8 +8,8 @@ import ( "github.com/aquasecurity/defsec/pkg/types" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/test/testutil" ) func TestAdapt(t *testing.T) { diff --git a/internal/adapters/cloudformation/aws/rds/adapt_test.go b/internal/adapters/cloudformation/aws/rds/adapt_test.go index 09dbd9fb7d09..178b47230843 100644 --- a/internal/adapters/cloudformation/aws/rds/adapt_test.go +++ b/internal/adapters/cloudformation/aws/rds/adapt_test.go @@ -8,8 +8,8 @@ import ( "github.com/aquasecurity/defsec/pkg/types" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/test/testutil" ) func TestAdapt(t *testing.T) { diff --git a/internal/adapters/terraform/aws/apigateway/adapt_test.go b/internal/adapters/terraform/aws/apigateway/adapt_test.go index 269068c3b159..1b116d0df8e5 100644 --- a/internal/adapters/terraform/aws/apigateway/adapt_test.go +++ b/internal/adapters/terraform/aws/apigateway/adapt_test.go @@ -7,8 +7,8 @@ import ( v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - 
"github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/apigateway/apiv1_test.go b/internal/adapters/terraform/aws/apigateway/apiv1_test.go index 86b2677f24dc..1d1ae52dbd85 100644 --- a/internal/adapters/terraform/aws/apigateway/apiv1_test.go +++ b/internal/adapters/terraform/aws/apigateway/apiv1_test.go @@ -4,8 +4,8 @@ import ( "testing" v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptAPIMethodsV1(t *testing.T) { diff --git a/internal/adapters/terraform/aws/apigateway/apiv2_test.go b/internal/adapters/terraform/aws/apigateway/apiv2_test.go index 818f96dfbcb6..f5c728996746 100644 --- a/internal/adapters/terraform/aws/apigateway/apiv2_test.go +++ b/internal/adapters/terraform/aws/apigateway/apiv2_test.go @@ -4,8 +4,8 @@ import ( "testing" v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptAPIsV2(t *testing.T) { diff --git a/internal/adapters/terraform/aws/apigateway/namesv1_test.go b/internal/adapters/terraform/aws/apigateway/namesv1_test.go index 72ace2eca839..8232d1271aea 100644 --- a/internal/adapters/terraform/aws/apigateway/namesv1_test.go +++ b/internal/adapters/terraform/aws/apigateway/namesv1_test.go @@ -4,8 +4,8 @@ import ( "testing" v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" - 
"github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptDomainNamesV1(t *testing.T) { diff --git a/internal/adapters/terraform/aws/apigateway/namesv2_test.go b/internal/adapters/terraform/aws/apigateway/namesv2_test.go index ba7941f8e935..c2c1c8da4c78 100644 --- a/internal/adapters/terraform/aws/apigateway/namesv2_test.go +++ b/internal/adapters/terraform/aws/apigateway/namesv2_test.go @@ -4,8 +4,8 @@ import ( "testing" v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptDomainNamesV2(t *testing.T) { diff --git a/internal/adapters/terraform/aws/athena/adapt_test.go b/internal/adapters/terraform/aws/athena/adapt_test.go index 179d1461df57..c4daaf9c9284 100644 --- a/internal/adapters/terraform/aws/athena/adapt_test.go +++ b/internal/adapters/terraform/aws/athena/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/cloudfront/adapt_test.go b/internal/adapters/terraform/aws/cloudfront/adapt_test.go index a10f321cfe56..9131bd1a36d0 100644 --- a/internal/adapters/terraform/aws/cloudfront/adapt_test.go +++ b/internal/adapters/terraform/aws/cloudfront/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + 
"github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/cloudtrail/adapt_test.go b/internal/adapters/terraform/aws/cloudtrail/adapt_test.go index 8a1df4d136e5..c669d96f0010 100644 --- a/internal/adapters/terraform/aws/cloudtrail/adapt_test.go +++ b/internal/adapters/terraform/aws/cloudtrail/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/cloudwatch/adapt_test.go b/internal/adapters/terraform/aws/cloudwatch/adapt_test.go index 6272585b45d7..1486cfeb7d08 100644 --- a/internal/adapters/terraform/aws/cloudwatch/adapt_test.go +++ b/internal/adapters/terraform/aws/cloudwatch/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/codebuild/adapt_test.go b/internal/adapters/terraform/aws/codebuild/adapt_test.go index 7c55ab0e0f11..0488a1441831 100644 --- a/internal/adapters/terraform/aws/codebuild/adapt_test.go +++ b/internal/adapters/terraform/aws/codebuild/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git 
a/internal/adapters/terraform/aws/config/adapt_test.go b/internal/adapters/terraform/aws/config/adapt_test.go index d48579571d7e..5fd929b70689 100644 --- a/internal/adapters/terraform/aws/config/adapt_test.go +++ b/internal/adapters/terraform/aws/config/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" ) diff --git a/internal/adapters/terraform/aws/documentdb/adapt_test.go b/internal/adapters/terraform/aws/documentdb/adapt_test.go index 359cd0be9da3..470d2992c189 100644 --- a/internal/adapters/terraform/aws/documentdb/adapt_test.go +++ b/internal/adapters/terraform/aws/documentdb/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/dynamodb/adapt_test.go b/internal/adapters/terraform/aws/dynamodb/adapt_test.go index 05f7e126c507..5c233021083d 100644 --- a/internal/adapters/terraform/aws/dynamodb/adapt_test.go +++ b/internal/adapters/terraform/aws/dynamodb/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/ec2/adapt_test.go b/internal/adapters/terraform/aws/ec2/adapt_test.go index 1596eb4ad633..12fc3f4c2b99 100644 --- a/internal/adapters/terraform/aws/ec2/adapt_test.go +++ 
b/internal/adapters/terraform/aws/ec2/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/ec2/autoscaling_test.go b/internal/adapters/terraform/aws/ec2/autoscaling_test.go index 75e032b79605..893fec8e63cd 100644 --- a/internal/adapters/terraform/aws/ec2/autoscaling_test.go +++ b/internal/adapters/terraform/aws/ec2/autoscaling_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/ec2/subnet_test.go b/internal/adapters/terraform/aws/ec2/subnet_test.go index ea6cfd2599c6..a18b4d768edb 100644 --- a/internal/adapters/terraform/aws/ec2/subnet_test.go +++ b/internal/adapters/terraform/aws/ec2/subnet_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/ec2/volume_test.go b/internal/adapters/terraform/aws/ec2/volume_test.go index 95baea539d6e..c09d1ca2ed9b 100644 --- a/internal/adapters/terraform/aws/ec2/volume_test.go +++ b/internal/adapters/terraform/aws/ec2/volume_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + 
"github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/ec2/vpc_test.go b/internal/adapters/terraform/aws/ec2/vpc_test.go index 704c15c77404..cdde2f6c6fdc 100644 --- a/internal/adapters/terraform/aws/ec2/vpc_test.go +++ b/internal/adapters/terraform/aws/ec2/vpc_test.go @@ -10,8 +10,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_AdaptVPC(t *testing.T) { diff --git a/internal/adapters/terraform/aws/ecr/adapt_test.go b/internal/adapters/terraform/aws/ecr/adapt_test.go index 502708014e90..d6110cb1960a 100644 --- a/internal/adapters/terraform/aws/ecr/adapt_test.go +++ b/internal/adapters/terraform/aws/ecr/adapt_test.go @@ -10,7 +10,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/liamg/iamgo" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/ecs/adapt_test.go b/internal/adapters/terraform/aws/ecs/adapt_test.go index 5ed13d970de3..293422d0cd88 100644 --- a/internal/adapters/terraform/aws/ecs/adapt_test.go +++ b/internal/adapters/terraform/aws/ecs/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git 
a/internal/adapters/terraform/aws/efs/adapt_test.go b/internal/adapters/terraform/aws/efs/adapt_test.go index 0182ba9060f7..cca5358ff01a 100644 --- a/internal/adapters/terraform/aws/efs/adapt_test.go +++ b/internal/adapters/terraform/aws/efs/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/eks/adapt_test.go b/internal/adapters/terraform/aws/eks/adapt_test.go index aec8f95219ac..f303f2dfa3b1 100644 --- a/internal/adapters/terraform/aws/eks/adapt_test.go +++ b/internal/adapters/terraform/aws/eks/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/elasticache/adapt_test.go b/internal/adapters/terraform/aws/elasticache/adapt_test.go index 2733e0fac8d3..436142734b19 100644 --- a/internal/adapters/terraform/aws/elasticache/adapt_test.go +++ b/internal/adapters/terraform/aws/elasticache/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/elasticsearch/adapt_test.go b/internal/adapters/terraform/aws/elasticsearch/adapt_test.go index c9cb0694b2a2..bba77842d69a 100644 --- 
a/internal/adapters/terraform/aws/elasticsearch/adapt_test.go +++ b/internal/adapters/terraform/aws/elasticsearch/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/elb/adapt_test.go b/internal/adapters/terraform/aws/elb/adapt_test.go index fa841476f14a..c0f4dae37115 100644 --- a/internal/adapters/terraform/aws/elb/adapt_test.go +++ b/internal/adapters/terraform/aws/elb/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/emr/adapt_test.go b/internal/adapters/terraform/aws/emr/adapt_test.go index 0f1373dc7ecc..b38dcc5811fe 100644 --- a/internal/adapters/terraform/aws/emr/adapt_test.go +++ b/internal/adapters/terraform/aws/emr/adapt_test.go @@ -9,8 +9,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptSecurityConfiguration(t *testing.T) { diff --git a/internal/adapters/terraform/aws/iam/groups_test.go b/internal/adapters/terraform/aws/iam/groups_test.go index c2179368043e..5e06c6974312 100644 --- a/internal/adapters/terraform/aws/iam/groups_test.go +++ b/internal/adapters/terraform/aws/iam/groups_test.go @@ -7,8 +7,8 @@ import ( 
"github.com/aquasecurity/defsec/pkg/providers/aws/iam" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptGroups(t *testing.T) { diff --git a/internal/adapters/terraform/aws/iam/passwords_test.go b/internal/adapters/terraform/aws/iam/passwords_test.go index a4d92c67e2a3..b6d920ba6ab3 100644 --- a/internal/adapters/terraform/aws/iam/passwords_test.go +++ b/internal/adapters/terraform/aws/iam/passwords_test.go @@ -7,8 +7,8 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws/iam" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptPasswordPolicy(t *testing.T) { diff --git a/internal/adapters/terraform/aws/iam/policies_test.go b/internal/adapters/terraform/aws/iam/policies_test.go index ce1aac1743a3..9d7706185b93 100644 --- a/internal/adapters/terraform/aws/iam/policies_test.go +++ b/internal/adapters/terraform/aws/iam/policies_test.go @@ -8,8 +8,8 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/liamg/iamgo" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func defaultPolicyDocuemnt(offset bool) iam.Document { diff --git a/internal/adapters/terraform/aws/iam/roles_test.go b/internal/adapters/terraform/aws/iam/roles_test.go index 2b0190f23174..b1d6354017ff 100644 --- a/internal/adapters/terraform/aws/iam/roles_test.go +++ b/internal/adapters/terraform/aws/iam/roles_test.go @@ -7,8 +7,8 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws/iam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - 
"github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptRoles(t *testing.T) { diff --git a/internal/adapters/terraform/aws/iam/users_test.go b/internal/adapters/terraform/aws/iam/users_test.go index 1e880dffa33e..ef4e3606c041 100644 --- a/internal/adapters/terraform/aws/iam/users_test.go +++ b/internal/adapters/terraform/aws/iam/users_test.go @@ -6,8 +6,8 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws/iam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptUsers(t *testing.T) { diff --git a/internal/adapters/terraform/aws/kinesis/adapt_test.go b/internal/adapters/terraform/aws/kinesis/adapt_test.go index 36ffa3fa12c6..ff90ad5cfefa 100644 --- a/internal/adapters/terraform/aws/kinesis/adapt_test.go +++ b/internal/adapters/terraform/aws/kinesis/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/kms/adapt_test.go b/internal/adapters/terraform/aws/kms/adapt_test.go index 1f284586d4aa..fc203f373042 100644 --- a/internal/adapters/terraform/aws/kms/adapt_test.go +++ b/internal/adapters/terraform/aws/kms/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" 
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/lambda/adapt_test.go b/internal/adapters/terraform/aws/lambda/adapt_test.go index c82457e9695b..64c884c02e8c 100644 --- a/internal/adapters/terraform/aws/lambda/adapt_test.go +++ b/internal/adapters/terraform/aws/lambda/adapt_test.go @@ -9,8 +9,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_Adapt(t *testing.T) { diff --git a/internal/adapters/terraform/aws/mq/adapt_test.go b/internal/adapters/terraform/aws/mq/adapt_test.go index 0a3dd1d1e3cc..a7e110c3fc82 100644 --- a/internal/adapters/terraform/aws/mq/adapt_test.go +++ b/internal/adapters/terraform/aws/mq/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/msk/adapt_test.go b/internal/adapters/terraform/aws/msk/adapt_test.go index 87a063f4ee72..de3752b73991 100644 --- a/internal/adapters/terraform/aws/msk/adapt_test.go +++ b/internal/adapters/terraform/aws/msk/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/neptune/adapt_test.go b/internal/adapters/terraform/aws/neptune/adapt_test.go 
index ce2d5b80c896..148b29f3b3e4 100644 --- a/internal/adapters/terraform/aws/neptune/adapt_test.go +++ b/internal/adapters/terraform/aws/neptune/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/provider/adapt_test.go b/internal/adapters/terraform/aws/provider/adapt_test.go index ffcd45027627..acdd08ded4f2 100644 --- a/internal/adapters/terraform/aws/provider/adapt_test.go +++ b/internal/adapters/terraform/aws/provider/adapt_test.go @@ -6,8 +6,8 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func TestAdapt(t *testing.T) { diff --git a/internal/adapters/terraform/aws/rds/adapt_test.go b/internal/adapters/terraform/aws/rds/adapt_test.go index 3636d0589479..c6e750b5277d 100644 --- a/internal/adapters/terraform/aws/rds/adapt_test.go +++ b/internal/adapters/terraform/aws/rds/adapt_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_Adapt(t *testing.T) { diff --git a/internal/adapters/terraform/aws/redshift/adapt_test.go b/internal/adapters/terraform/aws/redshift/adapt_test.go index 443cc3bcbea9..3a852f5cb9a4 100644 --- a/internal/adapters/terraform/aws/redshift/adapt_test.go +++ b/internal/adapters/terraform/aws/redshift/adapt_test.go @@ -10,8 +10,8 @@ import ( 
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_Adapt(t *testing.T) { diff --git a/internal/adapters/terraform/aws/s3/adapt_test.go b/internal/adapters/terraform/aws/s3/adapt_test.go index 5b6ebe4df1ec..35d6a4e5aaca 100644 --- a/internal/adapters/terraform/aws/s3/adapt_test.go +++ b/internal/adapters/terraform/aws/s3/adapt_test.go @@ -7,8 +7,8 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/s3" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/liamg/iamgo" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/sns/adapt_test.go b/internal/adapters/terraform/aws/sns/adapt_test.go index c02dd9ea9c59..1213829a1d11 100644 --- a/internal/adapters/terraform/aws/sns/adapt_test.go +++ b/internal/adapters/terraform/aws/sns/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/sqs/adapt_test.go b/internal/adapters/terraform/aws/sqs/adapt_test.go index 736d09d1b0e5..f6a191ec23eb 100644 --- a/internal/adapters/terraform/aws/sqs/adapt_test.go +++ b/internal/adapters/terraform/aws/sqs/adapt_test.go @@ -10,7 +10,7 @@ import ( 
"github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/liamg/iamgo" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/ssm/adapt_test.go b/internal/adapters/terraform/aws/ssm/adapt_test.go index cd6620f028cd..73dc82015f9b 100644 --- a/internal/adapters/terraform/aws/ssm/adapt_test.go +++ b/internal/adapters/terraform/aws/ssm/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/workspaces/adapt_test.go b/internal/adapters/terraform/aws/workspaces/adapt_test.go index 0572effc1c9b..0888dd1e020c 100644 --- a/internal/adapters/terraform/aws/workspaces/adapt_test.go +++ b/internal/adapters/terraform/aws/workspaces/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/azure/appservice/adapt_test.go b/internal/adapters/terraform/azure/appservice/adapt_test.go index b28a3d8521d5..a0432279774b 100644 --- a/internal/adapters/terraform/azure/appservice/adapt_test.go +++ b/internal/adapters/terraform/azure/appservice/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + 
"github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/azure/authorization/adapt_test.go b/internal/adapters/terraform/azure/authorization/adapt_test.go index bf6c371ce19a..74c8602edebc 100644 --- a/internal/adapters/terraform/azure/authorization/adapt_test.go +++ b/internal/adapters/terraform/azure/authorization/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/azure/compute/adapt_test.go b/internal/adapters/terraform/azure/compute/adapt_test.go index beb3a73a4470..3af617db9685 100644 --- a/internal/adapters/terraform/azure/compute/adapt_test.go +++ b/internal/adapters/terraform/azure/compute/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/azure/container/adapt_test.go b/internal/adapters/terraform/azure/container/adapt_test.go index 8c5c450153f8..a43fcdf35f97 100644 --- a/internal/adapters/terraform/azure/container/adapt_test.go +++ b/internal/adapters/terraform/azure/container/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff 
--git a/internal/adapters/terraform/azure/database/adapt_test.go b/internal/adapters/terraform/azure/database/adapt_test.go index eb449b08b35b..401b8603fa7d 100644 --- a/internal/adapters/terraform/azure/database/adapt_test.go +++ b/internal/adapters/terraform/azure/database/adapt_test.go @@ -6,8 +6,8 @@ import ( defsecTypes "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/defsec/pkg/providers/azure/database" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/azure/datafactory/adapt_test.go b/internal/adapters/terraform/azure/datafactory/adapt_test.go index 29bc62b09a0b..acd13315d904 100644 --- a/internal/adapters/terraform/azure/datafactory/adapt_test.go +++ b/internal/adapters/terraform/azure/datafactory/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/azure/datalake/adapt_test.go b/internal/adapters/terraform/azure/datalake/adapt_test.go index f0c13133ceeb..41fd476522cf 100644 --- a/internal/adapters/terraform/azure/datalake/adapt_test.go +++ b/internal/adapters/terraform/azure/datalake/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git 
a/internal/adapters/terraform/azure/keyvault/adapt_test.go b/internal/adapters/terraform/azure/keyvault/adapt_test.go index 6665f4ce4a2f..b7e668712697 100644 --- a/internal/adapters/terraform/azure/keyvault/adapt_test.go +++ b/internal/adapters/terraform/azure/keyvault/adapt_test.go @@ -10,7 +10,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/azure/monitor/adapt_test.go b/internal/adapters/terraform/azure/monitor/adapt_test.go index 509c6eeb002b..f7894c53da59 100644 --- a/internal/adapters/terraform/azure/monitor/adapt_test.go +++ b/internal/adapters/terraform/azure/monitor/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/azure/network/adapt_test.go b/internal/adapters/terraform/azure/network/adapt_test.go index af27eb117d85..74e1f3f26aca 100644 --- a/internal/adapters/terraform/azure/network/adapt_test.go +++ b/internal/adapters/terraform/azure/network/adapt_test.go @@ -9,8 +9,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_Adapt(t *testing.T) { diff --git a/internal/adapters/terraform/azure/securitycenter/adapt_test.go b/internal/adapters/terraform/azure/securitycenter/adapt_test.go index 
70faef6f27b1..1454259aa3d5 100644 --- a/internal/adapters/terraform/azure/securitycenter/adapt_test.go +++ b/internal/adapters/terraform/azure/securitycenter/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/azure/storage/adapt_test.go b/internal/adapters/terraform/azure/storage/adapt_test.go index 664c33f83abe..c0e3b85f3c99 100644 --- a/internal/adapters/terraform/azure/storage/adapt_test.go +++ b/internal/adapters/terraform/azure/storage/adapt_test.go @@ -9,8 +9,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_Adapt(t *testing.T) { diff --git a/internal/adapters/terraform/azure/synapse/adapt_test.go b/internal/adapters/terraform/azure/synapse/adapt_test.go index 48951957d5dc..8c1146733c4d 100644 --- a/internal/adapters/terraform/azure/synapse/adapt_test.go +++ b/internal/adapters/terraform/azure/synapse/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/cloudstack/compute/adapt_test.go b/internal/adapters/terraform/cloudstack/compute/adapt_test.go index 36fce11efd7f..b94cd90983a9 100644 --- a/internal/adapters/terraform/cloudstack/compute/adapt_test.go +++ 
b/internal/adapters/terraform/cloudstack/compute/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/digitalocean/compute/adapt_test.go b/internal/adapters/terraform/digitalocean/compute/adapt_test.go index 8530158ca0b1..f39685ba22c1 100644 --- a/internal/adapters/terraform/digitalocean/compute/adapt_test.go +++ b/internal/adapters/terraform/digitalocean/compute/adapt_test.go @@ -9,8 +9,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptDroplets(t *testing.T) { diff --git a/internal/adapters/terraform/digitalocean/spaces/adapt_test.go b/internal/adapters/terraform/digitalocean/spaces/adapt_test.go index de55e3cd288f..c1b0378c0d98 100644 --- a/internal/adapters/terraform/digitalocean/spaces/adapt_test.go +++ b/internal/adapters/terraform/digitalocean/spaces/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/github/secrets/adapt_test.go b/internal/adapters/terraform/github/secrets/adapt_test.go index 05ae36dd68c6..d45748dc87d2 100644 --- a/internal/adapters/terraform/github/secrets/adapt_test.go +++ b/internal/adapters/terraform/github/secrets/adapt_test.go @@ -9,7 +9,7 @@ import ( 
"github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_Adapt(t *testing.T) { diff --git a/internal/adapters/terraform/google/bigquery/adapt_test.go b/internal/adapters/terraform/google/bigquery/adapt_test.go index 3fda806b61bb..320a5f478752 100644 --- a/internal/adapters/terraform/google/bigquery/adapt_test.go +++ b/internal/adapters/terraform/google/bigquery/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/google/compute/disks_test.go b/internal/adapters/terraform/google/compute/disks_test.go index efedacd23333..da80933c326d 100644 --- a/internal/adapters/terraform/google/compute/disks_test.go +++ b/internal/adapters/terraform/google/compute/disks_test.go @@ -7,8 +7,8 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/google/compute" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptDisks(t *testing.T) { diff --git a/internal/adapters/terraform/google/compute/instances_test.go b/internal/adapters/terraform/google/compute/instances_test.go index 5c031b11dbed..8dc61c0d173c 100644 --- a/internal/adapters/terraform/google/compute/instances_test.go +++ b/internal/adapters/terraform/google/compute/instances_test.go @@ -7,8 +7,8 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/google/compute" - "github.com/aquasecurity/trivy-iac/test/testutil" 
"github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptInstances(t *testing.T) { diff --git a/internal/adapters/terraform/google/compute/metadata_test.go b/internal/adapters/terraform/google/compute/metadata_test.go index 43881eabf848..3067eef746fb 100644 --- a/internal/adapters/terraform/google/compute/metadata_test.go +++ b/internal/adapters/terraform/google/compute/metadata_test.go @@ -7,8 +7,8 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/google/compute" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptProjectMetadata(t *testing.T) { diff --git a/internal/adapters/terraform/google/compute/networks_test.go b/internal/adapters/terraform/google/compute/networks_test.go index 3722a2ee6629..2bae86539afb 100644 --- a/internal/adapters/terraform/google/compute/networks_test.go +++ b/internal/adapters/terraform/google/compute/networks_test.go @@ -7,8 +7,8 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/google/compute" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptNetworks(t *testing.T) { diff --git a/internal/adapters/terraform/google/compute/ssl_test.go b/internal/adapters/terraform/google/compute/ssl_test.go index 16840801b47d..1b0224def50f 100644 --- a/internal/adapters/terraform/google/compute/ssl_test.go +++ b/internal/adapters/terraform/google/compute/ssl_test.go @@ -7,8 +7,8 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/google/compute" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + 
"github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptSSLPolicies(t *testing.T) { diff --git a/internal/adapters/terraform/google/dns/adapt_test.go b/internal/adapters/terraform/google/dns/adapt_test.go index 117c5349194c..eedae6f87e76 100644 --- a/internal/adapters/terraform/google/dns/adapt_test.go +++ b/internal/adapters/terraform/google/dns/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/google/gke/adapt_test.go b/internal/adapters/terraform/google/gke/adapt_test.go index 5f6fcc4f4c97..da491bd425da 100644 --- a/internal/adapters/terraform/google/gke/adapt_test.go +++ b/internal/adapters/terraform/google/gke/adapt_test.go @@ -9,8 +9,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_Adapt(t *testing.T) { diff --git a/internal/adapters/terraform/google/iam/adapt_test.go b/internal/adapters/terraform/google/iam/adapt_test.go index 19c684b70f6d..e49ba44a466c 100644 --- a/internal/adapters/terraform/google/iam/adapt_test.go +++ b/internal/adapters/terraform/google/iam/adapt_test.go @@ -6,8 +6,8 @@ import ( defsecTypes "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/defsec/pkg/providers/google/iam" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" 
"github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/google/iam/project_iam_test.go b/internal/adapters/terraform/google/iam/project_iam_test.go index 5c2f75187f4a..3d3270ca4637 100644 --- a/internal/adapters/terraform/google/iam/project_iam_test.go +++ b/internal/adapters/terraform/google/iam/project_iam_test.go @@ -7,8 +7,8 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/google/iam" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_AdaptBinding(t *testing.T) { diff --git a/internal/adapters/terraform/google/kms/adapt_test.go b/internal/adapters/terraform/google/kms/adapt_test.go index f6f2b7c323ba..09043e00c049 100644 --- a/internal/adapters/terraform/google/kms/adapt_test.go +++ b/internal/adapters/terraform/google/kms/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/google/sql/adapt_test.go b/internal/adapters/terraform/google/sql/adapt_test.go index 89c1a58e95ff..b6a7060f2830 100644 --- a/internal/adapters/terraform/google/sql/adapt_test.go +++ b/internal/adapters/terraform/google/sql/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/google/storage/adapt_test.go b/internal/adapters/terraform/google/storage/adapt_test.go index 
069d39e1051a..2dcef331d1c6 100644 --- a/internal/adapters/terraform/google/storage/adapt_test.go +++ b/internal/adapters/terraform/google/storage/adapt_test.go @@ -10,8 +10,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_Adapt(t *testing.T) { diff --git a/internal/adapters/terraform/nifcloud/computing/instance_test.go b/internal/adapters/terraform/nifcloud/computing/instance_test.go index a8316b77b231..45832714bee0 100644 --- a/internal/adapters/terraform/nifcloud/computing/instance_test.go +++ b/internal/adapters/terraform/nifcloud/computing/instance_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptInstances(t *testing.T) { diff --git a/internal/adapters/terraform/nifcloud/computing/security_group_test.go b/internal/adapters/terraform/nifcloud/computing/security_group_test.go index 026d6cd451b7..70800804b76d 100644 --- a/internal/adapters/terraform/nifcloud/computing/security_group_test.go +++ b/internal/adapters/terraform/nifcloud/computing/security_group_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptSecurityGroups(t *testing.T) { diff --git a/internal/adapters/terraform/nifcloud/dns/record_test.go b/internal/adapters/terraform/nifcloud/dns/record_test.go index 65867c90b579..ccf7e4cc7bd1 100644 --- a/internal/adapters/terraform/nifcloud/dns/record_test.go +++ b/internal/adapters/terraform/nifcloud/dns/record_test.go 
@@ -8,7 +8,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptRecords(t *testing.T) { diff --git a/internal/adapters/terraform/nifcloud/nas/nas_instance_test.go b/internal/adapters/terraform/nifcloud/nas/nas_instance_test.go index 450b008fa7e9..26b9e1408a2f 100644 --- a/internal/adapters/terraform/nifcloud/nas/nas_instance_test.go +++ b/internal/adapters/terraform/nifcloud/nas/nas_instance_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptNASInstances(t *testing.T) { diff --git a/internal/adapters/terraform/nifcloud/nas/nas_security_group_test.go b/internal/adapters/terraform/nifcloud/nas/nas_security_group_test.go index 1f64131f5a68..01e93aade40b 100644 --- a/internal/adapters/terraform/nifcloud/nas/nas_security_group_test.go +++ b/internal/adapters/terraform/nifcloud/nas/nas_security_group_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptNASSecurityGroups(t *testing.T) { diff --git a/internal/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go b/internal/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go index 9d61a0b7e6e5..06bb3a96e78a 100644 --- a/internal/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go +++ b/internal/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go @@ -8,7 +8,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + 
"github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptElasticLoadBalancers(t *testing.T) { diff --git a/internal/adapters/terraform/nifcloud/network/load_balancer_test.go b/internal/adapters/terraform/nifcloud/network/load_balancer_test.go index c52e40a0a921..cbcebb11d774 100644 --- a/internal/adapters/terraform/nifcloud/network/load_balancer_test.go +++ b/internal/adapters/terraform/nifcloud/network/load_balancer_test.go @@ -8,7 +8,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptLoadBalancers(t *testing.T) { diff --git a/internal/adapters/terraform/nifcloud/network/router_test.go b/internal/adapters/terraform/nifcloud/network/router_test.go index 19618a67e82f..3c2fe55ab92b 100644 --- a/internal/adapters/terraform/nifcloud/network/router_test.go +++ b/internal/adapters/terraform/nifcloud/network/router_test.go @@ -8,7 +8,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptRouters(t *testing.T) { diff --git a/internal/adapters/terraform/nifcloud/network/vpn_gateway_test.go b/internal/adapters/terraform/nifcloud/network/vpn_gateway_test.go index 0aa7331b58a3..d589bd2fd7c6 100644 --- a/internal/adapters/terraform/nifcloud/network/vpn_gateway_test.go +++ b/internal/adapters/terraform/nifcloud/network/vpn_gateway_test.go @@ -8,7 +8,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptVpnGateways(t *testing.T) { diff --git a/internal/adapters/terraform/nifcloud/rdb/db_instance_test.go 
b/internal/adapters/terraform/nifcloud/rdb/db_instance_test.go index 0999144c589f..5878dc3fdc14 100644 --- a/internal/adapters/terraform/nifcloud/rdb/db_instance_test.go +++ b/internal/adapters/terraform/nifcloud/rdb/db_instance_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptDBInstances(t *testing.T) { diff --git a/internal/adapters/terraform/nifcloud/rdb/db_security_group_test.go b/internal/adapters/terraform/nifcloud/rdb/db_security_group_test.go index 5092a3385a53..148fe2cc8ddb 100644 --- a/internal/adapters/terraform/nifcloud/rdb/db_security_group_test.go +++ b/internal/adapters/terraform/nifcloud/rdb/db_security_group_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_adaptDBSecurityGroups(t *testing.T) { diff --git a/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go b/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go index 74dc6eb87766..661e452a019d 100644 --- a/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go +++ b/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) const certificate = ` diff --git a/internal/adapters/terraform/openstack/adapt_test.go b/internal/adapters/terraform/openstack/adapt_test.go index 66cad2e631bd..e175f4a7ec98 100644 --- a/internal/adapters/terraform/openstack/adapt_test.go +++ 
b/internal/adapters/terraform/openstack/adapt_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/tftestutil/testutil.go b/internal/adapters/terraform/tftestutil/testutil.go index 251a795b049c..27038cd18c23 100644 --- a/internal/adapters/terraform/tftestutil/testutil.go +++ b/internal/adapters/terraform/tftestutil/testutil.go @@ -5,8 +5,8 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" + "github.com/aquasecurity/trivy/test/testutil" ) func CreateModulesFromSource(t *testing.T, source, ext string) terraform.Modules { diff --git a/pkg/scanners/cloudformation/parser/parser_test.go b/pkg/scanners/cloudformation/parser/parser_test.go index 8a5e3844ff01..5862d4757186 100644 --- a/pkg/scanners/cloudformation/parser/parser_test.go +++ b/pkg/scanners/cloudformation/parser/parser_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) func parseFile(t *testing.T, source string, name string) (FileContexts, error) { diff --git a/pkg/scanners/cloudformation/scanner_test.go b/pkg/scanners/cloudformation/scanner_test.go index 6f6792195b32..1fab8452e98c 100644 --- a/pkg/scanners/cloudformation/scanner_test.go +++ b/pkg/scanners/cloudformation/scanner_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/require" "github.com/aquasecurity/defsec/pkg/scanners/options" - 
"github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_BasicScan(t *testing.T) { diff --git a/pkg/scanners/dockerfile/scanner_test.go b/pkg/scanners/dockerfile/scanner_test.go index a4cf64b377a9..4a793f5e00bb 100644 --- a/pkg/scanners/dockerfile/scanner_test.go +++ b/pkg/scanners/dockerfile/scanner_test.go @@ -13,7 +13,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) const DS006PolicyWithDockerfileSchema = `# METADATA diff --git a/pkg/scanners/json/scanner_test.go b/pkg/scanners/json/scanner_test.go index c66c49c8d5f7..18e6dc90d49b 100644 --- a/pkg/scanners/json/scanner_test.go +++ b/pkg/scanners/json/scanner_test.go @@ -7,7 +7,7 @@ import ( "github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/pkg/scanners/kubernetes/scanner_test.go b/pkg/scanners/kubernetes/scanner_test.go index 8614925f18f0..eacfae5b3f0c 100644 --- a/pkg/scanners/kubernetes/scanner_test.go +++ b/pkg/scanners/kubernetes/scanner_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/pkg/scanners/terraform/executor/executor_test.go 
b/pkg/scanners/terraform/executor/executor_test.go index f35e4a7f2391..26de5bd2bc0f 100644 --- a/pkg/scanners/terraform/executor/executor_test.go +++ b/pkg/scanners/terraform/executor/executor_test.go @@ -12,8 +12,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" + "github.com/aquasecurity/trivy/test/testutil" ) var panicRule = scan.Rule{ diff --git a/pkg/scanners/terraform/parser/load_vars_test.go b/pkg/scanners/terraform/parser/load_vars_test.go index 28d0dcbada67..f6e6792206a8 100644 --- a/pkg/scanners/terraform/parser/load_vars_test.go +++ b/pkg/scanners/terraform/parser/load_vars_test.go @@ -3,7 +3,7 @@ package parser import ( "testing" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/zclconf/go-cty/cty" diff --git a/pkg/scanners/terraform/parser/parser_integration_test.go b/pkg/scanners/terraform/parser/parser_integration_test.go index ba7dd82c7bd7..bbce2a151ce0 100644 --- a/pkg/scanners/terraform/parser/parser_integration_test.go +++ b/pkg/scanners/terraform/parser/parser_integration_test.go @@ -4,7 +4,7 @@ import ( "context" "testing" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/require" ) diff --git a/pkg/scanners/terraform/scanner_integration_test.go b/pkg/scanners/terraform/scanner_integration_test.go index 94dffbb4ceed..70b912bf3065 100644 --- a/pkg/scanners/terraform/scanner_integration_test.go +++ b/pkg/scanners/terraform/scanner_integration_test.go @@ -7,7 +7,7 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" 
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/pkg/scanners/terraform/scanner_test.go b/pkg/scanners/terraform/scanner_test.go index 07044f8d10bc..71de2f7b0c26 100644 --- a/pkg/scanners/terraform/scanner_test.go +++ b/pkg/scanners/terraform/scanner_test.go @@ -17,7 +17,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) var alwaysFailRule = scan.Rule{ diff --git a/pkg/scanners/terraformplan/scanner_test.go b/pkg/scanners/terraformplan/scanner_test.go index 79b27265e095..ea04ffd62580 100644 --- a/pkg/scanners/terraformplan/scanner_test.go +++ b/pkg/scanners/terraformplan/scanner_test.go @@ -8,7 +8,7 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/pkg/scanners/toml/scanner_test.go b/pkg/scanners/toml/scanner_test.go index efb512a9ddf0..89be2f593921 100644 --- a/pkg/scanners/toml/scanner_test.go +++ b/pkg/scanners/toml/scanner_test.go @@ -7,7 +7,7 @@ import ( "github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/pkg/scanners/yaml/scanner_test.go b/pkg/scanners/yaml/scanner_test.go index d15b021c2120..27eecf244ab7 100644 --- a/pkg/scanners/yaml/scanner_test.go +++ b/pkg/scanners/yaml/scanner_test.go @@ -7,7 +7,7 @@ import ( 
"github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/test/count_test.go b/test/count_test.go index 5f6d0154227a..ab9a737e8433 100644 --- a/test/count_test.go +++ b/test/count_test.go @@ -10,7 +10,7 @@ import ( "github.com/aquasecurity/defsec/pkg/terraform" "github.com/stretchr/testify/assert" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_ResourcesWithCount(t *testing.T) { diff --git a/test/deterministic_test.go b/test/deterministic_test.go index 4c6a56b471d8..ae715ffee6cb 100644 --- a/test/deterministic_test.go +++ b/test/deterministic_test.go @@ -7,9 +7,9 @@ import ( "github.com/aquasecurity/defsec/pkg/rules" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" + "github.com/aquasecurity/trivy/test/testutil" ) func Test_DeterministicResults(t *testing.T) { diff --git a/test/json_test.go b/test/json_test.go index 5f7f31446fa0..9b65b14b3e8c 100644 --- a/test/json_test.go +++ b/test/json_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/defsec/pkg/severity" "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/trivy-iac/test/testutil" + "github.com/aquasecurity/trivy/test/testutil" ) func TestScanningJSON(t *testing.T) { diff --git a/test/module_test.go b/test/module_test.go index e0d6fdeca05f..c60f2d3772f7 100644 --- a/test/module_test.go +++ b/test/module_test.go @@ -15,10 
+15,10 @@ import ( "github.com/aquasecurity/defsec/pkg/terraform" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy-policies/checks/cloud/aws/iam" "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" + "github.com/aquasecurity/trivy/test/testutil" ) var badRule = scan.Rule{ diff --git a/test/performance_test.go b/test/performance_test.go index 2e8896e744da..81fa5cfe2b66 100644 --- a/test/performance_test.go +++ b/test/performance_test.go @@ -8,9 +8,9 @@ import ( "github.com/aquasecurity/defsec/pkg/rules" - "github.com/aquasecurity/trivy-iac/test/testutil" "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" + "github.com/aquasecurity/trivy/test/testutil" ) func BenchmarkCalculate(b *testing.B) { diff --git a/test/setup_test.go b/test/setup_test.go index ec79f26a5398..226bf3400ed8 100644 --- a/test/setup_test.go +++ b/test/setup_test.go @@ -9,9 +9,9 @@ import ( "github.com/aquasecurity/defsec/pkg/terraform" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-iac/test/testutil" tfScanner "github.com/aquasecurity/trivy/pkg/scanners/terraform" "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" + "github.com/aquasecurity/trivy/test/testutil" ) func createModulesFromSource(t *testing.T, source string, ext string) terraform.Modules { diff --git a/test/wildcard_test.go b/test/wildcard_test.go index c8a0d37ce0a6..f2be00691640 100644 --- a/test/wildcard_test.go +++ b/test/wildcard_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/defsec/pkg/severity" "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/trivy-iac/test/testutil" + 
"github.com/aquasecurity/trivy/test/testutil" ) func Test_WildcardMatchingOnRequiredLabels(t *testing.T) { From 67c7c1ef03f4f4b5cf0e1e1fc5d2876119b89984 Mon Sep 17 00:00:00 2001 From: Simar Date: Thu, 25 Jan 2024 22:58:14 -0700 Subject: [PATCH 07/13] add a todo --- test/rules_test.go | 1 + 1 file changed, 1 insertion(+) diff --git a/test/rules_test.go b/test/rules_test.go index 4772d803e0eb..eec1ca2ae5f7 100644 --- a/test/rules_test.go +++ b/test/rules_test.go @@ -23,6 +23,7 @@ func TestAVDIDs(t *testing.T) { } } +// TODO(simar7): Re-evaluate where this test should live? //func TestRulesAgainstExampleCode(t *testing.T) { // for _, rule := range rules.GetRegistered(framework.ALL) { // testName := fmt.Sprintf("%s/%s", rule.GetRule().AVDID, rule.LongID()) From 0896c9b8b9433d54144203d05ea7a01e5cab7ff6 Mon Sep 17 00:00:00 2001 From: Simar Date: Fri, 26 Jan 2024 18:32:22 -0700 Subject: [PATCH 08/13] skip certain tests on windows --- pkg/scanners/helm/test/scanner_test.go | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/pkg/scanners/helm/test/scanner_test.go b/pkg/scanners/helm/test/scanner_test.go index 2c1d14d12ab1..90cf3e65a243 100644 --- a/pkg/scanners/helm/test/scanner_test.go +++ b/pkg/scanners/helm/test/scanner_test.go @@ -5,6 +5,7 @@ import ( "io" "os" "path/filepath" + "runtime" "sort" "strings" "testing" @@ -16,6 +17,10 @@ import ( ) func Test_helm_scanner_with_archive(t *testing.T) { + // TODO(simar7): Figure out why this test fails on Windows only + if runtime.GOOS == "windows" { + t.Skip("skipping test on windows") + } tests := []struct { testName string @@ -103,6 +108,10 @@ func Test_helm_scanner_with_missing_name_can_recover(t *testing.T) { } func Test_helm_scanner_with_dir(t *testing.T) { + // TODO(simar7): Figure out why this test fails on Windows only + if runtime.GOOS == "windows" { + t.Skip("skipping test on windows") + } tests := []struct { testName string @@ -152,6 +161,11 @@ func Test_helm_scanner_with_dir(t *testing.T) { 
} func Test_helm_scanner_with_custom_policies(t *testing.T) { + // TODO(simar7): Figure out why this test fails on Winndows only + if runtime.GOOS == "windows" { + t.Skip("skipping test on windows") + } + regoRule := ` package user.kubernetes.ID001 From 4e26740c0d5db211574d0c73ba3bab60ee391bfc Mon Sep 17 00:00:00 2001 From: Simar Date: Mon, 29 Jan 2024 18:54:49 -0700 Subject: [PATCH 09/13] mv pkgs under pkg/iac --- internal/adapters/cloudformation/aws/adapt.go | 74 ------------- internal/adapters/terraform/adapt.go | 31 ------ internal/adapters/terraform/aws/adapt.go | 79 -------------- internal/adapters/terraform/azure/adapt.go | 37 ------- internal/adapters/terraform/google/adapt.go | 27 ----- .../analyzer/config/terraform/terraform.go | 2 +- pkg/fanal/analyzer/const.go | 4 +- {internal => pkg/iac}/adapters/arm/adapt.go | 28 ++--- .../iac}/adapters/arm/appservice/adapt.go | 2 +- .../iac}/adapters/arm/authorization/adapt.go | 2 +- .../iac}/adapters/arm/compute/adapt.go | 2 +- .../iac}/adapters/arm/compute/adapt_test.go | 29 +++-- .../iac}/adapters/arm/container/adapt.go | 2 +- .../iac}/adapters/arm/database/adapt.go | 2 +- .../iac}/adapters/arm/database/firewall.go | 2 +- .../iac}/adapters/arm/database/maria.go | 2 +- .../iac}/adapters/arm/database/mssql.go | 12 +-- .../iac}/adapters/arm/database/postgresql.go | 2 +- .../iac}/adapters/arm/datafactory/adapt.go | 2 +- .../iac}/adapters/arm/datalake/adapt.go | 2 +- .../iac}/adapters/arm/keyvault/adapt.go | 2 +- .../iac}/adapters/arm/monitor/adapt.go | 2 +- .../iac}/adapters/arm/network/adapt.go | 2 +- .../iac}/adapters/arm/securitycenter/adapt.go | 2 +- .../iac}/adapters/arm/storage/adapt.go | 2 +- .../iac}/adapters/arm/storage/adapt_test.go | 25 +++-- .../iac}/adapters/arm/synapse/adapt.go | 2 +- .../iac}/adapters/cloudformation/adapt.go | 4 +- .../aws/accessanalyzer/accessanalyzer.go | 2 +- .../aws/accessanalyzer/analyzer.go | 2 +- pkg/iac/adapters/cloudformation/aws/adapt.go | 74 +++++++++++++ 
.../aws/apigateway/apigateway.go | 2 +- .../cloudformation/aws/apigateway/stage.go | 8 +- .../cloudformation/aws/athena/athena.go | 2 +- .../cloudformation/aws/athena/workgroup.go | 2 +- .../aws/cloudfront/cloudfront.go | 2 +- .../aws/cloudfront/distribution.go | 6 +- .../aws/cloudtrail/cloudtrail.go | 2 +- .../cloudformation/aws/cloudtrail/trails.go | 2 +- .../aws/cloudwatch/cloudwatch.go | 2 +- .../aws/cloudwatch/log_group.go | 2 +- .../cloudformation/aws/codebuild/codebuild.go | 2 +- .../cloudformation/aws/codebuild/project.go | 8 +- .../cloudformation/aws/config/adapt_test.go | 2 +- .../cloudformation/aws/config/aggregator.go | 6 +- .../cloudformation/aws/config/config.go | 2 +- .../cloudformation/aws/documentdb/cluster.go | 8 +- .../aws/documentdb/documentdb.go | 2 +- .../cloudformation/aws/dynamodb/cluster.go | 2 +- .../cloudformation/aws/dynamodb/dynamodb.go | 2 +- .../cloudformation/aws/ec2/adapt_test.go | 2 +- .../adapters/cloudformation/aws/ec2/ec2.go | 2 +- .../cloudformation/aws/ec2/instance.go | 10 +- .../aws/ec2/launch_configuration.go | 2 +- .../cloudformation/aws/ec2/launch_template.go | 6 +- .../adapters/cloudformation/aws/ec2/nacl.go | 2 +- .../cloudformation/aws/ec2/security_group.go | 8 +- .../adapters/cloudformation/aws/ec2/subnet.go | 2 +- .../adapters/cloudformation/aws/ec2/volume.go | 2 +- .../adapters/cloudformation/aws/ecr/ecr.go | 2 +- .../cloudformation/aws/ecr/repository.go | 8 +- .../cloudformation/aws/ecs/cluster.go | 8 +- .../adapters/cloudformation/aws/ecs/ecs.go | 2 +- .../cloudformation/aws/ecs/task_definition.go | 10 +- .../adapters/cloudformation/aws/efs/efs.go | 2 +- .../cloudformation/aws/efs/filesystem.go | 2 +- .../cloudformation/aws/eks/cluster.go | 6 +- .../adapters/cloudformation/aws/eks/eks.go | 2 +- .../cloudformation/aws/elasticache/cluster.go | 2 +- .../aws/elasticache/elasticache.go | 2 +- .../aws/elasticache/replication_group.go | 2 +- .../aws/elasticache/security_group.go | 2 +- .../aws/elasticsearch/domain.go | 2 
+- .../aws/elasticsearch/elasticsearch.go | 2 +- .../cloudformation/aws/elb/adapt_test.go | 2 +- .../adapters/cloudformation/aws/elb/elb.go | 2 +- .../cloudformation/aws/elb/loadbalancer.go | 14 +-- .../adapters/cloudformation/aws/iam/iam.go | 2 +- .../adapters/cloudformation/aws/iam/policy.go | 14 +-- .../cloudformation/aws/kinesis/kinesis.go | 2 +- .../cloudformation/aws/kinesis/stream.go | 2 +- .../cloudformation/aws/lambda/function.go | 6 +- .../cloudformation/aws/lambda/lambda.go | 2 +- .../adapters/cloudformation/aws/mq/broker.go | 2 +- .../iac}/adapters/cloudformation/aws/mq/mq.go | 2 +- .../cloudformation/aws/msk/cluster.go | 2 +- .../adapters/cloudformation/aws/msk/msk.go | 2 +- .../cloudformation/aws/neptune/cluster.go | 6 +- .../cloudformation/aws/neptune/neptune.go | 2 +- .../cloudformation/aws/rds/adapt_test.go | 2 +- .../cloudformation/aws/rds/cluster.go | 2 +- .../cloudformation/aws/rds/instance.go | 12 +-- .../aws/rds/parameter_groups.go | 6 +- .../adapters/cloudformation/aws/rds/rds.go | 2 +- .../cloudformation/aws/redshift/cluster.go | 2 +- .../cloudformation/aws/redshift/redshift.go | 2 +- .../aws/redshift/security_group.go | 2 +- .../adapters/cloudformation/aws/s3/bucket.go | 16 +-- .../iac}/adapters/cloudformation/aws/s3/s3.go | 2 +- .../adapters/cloudformation/aws/sam/api.go | 12 +-- .../cloudformation/aws/sam/function.go | 6 +- .../cloudformation/aws/sam/http_api.go | 8 +- .../adapters/cloudformation/aws/sam/sam.go | 2 +- .../cloudformation/aws/sam/state_machines.go | 8 +- .../adapters/cloudformation/aws/sam/tables.go | 6 +- .../adapters/cloudformation/aws/sns/sns.go | 2 +- .../adapters/cloudformation/aws/sns/topic.go | 2 +- .../adapters/cloudformation/aws/sqs/queue.go | 2 +- .../adapters/cloudformation/aws/sqs/sqs.go | 2 +- .../adapters/cloudformation/aws/ssm/secret.go | 2 +- .../adapters/cloudformation/aws/ssm/ssm.go | 2 +- .../aws/workspaces/workspace.go | 2 +- .../aws/workspaces/workspaces.go | 2 +- pkg/iac/adapters/terraform/adapt.go | 
31 ++++++ .../aws/accessanalyzer/accessanalyzer.go | 0 pkg/iac/adapters/terraform/aws/adapt.go | 79 ++++++++++++++ .../terraform/aws/apigateway/adapt.go | 0 .../terraform/aws/apigateway/adapt_test.go | 2 +- .../terraform/aws/apigateway/apiv1.go | 0 .../terraform/aws/apigateway/apiv1_test.go | 2 +- .../terraform/aws/apigateway/apiv2.go | 0 .../terraform/aws/apigateway/apiv2_test.go | 2 +- .../terraform/aws/apigateway/namesv1.go | 0 .../terraform/aws/apigateway/namesv1_test.go | 2 +- .../terraform/aws/apigateway/namesv2.go | 0 .../terraform/aws/apigateway/namesv2_test.go | 2 +- .../adapters/terraform/aws/athena/adapt.go | 0 .../terraform/aws/athena/adapt_test.go | 3 +- .../terraform/aws/cloudfront/adapt.go | 0 .../terraform/aws/cloudfront/adapt_test.go | 3 +- .../terraform/aws/cloudtrail/adapt.go | 0 .../terraform/aws/cloudtrail/adapt_test.go | 3 +- .../terraform/aws/cloudwatch/adapt.go | 0 .../terraform/aws/cloudwatch/adapt_test.go | 3 +- .../adapters/terraform/aws/codebuild/adapt.go | 0 .../terraform/aws/codebuild/adapt_test.go | 3 +- .../adapters/terraform/aws/config/adapt.go | 0 .../terraform/aws/config/adapt_test.go | 3 +- .../terraform/aws/documentdb/adapt.go | 0 .../terraform/aws/documentdb/adapt_test.go | 3 +- .../adapters/terraform/aws/dynamodb/adapt.go | 0 .../terraform/aws/dynamodb/adapt_test.go | 3 +- .../iac}/adapters/terraform/aws/ec2/adapt.go | 0 .../adapters/terraform/aws/ec2/adapt_test.go | 3 +- .../adapters/terraform/aws/ec2/autoscaling.go | 0 .../terraform/aws/ec2/autoscaling_test.go | 3 +- .../iac}/adapters/terraform/aws/ec2/subnet.go | 0 .../adapters/terraform/aws/ec2/subnet_test.go | 3 +- .../iac}/adapters/terraform/aws/ec2/volume.go | 0 .../adapters/terraform/aws/ec2/volume_test.go | 3 +- .../iac}/adapters/terraform/aws/ec2/vpc.go | 0 .../adapters/terraform/aws/ec2/vpc_test.go | 2 +- .../iac}/adapters/terraform/aws/ecr/adapt.go | 2 +- .../adapters/terraform/aws/ecr/adapt_test.go | 3 +- .../iac}/adapters/terraform/aws/ecs/adapt.go | 0 
.../adapters/terraform/aws/ecs/adapt_test.go | 3 +- .../iac}/adapters/terraform/aws/efs/adapt.go | 0 .../adapters/terraform/aws/efs/adapt_test.go | 3 +- .../iac}/adapters/terraform/aws/eks/adapt.go | 0 .../adapters/terraform/aws/eks/adapt_test.go | 3 +- .../terraform/aws/elasticache/adapt.go | 0 .../terraform/aws/elasticache/adapt_test.go | 3 +- .../terraform/aws/elasticsearch/adapt.go | 0 .../terraform/aws/elasticsearch/adapt_test.go | 3 +- .../iac}/adapters/terraform/aws/elb/adapt.go | 0 .../adapters/terraform/aws/elb/adapt_test.go | 3 +- .../iac}/adapters/terraform/aws/emr/adapt.go | 0 .../adapters/terraform/aws/emr/adapt_test.go | 2 +- .../iac}/adapters/terraform/aws/iam/adapt.go | 0 .../adapters/terraform/aws/iam/adapt_test.go | 2 +- .../adapters/terraform/aws/iam/convert.go | 0 .../iac}/adapters/terraform/aws/iam/groups.go | 0 .../adapters/terraform/aws/iam/groups_test.go | 2 +- .../adapters/terraform/aws/iam/passwords.go | 0 .../terraform/aws/iam/passwords_test.go | 2 +- .../adapters/terraform/aws/iam/policies.go | 0 .../terraform/aws/iam/policies_test.go | 2 +- .../iac}/adapters/terraform/aws/iam/roles.go | 0 .../adapters/terraform/aws/iam/roles_test.go | 2 +- .../iac}/adapters/terraform/aws/iam/users.go | 0 .../adapters/terraform/aws/iam/users_test.go | 2 +- .../adapters/terraform/aws/kinesis/adapt.go | 0 .../terraform/aws/kinesis/adapt_test.go | 3 +- .../iac}/adapters/terraform/aws/kms/adapt.go | 0 .../adapters/terraform/aws/kms/adapt_test.go | 3 +- .../adapters/terraform/aws/lambda/adapt.go | 0 .../terraform/aws/lambda/adapt_test.go | 2 +- .../iac}/adapters/terraform/aws/mq/adapt.go | 0 .../adapters/terraform/aws/mq/adapt_test.go | 3 +- .../iac}/adapters/terraform/aws/msk/adapt.go | 0 .../adapters/terraform/aws/msk/adapt_test.go | 3 +- .../adapters/terraform/aws/neptune/adapt.go | 0 .../terraform/aws/neptune/adapt_test.go | 3 +- .../adapters/terraform/aws/provider/adapt.go | 0 .../terraform/aws/provider/adapt_test.go | 2 +- 
.../iac}/adapters/terraform/aws/rds/adapt.go | 30 +++--- .../adapters/terraform/aws/rds/adapt_test.go | 2 +- .../adapters/terraform/aws/redshift/adapt.go | 0 .../terraform/aws/redshift/adapt_test.go | 2 +- .../iac}/adapters/terraform/aws/s3/adapt.go | 0 .../adapters/terraform/aws/s3/adapt_test.go | 2 +- .../iac}/adapters/terraform/aws/s3/bucket.go | 0 .../adapters/terraform/aws/s3/bucket_test.go | 3 +- .../adapters/terraform/aws/s3/policies.go | 2 +- .../terraform/aws/s3/public_access_block.go | 0 .../iac}/adapters/terraform/aws/sns/adapt.go | 0 .../adapters/terraform/aws/sns/adapt_test.go | 3 +- .../iac}/adapters/terraform/aws/sqs/adapt.go | 2 +- .../adapters/terraform/aws/sqs/adapt_test.go | 3 +- .../iac}/adapters/terraform/aws/ssm/adapt.go | 0 .../adapters/terraform/aws/ssm/adapt_test.go | 3 +- .../terraform/aws/workspaces/adapt.go | 0 .../terraform/aws/workspaces/adapt_test.go | 3 +- pkg/iac/adapters/terraform/azure/adapt.go | 37 +++++++ .../terraform/azure/appservice/adapt.go | 0 .../terraform/azure/appservice/adapt_test.go | 3 +- .../terraform/azure/authorization/adapt.go | 0 .../azure/authorization/adapt_test.go | 3 +- .../adapters/terraform/azure/compute/adapt.go | 0 .../terraform/azure/compute/adapt_test.go | 3 +- .../terraform/azure/container/adapt.go | 0 .../terraform/azure/container/adapt_test.go | 3 +- .../terraform/azure/database/adapt.go | 0 .../terraform/azure/database/adapt_test.go | 2 +- .../terraform/azure/datafactory/adapt.go | 0 .../terraform/azure/datafactory/adapt_test.go | 3 +- .../terraform/azure/datalake/adapt.go | 0 .../terraform/azure/datalake/adapt_test.go | 3 +- .../terraform/azure/keyvault/adapt.go | 0 .../terraform/azure/keyvault/adapt_test.go | 3 +- .../adapters/terraform/azure/monitor/adapt.go | 0 .../terraform/azure/monitor/adapt_test.go | 3 +- .../adapters/terraform/azure/network/adapt.go | 0 .../terraform/azure/network/adapt_test.go | 2 +- .../terraform/azure/securitycenter/adapt.go | 0 .../azure/securitycenter/adapt_test.go | 3 
+- .../adapters/terraform/azure/storage/adapt.go | 0 .../terraform/azure/storage/adapt_test.go | 2 +- .../adapters/terraform/azure/synapse/adapt.go | 0 .../terraform/azure/synapse/adapt_test.go | 3 +- .../adapters/terraform/cloudstack/adapt.go | 2 +- .../terraform/cloudstack/compute/adapt.go | 0 .../cloudstack/compute/adapt_test.go | 3 +- .../adapters/terraform/digitalocean/adapt.go | 4 +- .../terraform/digitalocean/compute/adapt.go | 0 .../digitalocean/compute/adapt_test.go | 2 +- .../terraform/digitalocean/spaces/adapt.go | 0 .../digitalocean/spaces/adapt_test.go | 3 +- .../iac}/adapters/terraform/github/adapt.go | 6 +- .../github/branch_protections/adapt.go | 0 .../github/branch_protections/adapt_test.go | 2 +- .../terraform/github/repositories/adapt.go | 0 .../github/repositories/adapt_test.go | 2 +- .../terraform/github/secrets/adapt.go | 0 .../terraform/github/secrets/adapt_test.go | 3 +- pkg/iac/adapters/terraform/google/adapt.go | 27 +++++ .../terraform/google/bigquery/adapt.go | 0 .../terraform/google/bigquery/adapt_test.go | 3 +- .../terraform/google/compute/adapt.go | 0 .../terraform/google/compute/adapt_test.go | 2 +- .../terraform/google/compute/disks.go | 0 .../terraform/google/compute/disks_test.go | 2 +- .../terraform/google/compute/instances.go | 0 .../google/compute/instances_test.go | 2 +- .../terraform/google/compute/metadata.go | 0 .../terraform/google/compute/metadata_test.go | 2 +- .../terraform/google/compute/networks.go | 0 .../terraform/google/compute/networks_test.go | 2 +- .../adapters/terraform/google/compute/ssl.go | 0 .../terraform/google/compute/ssl_test.go | 2 +- .../adapters/terraform/google/dns/adapt.go | 0 .../terraform/google/dns/adapt_test.go | 3 +- .../adapters/terraform/google/gke/adapt.go | 0 .../terraform/google/gke/adapt_test.go | 2 +- .../adapters/terraform/google/iam/adapt.go | 0 .../terraform/google/iam/adapt_test.go | 2 +- .../adapters/terraform/google/iam/convert.go | 0 .../terraform/google/iam/folder_iam.go | 0 
.../adapters/terraform/google/iam/folders.go | 0 .../adapters/terraform/google/iam/org_iam.go | 0 .../terraform/google/iam/project_iam.go | 0 .../terraform/google/iam/project_iam_test.go | 2 +- .../adapters/terraform/google/iam/projects.go | 0 .../iam/workload_identity_pool_providers.go | 0 .../adapters/terraform/google/kms/adapt.go | 0 .../terraform/google/kms/adapt_test.go | 3 +- .../adapters/terraform/google/sql/adapt.go | 0 .../terraform/google/sql/adapt_test.go | 3 +- .../terraform/google/storage/adapt.go | 0 .../terraform/google/storage/adapt_test.go | 2 +- .../adapters/terraform/google/storage/iam.go | 8 +- .../adapters/terraform/kubernetes/adapt.go | 0 .../terraform/kubernetes/adapt_test.go | 0 .../terraform/nifcloud/computing/adapt.go | 0 .../nifcloud/computing/adapt_test.go | 2 +- .../terraform/nifcloud/computing/instance.go | 0 .../nifcloud/computing/instance_test.go | 3 +- .../nifcloud/computing/security_group.go | 0 .../nifcloud/computing/security_group_test.go | 3 +- .../adapters/terraform/nifcloud/dns/adapt.go | 0 .../terraform/nifcloud/dns/adapt_test.go | 2 +- .../adapters/terraform/nifcloud/dns/record.go | 0 .../terraform/nifcloud/dns/record_test.go | 3 +- .../adapters/terraform/nifcloud/nas/adapt.go | 0 .../terraform/nifcloud/nas/adapt_test.go | 2 +- .../terraform/nifcloud/nas/nas_instance.go | 0 .../nifcloud/nas/nas_instance_test.go | 3 +- .../nifcloud/nas/nas_security_group.go | 0 .../nifcloud/nas/nas_security_group_test.go | 3 +- .../terraform/nifcloud/network/adapt.go | 0 .../terraform/nifcloud/network/adapt_test.go | 2 +- .../nifcloud/network/elastic_load_balancer.go | 0 .../network/elastic_load_balancer_test.go | 3 +- .../nifcloud/network/load_balancer.go | 0 .../nifcloud/network/load_balancer_test.go | 3 +- .../terraform/nifcloud/network/router.go | 0 .../terraform/nifcloud/network/router_test.go | 3 +- .../terraform/nifcloud/network/vpn_gateway.go | 0 .../nifcloud/network/vpn_gateway_test.go | 3 +- 
.../adapters/terraform/nifcloud/nifcloud.go | 12 +-- .../adapters/terraform/nifcloud/rdb/adapt.go | 0 .../terraform/nifcloud/rdb/adapt_test.go | 2 +- .../terraform/nifcloud/rdb/db_instance.go | 0 .../nifcloud/rdb/db_instance_test.go | 3 +- .../nifcloud/rdb/db_security_group.go | 0 .../nifcloud/rdb/db_security_group_test.go | 3 +- .../nifcloud/sslcertificate/adapt.go | 0 .../nifcloud/sslcertificate/adapt_test.go | 2 +- .../sslcertificate/server_certificate.go | 0 .../sslcertificate/server_certificate_test.go | 3 +- .../adapters/terraform/openstack/adapt.go | 0 .../terraform/openstack/adapt_test.go | 3 +- .../terraform/openstack/networking.go | 0 .../terraform/openstack/networking_test.go | 2 +- .../iac}/adapters/terraform/oracle/adapt.go | 0 .../adapters/terraform/tftestutil/testutil.go | 4 +- pkg/{ => iac}/detection/detect.go | 2 +- pkg/{ => iac}/detection/detect_test.go | 0 pkg/{ => iac}/detection/peek.go | 0 pkg/{ => iac}/detection/testdata/big.file | Bin pkg/{ => iac}/detection/testdata/small.file | 0 .../azure/arm/parser/armjson/bench_test.go | 0 .../azure/arm/parser/armjson/decode.go | 0 .../azure/arm/parser/armjson/decode_array.go | 0 .../arm/parser/armjson/decode_boolean.go | 0 .../arm/parser/armjson/decode_meta_test.go | 0 .../azure/arm/parser/armjson/decode_null.go | 0 .../azure/arm/parser/armjson/decode_number.go | 0 .../azure/arm/parser/armjson/decode_object.go | 0 .../azure/arm/parser/armjson/decode_string.go | 0 .../scanners/azure/arm/parser/armjson/kind.go | 0 .../scanners/azure/arm/parser/armjson/node.go | 0 .../azure/arm/parser/armjson/parse.go | 0 .../azure/arm/parser/armjson/parse_array.go | 0 .../arm/parser/armjson/parse_array_test.go | 0 .../azure/arm/parser/armjson/parse_boolean.go | 0 .../arm/parser/armjson/parse_boolean_test.go | 0 .../azure/arm/parser/armjson/parse_comment.go | 0 .../arm/parser/armjson/parse_complex_test.go | 0 .../azure/arm/parser/armjson/parse_null.go | 0 .../arm/parser/armjson/parse_null_test.go | 0 
.../azure/arm/parser/armjson/parse_number.go | 0 .../arm/parser/armjson/parse_number_test.go | 0 .../azure/arm/parser/armjson/parse_object.go | 0 .../arm/parser/armjson/parse_object_test.go | 0 .../azure/arm/parser/armjson/parse_string.go | 0 .../arm/parser/armjson/parse_string_test.go | 0 .../arm/parser/armjson/parse_whitespace.go | 0 .../azure/arm/parser/armjson/reader.go | 0 .../azure/arm/parser/armjson/reader_test.go | 0 .../azure/arm/parser/armjson/unmarshal.go | 0 .../scanners/azure/arm/parser/parser.go | 34 +++--- .../scanners/azure/arm/parser/parser_test.go | 70 ++++++------- .../scanners/azure/arm/parser/template.go | 4 +- .../azure/arm/parser/template_test.go | 4 +- .../azure/arm/parser/testdata/example.json | 0 .../azure/arm/parser/testdata/postgres.json | 0 pkg/{ => iac}/scanners/azure/arm/scanner.go | 8 +- pkg/{ => iac}/scanners/azure/deployment.go | 0 .../scanners/azure/expressions/lex.go | 0 .../scanners/azure/expressions/node.go | 10 +- .../azure/expressions/token_walker.go | 0 pkg/{ => iac}/scanners/azure/functions/add.go | 0 .../scanners/azure/functions/add_test.go | 0 pkg/{ => iac}/scanners/azure/functions/and.go | 0 .../scanners/azure/functions/and_test.go | 0 .../scanners/azure/functions/array.go | 0 .../scanners/azure/functions/array_test.go | 0 .../scanners/azure/functions/base64.go | 0 .../scanners/azure/functions/base64_test.go | 0 .../scanners/azure/functions/bool.go | 0 .../scanners/azure/functions/bool_test.go | 0 .../scanners/azure/functions/casing.go | 0 .../scanners/azure/functions/casing_test.go | 0 .../scanners/azure/functions/coalesce.go | 0 .../scanners/azure/functions/coalesce_test.go | 0 .../scanners/azure/functions/concat.go | 0 .../scanners/azure/functions/concat_test.go | 0 .../scanners/azure/functions/contains.go | 0 .../scanners/azure/functions/contains_test.go | 0 .../scanners/azure/functions/copy_index.go | 0 .../azure/functions/copy_index_test.go | 0 .../scanners/azure/functions/create_array.go | 0 
.../azure/functions/create_array_test.go | 0 .../scanners/azure/functions/create_object.go | 0 .../azure/functions/create_object_test.go | 0 .../scanners/azure/functions/data_uri.go | 0 .../scanners/azure/functions/data_uri_test.go | 0 .../scanners/azure/functions/date_time_add.go | 0 .../azure/functions/date_time_epoch.go | 0 .../azure/functions/date_time_epoch_test.go | 0 .../azure/functions/datetime_add_test.go | 0 .../scanners/azure/functions/deployment.go | 0 pkg/{ => iac}/scanners/azure/functions/div.go | 0 .../scanners/azure/functions/div_test.go | 0 .../scanners/azure/functions/empty.go | 0 .../scanners/azure/functions/empty_test.go | 0 .../scanners/azure/functions/ends_with.go | 0 .../azure/functions/ends_with_test.go | 0 .../scanners/azure/functions/equals.go | 0 .../scanners/azure/functions/equals_test.go | 0 .../scanners/azure/functions/false.go | 0 .../scanners/azure/functions/first.go | 0 .../scanners/azure/functions/first_test.go | 0 .../scanners/azure/functions/float.go | 0 .../scanners/azure/functions/float_test.go | 0 .../scanners/azure/functions/format.go | 0 .../scanners/azure/functions/format_test.go | 0 .../scanners/azure/functions/functions.go | 0 .../scanners/azure/functions/greater.go | 0 .../scanners/azure/functions/greater_test.go | 0 .../scanners/azure/functions/guid.go | 0 .../scanners/azure/functions/guid_test.go | 0 pkg/{ => iac}/scanners/azure/functions/if.go | 0 .../scanners/azure/functions/if_test.go | 0 .../scanners/azure/functions/index_of.go | 0 .../scanners/azure/functions/index_of_test.go | 0 pkg/{ => iac}/scanners/azure/functions/int.go | 0 .../scanners/azure/functions/int_test.go | 0 .../scanners/azure/functions/intersection.go | 0 .../azure/functions/intersection_test.go | 0 .../scanners/azure/functions/items.go | 0 .../scanners/azure/functions/join.go | 0 .../scanners/azure/functions/join_test.go | 0 .../scanners/azure/functions/json.go | 0 .../scanners/azure/functions/json_test.go | 0 .../scanners/azure/functions/last.go 
| 0 .../scanners/azure/functions/last_index_of.go | 0 .../azure/functions/last_index_of_test.go | 0 .../scanners/azure/functions/last_test.go | 0 .../scanners/azure/functions/length.go | 0 .../scanners/azure/functions/length_test.go | 0 .../scanners/azure/functions/less.go | 0 .../scanners/azure/functions/less_test.go | 0 pkg/{ => iac}/scanners/azure/functions/max.go | 0 .../scanners/azure/functions/max_test.go | 0 pkg/{ => iac}/scanners/azure/functions/min.go | 0 .../scanners/azure/functions/min_test.go | 0 pkg/{ => iac}/scanners/azure/functions/mod.go | 0 .../scanners/azure/functions/mod_test.go | 0 pkg/{ => iac}/scanners/azure/functions/mul.go | 0 .../scanners/azure/functions/mul_test.go | 0 pkg/{ => iac}/scanners/azure/functions/not.go | 0 .../scanners/azure/functions/not_test.go | 0 .../scanners/azure/functions/null.go | 0 .../scanners/azure/functions/null_test.go | 0 pkg/{ => iac}/scanners/azure/functions/or.go | 0 .../scanners/azure/functions/or_test.go | 0 pkg/{ => iac}/scanners/azure/functions/pad.go | 0 .../scanners/azure/functions/pad_test.go | 0 .../scanners/azure/functions/parameters.go | 0 .../scanners/azure/functions/pick_zones.go | 0 .../azure/functions/pick_zones_test.go | 0 .../scanners/azure/functions/range.go | 0 .../scanners/azure/functions/range_test.go | 0 .../scanners/azure/functions/reference.go | 0 .../azure/functions/reference_test.go | 0 .../scanners/azure/functions/replace.go | 0 .../scanners/azure/functions/replace_test.go | 0 .../scanners/azure/functions/resource.go | 0 .../scanners/azure/functions/resource_test.go | 0 .../scanners/azure/functions/scope.go | 0 .../scanners/azure/functions/scope_test.go | 0 .../scanners/azure/functions/skip.go | 0 .../scanners/azure/functions/skip_test.go | 0 .../scanners/azure/functions/split.go | 0 .../scanners/azure/functions/split_test.go | 0 .../scanners/azure/functions/starts_with.go | 0 .../azure/functions/starts_with_test.go | 0 .../scanners/azure/functions/string.go | 0 
.../scanners/azure/functions/string_test.go | 0 pkg/{ => iac}/scanners/azure/functions/sub.go | 0 .../scanners/azure/functions/sub_test.go | 0 .../scanners/azure/functions/substring.go | 0 .../azure/functions/substring_test.go | 0 .../scanners/azure/functions/take.go | 0 .../scanners/azure/functions/take_test.go | 0 .../scanners/azure/functions/trim.go | 0 .../scanners/azure/functions/trim_test.go | 0 .../scanners/azure/functions/true.go | 0 .../scanners/azure/functions/union.go | 0 .../scanners/azure/functions/union_test.go | 0 .../scanners/azure/functions/unique_string.go | 0 .../azure/functions/unique_string_test.go | 0 pkg/{ => iac}/scanners/azure/functions/uri.go | 0 .../scanners/azure/functions/uri_test.go | 0 .../scanners/azure/functions/utc_now.go | 0 .../scanners/azure/functions/utc_now_test.go | 0 .../scanners/azure/resolver/resolver.go | 24 ++--- .../scanners/azure/resolver/resolver_test.go | 16 +-- pkg/{ => iac}/scanners/azure/value.go | 30 +++--- pkg/{ => iac}/scanners/azure/value_test.go | 0 .../scanners/cloudformation/cftypes/types.go | 0 .../scanners/cloudformation/parser/errors.go | 0 .../cloudformation/parser/file_context.go | 0 .../parser/file_context_test.go | 0 .../scanners/cloudformation/parser/fn_and.go | 4 +- .../cloudformation/parser/fn_and_test.go | 3 +- .../cloudformation/parser/fn_base64.go | 2 +- .../cloudformation/parser/fn_base64_test.go | 2 +- .../cloudformation/parser/fn_builtin.go | 3 +- .../cloudformation/parser/fn_builtin_test.go | 0 .../cloudformation/parser/fn_condition.go | 0 .../parser/fn_condition_test.go | 2 +- .../cloudformation/parser/fn_equals.go | 2 +- .../cloudformation/parser/fn_equals_test.go | 3 +- .../cloudformation/parser/fn_find_in_map.go | 2 +- .../parser/fn_find_in_map_test.go | 0 .../cloudformation/parser/fn_get_attr.go | 2 +- .../cloudformation/parser/fn_get_attr_test.go | 0 .../scanners/cloudformation/parser/fn_if.go | 0 .../cloudformation/parser/fn_if_test.go | 3 +- 
.../scanners/cloudformation/parser/fn_join.go | 2 +- .../cloudformation/parser/fn_join_test.go | 3 +- .../cloudformation/parser/fn_length.go | 4 +- .../cloudformation/parser/fn_length_test.go | 2 +- .../scanners/cloudformation/parser/fn_not.go | 4 +- .../cloudformation/parser/fn_not_test.go | 3 +- .../scanners/cloudformation/parser/fn_or.go | 4 +- .../cloudformation/parser/fn_or_test.go | 3 +- .../scanners/cloudformation/parser/fn_ref.go | 2 +- .../cloudformation/parser/fn_ref_test.go | 3 +- .../cloudformation/parser/fn_select.go | 2 +- .../cloudformation/parser/fn_select_test.go | 0 .../cloudformation/parser/fn_split.go | 2 +- .../cloudformation/parser/fn_split_test.go | 2 +- .../scanners/cloudformation/parser/fn_sub.go | 2 +- .../cloudformation/parser/fn_sub_test.go | 0 .../cloudformation/parser/intrinsics.go | 0 .../cloudformation/parser/intrinsics_test.go | 0 .../cloudformation/parser/parameter.go | 3 +- .../cloudformation/parser/parameters_test.go | 0 .../scanners/cloudformation/parser/parser.go | 2 +- .../cloudformation/parser/parser_test.go | 0 .../cloudformation/parser/property.go | 2 +- .../parser/property_conversion.go | 2 +- .../cloudformation/parser/property_helpers.go | 2 +- .../parser/property_helpers_test.go | 2 +- .../parser/pseudo_parameters.go | 4 +- .../parser/pseudo_parameters_test.go | 0 .../cloudformation/parser/reference.go | 0 .../cloudformation/parser/resource.go | 0 .../cloudformation/parser/resource_test.go | 2 +- .../scanners/cloudformation/parser/util.go | 5 +- .../scanners/cloudformation/scanner.go | 18 ++-- .../scanners/cloudformation/scanner_test.go | 0 .../cloudformation/test/cf_scanning_test.go | 2 +- .../test/examples/bucket/bucket.yaml | 0 .../examples/ignores/bucket_with_ignores.yaml | 0 .../test/examples/roles/roles.yml | 0 .../scanners/dockerfile/parser/parser.go | 2 +- .../scanners/dockerfile/parser/parser_test.go | 0 pkg/{ => iac}/scanners/dockerfile/scanner.go | 4 +- .../scanners/dockerfile/scanner_test.go | 0 pkg/{ => 
iac}/scanners/helm/options.go | 2 +- pkg/{ => iac}/scanners/helm/parser/option.go | 0 pkg/{ => iac}/scanners/helm/parser/parser.go | 6 +- .../scanners/helm/parser/parser_tar.go | 3 +- .../scanners/helm/parser/parser_test.go | 0 .../my-chart-0.1.0.tgz | Bin .../my-chart/Chart.yaml | 0 .../my-chart/templates/pod.yaml | 0 pkg/{ => iac}/scanners/helm/parser/vals.go | 0 pkg/{ => iac}/scanners/helm/scanner.go | 8 +- .../scanners/helm/test/mysql/.helmignore | 0 .../scanners/helm/test/mysql/Chart.lock | 0 .../scanners/helm/test/mysql/Chart.yaml | 0 .../scanners/helm/test/mysql/README.md | 0 .../helm/test/mysql/charts/common/.helmignore | 0 .../helm/test/mysql/charts/common/Chart.yaml | 0 .../helm/test/mysql/charts/common/README.md | 0 .../charts/common/templates/_affinities.tpl | 0 .../charts/common/templates/_capabilities.tpl | 0 .../mysql/charts/common/templates/_errors.tpl | 0 .../mysql/charts/common/templates/_images.tpl | 0 .../charts/common/templates/_ingress.tpl | 0 .../mysql/charts/common/templates/_labels.tpl | 0 .../mysql/charts/common/templates/_names.tpl | 0 .../charts/common/templates/_secrets.tpl | 0 .../charts/common/templates/_storage.tpl | 0 .../charts/common/templates/_tplvalues.tpl | 0 .../mysql/charts/common/templates/_utils.tpl | 0 .../charts/common/templates/_warnings.tpl | 0 .../templates/validations/_cassandra.tpl | 0 .../common/templates/validations/_mariadb.tpl | 0 .../common/templates/validations/_mongodb.tpl | 0 .../templates/validations/_postgresql.tpl | 0 .../common/templates/validations/_redis.tpl | 0 .../templates/validations/_validations.tpl | 0 .../helm/test/mysql/charts/common/values.yaml | 0 .../mysql/ci/values-production-with-rbac.yaml | 0 .../helm/test/mysql/templates/NOTES.txt | 0 .../helm/test/mysql/templates/_helpers.tpl | 0 .../helm/test/mysql/templates/extra-list.yaml | 0 .../test/mysql/templates/metrics-svc.yaml | 0 .../test/mysql/templates/networkpolicy.yaml | 0 .../mysql/templates/primary/configmap.yaml | 0 
.../primary/initialization-configmap.yaml | 0 .../test/mysql/templates/primary/pdb.yaml | 0 .../mysql/templates/primary/statefulset.yaml | 0 .../mysql/templates/primary/svc-headless.yaml | 0 .../test/mysql/templates/primary/svc.yaml | 0 .../helm/test/mysql/templates/role.yaml | 0 .../test/mysql/templates/rolebinding.yaml | 0 .../mysql/templates/secondary/configmap.yaml | 0 .../test/mysql/templates/secondary/pdb.yaml | 0 .../templates/secondary/statefulset.yaml | 0 .../templates/secondary/svc-headless.yaml | 0 .../test/mysql/templates/secondary/svc.yaml | 0 .../helm/test/mysql/templates/secrets.yaml | 0 .../test/mysql/templates/serviceaccount.yaml | 0 .../test/mysql/templates/servicemonitor.yaml | 0 .../helm/test/mysql/values.schema.json | 0 .../scanners/helm/test/mysql/values.yaml | 0 .../scanners/helm/test/option_test.go | 16 +-- .../scanners/helm/test/parser_test.go | 4 +- .../scanners/helm/test/scanner_test.go | 2 +- .../aws-cluster-autoscaler-bad.tar.gz | Bin .../mysql/templates/primary/configmap.yaml | 0 .../mysql/templates/primary/statefulset.yaml | 0 .../mysql/templates/primary/svc-headless.yaml | 0 .../expected/mysql/templates/primary/svc.yaml | 0 .../expected/mysql/templates/secrets.yaml | 0 .../mysql/templates/serviceaccount.yaml | 0 .../testchart/templates/deployment.yaml | 0 .../options/testchart/templates/service.yaml | 0 .../testchart/templates/serviceaccount.yaml | 0 .../with-api-version/templates/pdb.yaml | 0 .../testchart/templates/deployment.yaml | 0 .../expected/testchart/templates/service.yaml | 0 .../testchart/templates/serviceaccount.yaml | 0 .../with-tarred-dep/templates/deployment.yaml | 0 .../with-tarred-dep/templates/ingress.yaml | 0 .../with-tarred-dep/templates/service.yaml | 0 .../helm/test/testdata/mysql-8.8.26.tar | Bin .../helm/test/testdata/mysql-8.8.26.tar.gz | Bin .../helm/test/testdata/mysql-8.8.26.tgz | Bin .../scanners/helm/test/testdata/nope.tgz | Bin .../helm/test/testdata/numberName/Chart.yaml | 0 
.../testdata/simmilar-templates/Chart.yaml | 0 .../templates/deployment.yaml | 0 .../templates/manifest.yaml | 0 .../test/testdata/templated-name/Chart.yaml | 0 .../helm/test/testdata/testchart/.helmignore | 0 .../helm/test/testdata/testchart/Chart.yaml | 0 .../testdata/testchart/templates/NOTES.txt | 0 .../testdata/testchart/templates/_helpers.tpl | 0 .../testchart/templates/deployment.yaml | 0 .../testdata/testchart/templates/hpa.yaml | 0 .../testdata/testchart/templates/ingress.yaml | 0 .../testdata/testchart/templates/service.yaml | 0 .../testchart/templates/serviceaccount.yaml | 0 .../templates/tests/test-connection.yaml | 0 .../helm/test/testdata/testchart/values.yaml | 0 .../testdata/with-api-version/.helmignore | 0 .../test/testdata/with-api-version/Chart.yaml | 0 .../with-api-version/templates/_helpers.tpl | 0 .../with-api-version/templates/pdb.yaml | 0 .../testdata/with-api-version/values.yaml | 0 .../test/testdata/with-tarred-dep/.helmignore | 0 .../test/testdata/with-tarred-dep/Chart.yaml | 0 .../test/testdata/with-tarred-dep/LICENSE | 0 .../with-tarred-dep/charts/common-1.16.1.tgz | Bin .../testdata/with-tarred-dep/renovate.json | 0 .../with-tarred-dep/templates/.gitkeep | 0 .../with-tarred-dep/templates/deployment.yaml | 0 .../with-tarred-dep/templates/ingress.yaml | 0 .../templates/secrets-crdb-ca.yaml | 0 .../templates/secrets-dbconn.yaml | 0 .../with-tarred-dep/templates/service.yaml | 0 .../test/testdata/with-tarred-dep/values.yaml | 0 .../scanners/helm/test/values/values.yaml | 0 pkg/{ => iac}/scanners/json/parser/parser.go | 2 +- .../scanners/json/parser/parser_test.go | 0 pkg/{ => iac}/scanners/json/scanner.go | 4 +- pkg/{ => iac}/scanners/json/scanner_test.go | 0 .../scanners/kubernetes/parser/manifest.go | 0 .../kubernetes/parser/manifest_node.go | 6 +- .../scanners/kubernetes/parser/parser.go | 2 +- pkg/{ => iac}/scanners/kubernetes/scanner.go | 4 +- .../scanners/kubernetes/scanner_test.go | 0 pkg/{ => iac}/scanners/scanner.go | 0 
.../scanners/terraform/executor/executor.go | 2 +- .../terraform/executor/executor_test.go | 10 +- .../scanners/terraform/executor/option.go | 0 .../scanners/terraform/executor/pool.go | 0 .../scanners/terraform/executor/statistics.go | 0 pkg/{ => iac}/scanners/terraform/options.go | 4 +- .../scanners/terraform/parser/evaluator.go | 0 .../terraform/parser/evaluator_test.go | 0 .../scanners/terraform/parser/funcs/cidr.go | 0 .../terraform/parser/funcs/collection.go | 0 .../terraform/parser/funcs/conversion.go | 0 .../scanners/terraform/parser/funcs/crypto.go | 0 .../terraform/parser/funcs/datetime.go | 0 .../terraform/parser/funcs/defaults.go | 0 .../terraform/parser/funcs/encoding.go | 0 .../terraform/parser/funcs/filesystem.go | 0 .../scanners/terraform/parser/funcs/marks.go | 0 .../scanners/terraform/parser/funcs/number.go | 0 .../terraform/parser/funcs/sensitive.go | 0 .../scanners/terraform/parser/funcs/string.go | 0 .../scanners/terraform/parser/functions.go | 99 +++++++++--------- .../scanners/terraform/parser/load_blocks.go | 0 .../terraform/parser/load_blocks_test.go | 0 .../scanners/terraform/parser/load_module.go | 2 +- .../terraform/parser/load_module_metadata.go | 0 .../scanners/terraform/parser/load_vars.go | 0 .../terraform/parser/load_vars_test.go | 0 .../terraform/parser/module_retrieval.go | 14 +-- .../scanners/terraform/parser/option.go | 0 .../scanners/terraform/parser/parser.go | 14 +-- .../parser/parser_integration_test.go | 0 .../scanners/terraform/parser/parser_test.go | 0 .../terraform/parser/resolvers/cache.go | 0 .../terraform/parser/resolvers/local.go | 0 .../terraform/parser/resolvers/options.go | 0 .../terraform/parser/resolvers/registry.go | 0 .../terraform/parser/resolvers/remote.go | 0 .../terraform/parser/resolvers/writable.go | 0 .../parser/resolvers/writable_windows.go | 0 .../scanners/terraform/parser/sort.go | 0 .../parser/testdata/tfvars/terraform.tfvars | 0 .../testdata/tfvars/terraform.tfvars.json | 0 pkg/{ => 
iac}/scanners/terraform/scanner.go | 22 ++-- .../terraform/scanner_integration_test.go | 0 .../scanners/terraform/scanner_test.go | 0 .../scanners/terraformplan/parser/option.go | 0 .../scanners/terraformplan/parser/parser.go | 0 .../terraformplan/parser/plan_file.go | 0 .../scanners/terraformplan/scanner.go | 6 +- .../scanners/terraformplan/scanner_test.go | 0 .../terraformplan/test/parser_test.go | 2 +- .../terraformplan/test/scanner_test.go | 2 +- .../terraformplan/test/testdata/plan.json | 0 pkg/{ => iac}/scanners/toml/parser/parser.go | 2 +- .../scanners/toml/parser/parser_test.go | 0 pkg/{ => iac}/scanners/toml/scanner.go | 2 +- pkg/{ => iac}/scanners/toml/scanner_test.go | 0 pkg/{ => iac}/scanners/universal/scanner.go | 20 ++-- pkg/{ => iac}/scanners/yaml/parser/parser.go | 2 +- .../scanners/yaml/parser/parser_test.go | 0 pkg/{ => iac}/scanners/yaml/scanner.go | 2 +- pkg/{ => iac}/scanners/yaml/scanner_test.go | 0 pkg/misconf/scanner.go | 44 ++++---- test/deterministic_test.go | 6 +- test/docker_test.go | 3 +- test/fs_test.go | 3 +- test/kubernetes_test.go | 3 +- test/module_test.go | 32 +++--- test/performance_test.go | 6 +- test/setup_test.go | 12 +-- 772 files changed, 958 insertions(+), 1022 deletions(-) delete mode 100644 internal/adapters/cloudformation/aws/adapt.go delete mode 100644 internal/adapters/terraform/adapt.go delete mode 100644 internal/adapters/terraform/aws/adapt.go delete mode 100644 internal/adapters/terraform/azure/adapt.go delete mode 100644 internal/adapters/terraform/google/adapt.go rename {internal => pkg/iac}/adapters/arm/adapt.go (52%) rename {internal => pkg/iac}/adapters/arm/appservice/adapt.go (97%) rename {internal => pkg/iac}/adapters/arm/authorization/adapt.go (95%) rename {internal => pkg/iac}/adapters/arm/compute/adapt.go (98%) rename {internal => pkg/iac}/adapters/arm/compute/adapt_test.go (50%) rename {internal => pkg/iac}/adapters/arm/container/adapt.go (86%) rename {internal => pkg/iac}/adapters/arm/database/adapt.go 
(96%) rename {internal => pkg/iac}/adapters/arm/database/firewall.go (90%) rename {internal => pkg/iac}/adapters/arm/database/maria.go (95%) rename {internal => pkg/iac}/adapters/arm/database/mssql.go (77%) rename {internal => pkg/iac}/adapters/arm/database/postgresql.go (97%) rename {internal => pkg/iac}/adapters/arm/datafactory/adapt.go (93%) rename {internal => pkg/iac}/adapters/arm/datalake/adapt.go (92%) rename {internal => pkg/iac}/adapters/arm/keyvault/adapt.go (97%) rename {internal => pkg/iac}/adapters/arm/monitor/adapt.go (96%) rename {internal => pkg/iac}/adapters/arm/network/adapt.go (98%) rename {internal => pkg/iac}/adapters/arm/securitycenter/adapt.go (96%) rename {internal => pkg/iac}/adapters/arm/storage/adapt.go (97%) rename {internal => pkg/iac}/adapters/arm/storage/adapt_test.go (51%) rename {internal => pkg/iac}/adapters/arm/synapse/adapt.go (94%) rename {internal => pkg/iac}/adapters/cloudformation/adapt.go (61%) rename {internal => pkg/iac}/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go (80%) rename {internal => pkg/iac}/adapters/cloudformation/aws/accessanalyzer/analyzer.go (89%) create mode 100644 pkg/iac/adapters/cloudformation/aws/adapt.go rename {internal => pkg/iac}/adapters/cloudformation/aws/apigateway/apigateway.go (87%) rename {internal => pkg/iac}/adapters/cloudformation/aws/apigateway/stage.go (85%) rename {internal => pkg/iac}/adapters/cloudformation/aws/athena/athena.go (78%) rename {internal => pkg/iac}/adapters/cloudformation/aws/athena/workgroup.go (91%) rename {internal => pkg/iac}/adapters/cloudformation/aws/cloudfront/cloudfront.go (79%) rename {internal => pkg/iac}/adapters/cloudformation/aws/cloudfront/distribution.go (86%) rename {internal => pkg/iac}/adapters/cloudformation/aws/cloudtrail/cloudtrail.go (78%) rename {internal => pkg/iac}/adapters/cloudformation/aws/cloudtrail/trails.go (92%) rename {internal => pkg/iac}/adapters/cloudformation/aws/cloudwatch/cloudwatch.go (79%) rename {internal => 
pkg/iac}/adapters/cloudformation/aws/cloudwatch/log_group.go (90%) rename {internal => pkg/iac}/adapters/cloudformation/aws/codebuild/codebuild.go (78%) rename {internal => pkg/iac}/adapters/cloudformation/aws/codebuild/project.go (83%) rename {internal => pkg/iac}/adapters/cloudformation/aws/config/adapt_test.go (95%) rename {internal => pkg/iac}/adapters/cloudformation/aws/config/aggregator.go (80%) rename {internal => pkg/iac}/adapters/cloudformation/aws/config/config.go (79%) rename {internal => pkg/iac}/adapters/cloudformation/aws/documentdb/cluster.go (85%) rename {internal => pkg/iac}/adapters/cloudformation/aws/documentdb/documentdb.go (78%) rename {internal => pkg/iac}/adapters/cloudformation/aws/dynamodb/cluster.go (93%) rename {internal => pkg/iac}/adapters/cloudformation/aws/dynamodb/dynamodb.go (77%) rename {internal => pkg/iac}/adapters/cloudformation/aws/ec2/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/cloudformation/aws/ec2/ec2.go (88%) rename {internal => pkg/iac}/adapters/cloudformation/aws/ec2/instance.go (85%) rename {internal => pkg/iac}/adapters/cloudformation/aws/ec2/launch_configuration.go (95%) rename {internal => pkg/iac}/adapters/cloudformation/aws/ec2/launch_template.go (86%) rename {internal => pkg/iac}/adapters/cloudformation/aws/ec2/nacl.go (97%) rename {internal => pkg/iac}/adapters/cloudformation/aws/ec2/security_group.go (87%) rename {internal => pkg/iac}/adapters/cloudformation/aws/ec2/subnet.go (85%) rename {internal => pkg/iac}/adapters/cloudformation/aws/ec2/volume.go (87%) rename {internal => pkg/iac}/adapters/cloudformation/aws/ecr/ecr.go (75%) rename {internal => pkg/iac}/adapters/cloudformation/aws/ecr/repository.go (89%) rename {internal => pkg/iac}/adapters/cloudformation/aws/ecs/cluster.go (79%) rename {internal => pkg/iac}/adapters/cloudformation/aws/ecs/ecs.go (78%) rename {internal => pkg/iac}/adapters/cloudformation/aws/ecs/task_definition.go (88%) rename {internal => 
pkg/iac}/adapters/cloudformation/aws/efs/efs.go (75%) rename {internal => pkg/iac}/adapters/cloudformation/aws/efs/filesystem.go (86%) rename {internal => pkg/iac}/adapters/cloudformation/aws/eks/cluster.go (88%) rename {internal => pkg/iac}/adapters/cloudformation/aws/eks/eks.go (75%) rename {internal => pkg/iac}/adapters/cloudformation/aws/elasticache/cluster.go (89%) rename {internal => pkg/iac}/adapters/cloudformation/aws/elasticache/elasticache.go (84%) rename {internal => pkg/iac}/adapters/cloudformation/aws/elasticache/replication_group.go (90%) rename {internal => pkg/iac}/adapters/cloudformation/aws/elasticache/security_group.go (87%) rename {internal => pkg/iac}/adapters/cloudformation/aws/elasticsearch/domain.go (97%) rename {internal => pkg/iac}/adapters/cloudformation/aws/elasticsearch/elasticsearch.go (79%) rename {internal => pkg/iac}/adapters/cloudformation/aws/elb/adapt_test.go (96%) rename {internal => pkg/iac}/adapters/cloudformation/aws/elb/elb.go (75%) rename {internal => pkg/iac}/adapters/cloudformation/aws/elb/loadbalancer.go (77%) rename {internal => pkg/iac}/adapters/cloudformation/aws/iam/iam.go (93%) rename {internal => pkg/iac}/adapters/cloudformation/aws/iam/policy.go (87%) rename {internal => pkg/iac}/adapters/cloudformation/aws/kinesis/kinesis.go (76%) rename {internal => pkg/iac}/adapters/cloudformation/aws/kinesis/stream.go (92%) rename {internal => pkg/iac}/adapters/cloudformation/aws/lambda/function.go (81%) rename {internal => pkg/iac}/adapters/cloudformation/aws/lambda/lambda.go (76%) rename {internal => pkg/iac}/adapters/cloudformation/aws/mq/broker.go (91%) rename {internal => pkg/iac}/adapters/cloudformation/aws/mq/mq.go (74%) rename {internal => pkg/iac}/adapters/cloudformation/aws/msk/cluster.go (97%) rename {internal => pkg/iac}/adapters/cloudformation/aws/msk/msk.go (75%) rename {internal => pkg/iac}/adapters/cloudformation/aws/neptune/cluster.go (78%) rename {internal => 
pkg/iac}/adapters/cloudformation/aws/neptune/neptune.go (77%) rename {internal => pkg/iac}/adapters/cloudformation/aws/rds/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/cloudformation/aws/rds/cluster.go (96%) rename {internal => pkg/iac}/adapters/cloudformation/aws/rds/instance.go (88%) rename {internal => pkg/iac}/adapters/cloudformation/aws/rds/parameter_groups.go (79%) rename {internal => pkg/iac}/adapters/cloudformation/aws/rds/rds.go (84%) rename {internal => pkg/iac}/adapters/cloudformation/aws/redshift/cluster.go (96%) rename {internal => pkg/iac}/adapters/cloudformation/aws/redshift/redshift.go (83%) rename {internal => pkg/iac}/adapters/cloudformation/aws/redshift/security_group.go (86%) rename {internal => pkg/iac}/adapters/cloudformation/aws/s3/bucket.go (89%) rename {internal => pkg/iac}/adapters/cloudformation/aws/s3/s3.go (74%) rename {internal => pkg/iac}/adapters/cloudformation/aws/sam/api.go (86%) rename {internal => pkg/iac}/adapters/cloudformation/aws/sam/function.go (87%) rename {internal => pkg/iac}/adapters/cloudformation/aws/sam/http_api.go (85%) rename {internal => pkg/iac}/adapters/cloudformation/aws/sam/sam.go (84%) rename {internal => pkg/iac}/adapters/cloudformation/aws/sam/state_machines.go (87%) rename {internal => pkg/iac}/adapters/cloudformation/aws/sam/tables.go (80%) rename {internal => pkg/iac}/adapters/cloudformation/aws/sns/sns.go (74%) rename {internal => pkg/iac}/adapters/cloudformation/aws/sns/topic.go (88%) rename {internal => pkg/iac}/adapters/cloudformation/aws/sqs/queue.go (96%) rename {internal => pkg/iac}/adapters/cloudformation/aws/sqs/sqs.go (74%) rename {internal => pkg/iac}/adapters/cloudformation/aws/ssm/secret.go (84%) rename {internal => pkg/iac}/adapters/cloudformation/aws/ssm/ssm.go (74%) rename {internal => pkg/iac}/adapters/cloudformation/aws/workspaces/workspace.go (91%) rename {internal => pkg/iac}/adapters/cloudformation/aws/workspaces/workspaces.go (78%) create mode 100644 
pkg/iac/adapters/terraform/adapt.go rename {internal => pkg/iac}/adapters/terraform/aws/accessanalyzer/accessanalyzer.go (100%) create mode 100644 pkg/iac/adapters/terraform/aws/adapt.go rename {internal => pkg/iac}/adapters/terraform/aws/apigateway/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/apigateway/adapt_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/aws/apigateway/apiv1.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/apigateway/apiv1_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/aws/apigateway/apiv2.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/apigateway/apiv2_test.go (96%) rename {internal => pkg/iac}/adapters/terraform/aws/apigateway/namesv1.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/apigateway/namesv1_test.go (93%) rename {internal => pkg/iac}/adapters/terraform/aws/apigateway/namesv2.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/apigateway/namesv2_test.go (94%) rename {internal => pkg/iac}/adapters/terraform/aws/athena/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/athena/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/cloudfront/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/cloudfront/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/cloudtrail/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/cloudtrail/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/cloudwatch/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/cloudwatch/adapt_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/aws/codebuild/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/codebuild/adapt_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/aws/config/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/config/adapt_test.go (96%) rename {internal => 
pkg/iac}/adapters/terraform/aws/documentdb/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/documentdb/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/dynamodb/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/dynamodb/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/ec2/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/ec2/adapt_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/aws/ec2/autoscaling.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/ec2/autoscaling_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/ec2/subnet.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/ec2/subnet_test.go (96%) rename {internal => pkg/iac}/adapters/terraform/aws/ec2/volume.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/ec2/volume_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/aws/ec2/vpc.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/ec2/vpc_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/aws/ecr/adapt.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/ecr/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/ecs/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/ecs/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/efs/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/efs/adapt_test.go (96%) rename {internal => pkg/iac}/adapters/terraform/aws/eks/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/eks/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/elasticache/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/elasticache/adapt_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/aws/elasticsearch/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/elasticsearch/adapt_test.go (98%) rename {internal => 
pkg/iac}/adapters/terraform/aws/elb/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/elb/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/emr/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/emr/adapt_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/aws/iam/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/iam/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/iam/convert.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/iam/groups.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/iam/groups_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/aws/iam/passwords.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/iam/passwords_test.go (95%) rename {internal => pkg/iac}/adapters/terraform/aws/iam/policies.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/iam/policies_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/iam/roles.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/iam/roles_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/iam/users.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/iam/users_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/kinesis/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/kinesis/adapt_test.go (96%) rename {internal => pkg/iac}/adapters/terraform/aws/kms/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/kms/adapt_test.go (96%) rename {internal => pkg/iac}/adapters/terraform/aws/lambda/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/lambda/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/mq/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/mq/adapt_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/aws/msk/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/msk/adapt_test.go 
(98%) rename {internal => pkg/iac}/adapters/terraform/aws/neptune/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/neptune/adapt_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/aws/provider/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/provider/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/rds/adapt.go (86%) rename {internal => pkg/iac}/adapters/terraform/aws/rds/adapt_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/aws/redshift/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/redshift/adapt_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/aws/s3/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/s3/adapt_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/aws/s3/bucket.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/s3/bucket_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/s3/policies.go (94%) rename {internal => pkg/iac}/adapters/terraform/aws/s3/public_access_block.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/sns/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/sns/adapt_test.go (96%) rename {internal => pkg/iac}/adapters/terraform/aws/sqs/adapt.go (98%) rename {internal => pkg/iac}/adapters/terraform/aws/sqs/adapt_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/aws/ssm/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/ssm/adapt_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/aws/workspaces/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/aws/workspaces/adapt_test.go (97%) create mode 100644 pkg/iac/adapters/terraform/azure/adapt.go rename {internal => pkg/iac}/adapters/terraform/azure/appservice/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/azure/appservice/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/azure/authorization/adapt.go (100%) rename 
{internal => pkg/iac}/adapters/terraform/azure/authorization/adapt_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/azure/compute/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/azure/compute/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/azure/container/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/azure/container/adapt_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/azure/database/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/azure/database/adapt_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/azure/datafactory/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/azure/datafactory/adapt_test.go (96%) rename {internal => pkg/iac}/adapters/terraform/azure/datalake/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/azure/datalake/adapt_test.go (96%) rename {internal => pkg/iac}/adapters/terraform/azure/keyvault/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/azure/keyvault/adapt_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/azure/monitor/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/azure/monitor/adapt_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/azure/network/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/azure/network/adapt_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/azure/securitycenter/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/azure/securitycenter/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/azure/storage/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/azure/storage/adapt_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/azure/synapse/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/azure/synapse/adapt_test.go (96%) rename {internal => pkg/iac}/adapters/terraform/cloudstack/adapt.go (77%) rename {internal => 
pkg/iac}/adapters/terraform/cloudstack/compute/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/cloudstack/compute/adapt_test.go (96%) rename {internal => pkg/iac}/adapters/terraform/digitalocean/adapt.go (66%) rename {internal => pkg/iac}/adapters/terraform/digitalocean/compute/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/digitalocean/compute/adapt_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/digitalocean/spaces/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/digitalocean/spaces/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/github/adapt.go (60%) rename {internal => pkg/iac}/adapters/terraform/github/branch_protections/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/github/branch_protections/adapt_test.go (95%) rename {internal => pkg/iac}/adapters/terraform/github/repositories/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/github/repositories/adapt_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/github/secrets/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/github/secrets/adapt_test.go (96%) create mode 100644 pkg/iac/adapters/terraform/google/adapt.go rename {internal => pkg/iac}/adapters/terraform/google/bigquery/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/bigquery/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/google/compute/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/compute/adapt_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/google/compute/disks.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/compute/disks_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/google/compute/instances.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/compute/instances_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/google/compute/metadata.go (100%) rename {internal => 
pkg/iac}/adapters/terraform/google/compute/metadata_test.go (94%) rename {internal => pkg/iac}/adapters/terraform/google/compute/networks.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/compute/networks_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/google/compute/ssl.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/compute/ssl_test.go (95%) rename {internal => pkg/iac}/adapters/terraform/google/dns/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/dns/adapt_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/google/gke/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/gke/adapt_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/google/iam/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/iam/adapt_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/google/iam/convert.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/iam/folder_iam.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/iam/folders.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/iam/org_iam.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/iam/project_iam.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/iam/project_iam_test.go (95%) rename {internal => pkg/iac}/adapters/terraform/google/iam/projects.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/iam/workload_identity_pool_providers.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/kms/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/kms/adapt_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/google/sql/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/sql/adapt_test.go (99%) rename {internal => pkg/iac}/adapters/terraform/google/storage/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/google/storage/adapt_test.go (98%) rename {internal 
=> pkg/iac}/adapters/terraform/google/storage/iam.go (89%) rename {internal => pkg/iac}/adapters/terraform/kubernetes/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/kubernetes/adapt_test.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/computing/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/computing/adapt_test.go (96%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/computing/instance.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/computing/instance_test.go (95%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/computing/security_group.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/computing/security_group_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/dns/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/dns/adapt_test.go (90%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/dns/record.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/dns/record_test.go (94%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/nas/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/nas/adapt_test.go (93%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/nas/nas_instance.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/nas/nas_instance_test.go (94%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/nas/nas_security_group.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/nas/nas_security_group_test.go (95%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/network/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/network/adapt_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/network/elastic_load_balancer.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go (96%) rename {internal => 
pkg/iac}/adapters/terraform/nifcloud/network/load_balancer.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/network/load_balancer_test.go (96%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/network/router.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/network/router_test.go (95%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/network/vpn_gateway.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/network/vpn_gateway_test.go (94%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/nifcloud.go (50%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/rdb/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/rdb/adapt_test.go (96%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/rdb/db_instance.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/rdb/db_instance_test.go (96%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/rdb/db_security_group.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/rdb/db_security_group_test.go (95%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/sslcertificate/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/sslcertificate/adapt_test.go (89%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/sslcertificate/server_certificate.go (100%) rename {internal => pkg/iac}/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go (96%) rename {internal => pkg/iac}/adapters/terraform/openstack/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/openstack/adapt_test.go (98%) rename {internal => pkg/iac}/adapters/terraform/openstack/networking.go (100%) rename {internal => pkg/iac}/adapters/terraform/openstack/networking_test.go (97%) rename {internal => pkg/iac}/adapters/terraform/oracle/adapt.go (100%) rename {internal => pkg/iac}/adapters/terraform/tftestutil/testutil.go (78%) rename pkg/{ => iac}/detection/detect.go (98%) rename 
pkg/{ => iac}/detection/detect_test.go (100%) rename pkg/{ => iac}/detection/peek.go (100%) rename pkg/{ => iac}/detection/testdata/big.file (100%) rename pkg/{ => iac}/detection/testdata/small.file (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/bench_test.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/decode.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/decode_array.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/decode_boolean.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/decode_meta_test.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/decode_null.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/decode_number.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/decode_object.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/decode_string.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/kind.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/node.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/parse.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/parse_array.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/parse_array_test.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/parse_boolean.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/parse_boolean_test.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/parse_comment.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/parse_complex_test.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/parse_null.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/parse_null_test.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/parse_number.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/parse_number_test.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/parse_object.go (100%) 
rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/parse_object_test.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/parse_string.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/parse_string_test.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/parse_whitespace.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/reader.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/reader_test.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/armjson/unmarshal.go (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/parser.go (79%) rename pkg/{ => iac}/scanners/azure/arm/parser/parser_test.go (77%) rename pkg/{ => iac}/scanners/azure/arm/parser/template.go (93%) rename pkg/{ => iac}/scanners/azure/arm/parser/template_test.go (93%) rename pkg/{ => iac}/scanners/azure/arm/parser/testdata/example.json (100%) rename pkg/{ => iac}/scanners/azure/arm/parser/testdata/postgres.json (100%) rename pkg/{ => iac}/scanners/azure/arm/scanner.go (95%) rename pkg/{ => iac}/scanners/azure/deployment.go (100%) rename pkg/{ => iac}/scanners/azure/expressions/lex.go (100%) rename pkg/{ => iac}/scanners/azure/expressions/node.go (75%) rename pkg/{ => iac}/scanners/azure/expressions/token_walker.go (100%) rename pkg/{ => iac}/scanners/azure/functions/add.go (100%) rename pkg/{ => iac}/scanners/azure/functions/add_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/and.go (100%) rename pkg/{ => iac}/scanners/azure/functions/and_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/array.go (100%) rename pkg/{ => iac}/scanners/azure/functions/array_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/base64.go (100%) rename pkg/{ => iac}/scanners/azure/functions/base64_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/bool.go (100%) rename pkg/{ => iac}/scanners/azure/functions/bool_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/casing.go (100%) rename pkg/{ => 
iac}/scanners/azure/functions/casing_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/coalesce.go (100%) rename pkg/{ => iac}/scanners/azure/functions/coalesce_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/concat.go (100%) rename pkg/{ => iac}/scanners/azure/functions/concat_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/contains.go (100%) rename pkg/{ => iac}/scanners/azure/functions/contains_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/copy_index.go (100%) rename pkg/{ => iac}/scanners/azure/functions/copy_index_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/create_array.go (100%) rename pkg/{ => iac}/scanners/azure/functions/create_array_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/create_object.go (100%) rename pkg/{ => iac}/scanners/azure/functions/create_object_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/data_uri.go (100%) rename pkg/{ => iac}/scanners/azure/functions/data_uri_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/date_time_add.go (100%) rename pkg/{ => iac}/scanners/azure/functions/date_time_epoch.go (100%) rename pkg/{ => iac}/scanners/azure/functions/date_time_epoch_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/datetime_add_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/deployment.go (100%) rename pkg/{ => iac}/scanners/azure/functions/div.go (100%) rename pkg/{ => iac}/scanners/azure/functions/div_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/empty.go (100%) rename pkg/{ => iac}/scanners/azure/functions/empty_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/ends_with.go (100%) rename pkg/{ => iac}/scanners/azure/functions/ends_with_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/equals.go (100%) rename pkg/{ => iac}/scanners/azure/functions/equals_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/false.go (100%) rename pkg/{ => 
iac}/scanners/azure/functions/first.go (100%) rename pkg/{ => iac}/scanners/azure/functions/first_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/float.go (100%) rename pkg/{ => iac}/scanners/azure/functions/float_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/format.go (100%) rename pkg/{ => iac}/scanners/azure/functions/format_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/functions.go (100%) rename pkg/{ => iac}/scanners/azure/functions/greater.go (100%) rename pkg/{ => iac}/scanners/azure/functions/greater_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/guid.go (100%) rename pkg/{ => iac}/scanners/azure/functions/guid_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/if.go (100%) rename pkg/{ => iac}/scanners/azure/functions/if_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/index_of.go (100%) rename pkg/{ => iac}/scanners/azure/functions/index_of_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/int.go (100%) rename pkg/{ => iac}/scanners/azure/functions/int_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/intersection.go (100%) rename pkg/{ => iac}/scanners/azure/functions/intersection_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/items.go (100%) rename pkg/{ => iac}/scanners/azure/functions/join.go (100%) rename pkg/{ => iac}/scanners/azure/functions/join_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/json.go (100%) rename pkg/{ => iac}/scanners/azure/functions/json_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/last.go (100%) rename pkg/{ => iac}/scanners/azure/functions/last_index_of.go (100%) rename pkg/{ => iac}/scanners/azure/functions/last_index_of_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/last_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/length.go (100%) rename pkg/{ => iac}/scanners/azure/functions/length_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/less.go 
(100%) rename pkg/{ => iac}/scanners/azure/functions/less_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/max.go (100%) rename pkg/{ => iac}/scanners/azure/functions/max_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/min.go (100%) rename pkg/{ => iac}/scanners/azure/functions/min_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/mod.go (100%) rename pkg/{ => iac}/scanners/azure/functions/mod_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/mul.go (100%) rename pkg/{ => iac}/scanners/azure/functions/mul_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/not.go (100%) rename pkg/{ => iac}/scanners/azure/functions/not_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/null.go (100%) rename pkg/{ => iac}/scanners/azure/functions/null_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/or.go (100%) rename pkg/{ => iac}/scanners/azure/functions/or_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/pad.go (100%) rename pkg/{ => iac}/scanners/azure/functions/pad_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/parameters.go (100%) rename pkg/{ => iac}/scanners/azure/functions/pick_zones.go (100%) rename pkg/{ => iac}/scanners/azure/functions/pick_zones_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/range.go (100%) rename pkg/{ => iac}/scanners/azure/functions/range_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/reference.go (100%) rename pkg/{ => iac}/scanners/azure/functions/reference_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/replace.go (100%) rename pkg/{ => iac}/scanners/azure/functions/replace_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/resource.go (100%) rename pkg/{ => iac}/scanners/azure/functions/resource_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/scope.go (100%) rename pkg/{ => iac}/scanners/azure/functions/scope_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/skip.go 
(100%) rename pkg/{ => iac}/scanners/azure/functions/skip_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/split.go (100%) rename pkg/{ => iac}/scanners/azure/functions/split_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/starts_with.go (100%) rename pkg/{ => iac}/scanners/azure/functions/starts_with_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/string.go (100%) rename pkg/{ => iac}/scanners/azure/functions/string_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/sub.go (100%) rename pkg/{ => iac}/scanners/azure/functions/sub_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/substring.go (100%) rename pkg/{ => iac}/scanners/azure/functions/substring_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/take.go (100%) rename pkg/{ => iac}/scanners/azure/functions/take_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/trim.go (100%) rename pkg/{ => iac}/scanners/azure/functions/trim_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/true.go (100%) rename pkg/{ => iac}/scanners/azure/functions/union.go (100%) rename pkg/{ => iac}/scanners/azure/functions/union_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/unique_string.go (100%) rename pkg/{ => iac}/scanners/azure/functions/unique_string_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/uri.go (100%) rename pkg/{ => iac}/scanners/azure/functions/uri_test.go (100%) rename pkg/{ => iac}/scanners/azure/functions/utc_now.go (100%) rename pkg/{ => iac}/scanners/azure/functions/utc_now_test.go (100%) rename pkg/{ => iac}/scanners/azure/resolver/resolver.go (52%) rename pkg/{ => iac}/scanners/azure/resolver/resolver_test.go (84%) rename pkg/{ => iac}/scanners/azure/value.go (91%) rename pkg/{ => iac}/scanners/azure/value_test.go (100%) rename pkg/{ => iac}/scanners/cloudformation/cftypes/types.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/errors.go (100%) rename pkg/{ => 
iac}/scanners/cloudformation/parser/file_context.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/file_context_test.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_and.go (89%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_and_test.go (97%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_base64.go (82%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_base64_test.go (90%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_builtin.go (95%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_builtin_test.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_condition.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_condition_test.go (96%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_equals.go (87%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_equals_test.go (98%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_find_in_map.go (94%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_find_in_map_test.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_get_attr.go (93%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_get_attr_test.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_if.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_if_test.go (93%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_join.go (90%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_join_test.go (97%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_length.go (84%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_length_test.go (96%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_not.go (85%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_not_test.go (97%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_or.go (89%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_or_test.go (97%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_ref.go (93%) rename pkg/{ => 
iac}/scanners/cloudformation/parser/fn_ref_test.go (96%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_select.go (92%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_select_test.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_split.go (93%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_split_test.go (93%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_sub.go (96%) rename pkg/{ => iac}/scanners/cloudformation/parser/fn_sub_test.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/intrinsics.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/intrinsics_test.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/parameter.go (97%) rename pkg/{ => iac}/scanners/cloudformation/parser/parameters_test.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/parser.go (98%) rename pkg/{ => iac}/scanners/cloudformation/parser/parser_test.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/property.go (99%) rename pkg/{ => iac}/scanners/cloudformation/parser/property_conversion.go (97%) rename pkg/{ => iac}/scanners/cloudformation/parser/property_helpers.go (98%) rename pkg/{ => iac}/scanners/cloudformation/parser/property_helpers_test.go (98%) rename pkg/{ => iac}/scanners/cloudformation/parser/pseudo_parameters.go (92%) rename pkg/{ => iac}/scanners/cloudformation/parser/pseudo_parameters_test.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/reference.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/resource.go (100%) rename pkg/{ => iac}/scanners/cloudformation/parser/resource_test.go (95%) rename pkg/{ => iac}/scanners/cloudformation/parser/util.go (95%) rename pkg/{ => iac}/scanners/cloudformation/scanner.go (92%) rename pkg/{ => iac}/scanners/cloudformation/scanner_test.go (100%) rename pkg/{ => iac}/scanners/cloudformation/test/cf_scanning_test.go (95%) rename pkg/{ => iac}/scanners/cloudformation/test/examples/bucket/bucket.yaml (100%) rename pkg/{ 
=> iac}/scanners/cloudformation/test/examples/ignores/bucket_with_ignores.yaml (100%) rename pkg/{ => iac}/scanners/cloudformation/test/examples/roles/roles.yml (100%) rename pkg/{ => iac}/scanners/dockerfile/parser/parser.go (98%) rename pkg/{ => iac}/scanners/dockerfile/parser/parser_test.go (100%) rename pkg/{ => iac}/scanners/dockerfile/scanner.go (97%) rename pkg/{ => iac}/scanners/dockerfile/scanner_test.go (100%) rename pkg/{ => iac}/scanners/helm/options.go (95%) rename pkg/{ => iac}/scanners/helm/parser/option.go (100%) rename pkg/{ => iac}/scanners/helm/parser/parser.go (97%) rename pkg/{ => iac}/scanners/helm/parser/parser_tar.go (98%) rename pkg/{ => iac}/scanners/helm/parser/parser_test.go (100%) rename pkg/{ => iac}/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart-0.1.0.tgz (100%) rename pkg/{ => iac}/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/Chart.yaml (100%) rename pkg/{ => iac}/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/templates/pod.yaml (100%) rename pkg/{ => iac}/scanners/helm/parser/vals.go (100%) rename pkg/{ => iac}/scanners/helm/scanner.go (95%) rename pkg/{ => iac}/scanners/helm/test/mysql/.helmignore (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/Chart.lock (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/Chart.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/README.md (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/.helmignore (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/Chart.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/README.md (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/_affinities.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/_capabilities.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/_errors.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/_images.tpl (100%) 
rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/_ingress.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/_labels.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/_names.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/_secrets.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/_storage.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/_tplvalues.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/_utils.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/_warnings.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/validations/_cassandra.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/validations/_mariadb.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/validations/_mongodb.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/validations/_postgresql.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/validations/_redis.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/templates/validations/_validations.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/charts/common/values.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/ci/values-production-with-rbac.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/NOTES.txt (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/_helpers.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/extra-list.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/metrics-svc.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/networkpolicy.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/primary/configmap.yaml (100%) rename pkg/{ => 
iac}/scanners/helm/test/mysql/templates/primary/initialization-configmap.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/primary/pdb.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/primary/statefulset.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/primary/svc-headless.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/primary/svc.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/role.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/rolebinding.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/secondary/configmap.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/secondary/pdb.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/secondary/statefulset.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/secondary/svc-headless.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/secondary/svc.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/secrets.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/serviceaccount.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/templates/servicemonitor.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/values.schema.json (100%) rename pkg/{ => iac}/scanners/helm/test/mysql/values.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/option_test.go (88%) rename pkg/{ => iac}/scanners/helm/test/parser_test.go (97%) rename pkg/{ => iac}/scanners/helm/test/scanner_test.go (99%) rename pkg/{ => iac}/scanners/helm/test/testdata/aws-cluster-autoscaler-bad.tar.gz (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/expected/mysql/templates/primary/configmap.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/expected/mysql/templates/primary/statefulset.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/expected/mysql/templates/primary/svc-headless.yaml (100%) rename pkg/{ => 
iac}/scanners/helm/test/testdata/expected/mysql/templates/primary/svc.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/expected/mysql/templates/secrets.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/expected/mysql/templates/serviceaccount.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/expected/options/testchart/templates/deployment.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/expected/options/testchart/templates/service.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/expected/options/testchart/templates/serviceaccount.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/expected/options/with-api-version/templates/pdb.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/expected/testchart/templates/deployment.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/expected/testchart/templates/service.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/expected/testchart/templates/serviceaccount.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/expected/with-tarred-dep/templates/deployment.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/expected/with-tarred-dep/templates/ingress.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/expected/with-tarred-dep/templates/service.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/mysql-8.8.26.tar (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/mysql-8.8.26.tar.gz (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/mysql-8.8.26.tgz (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/nope.tgz (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/numberName/Chart.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/simmilar-templates/Chart.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/simmilar-templates/templates/deployment.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/simmilar-templates/templates/manifest.yaml (100%) rename pkg/{ => 
iac}/scanners/helm/test/testdata/templated-name/Chart.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/testchart/.helmignore (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/testchart/Chart.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/testchart/templates/NOTES.txt (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/testchart/templates/_helpers.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/testchart/templates/deployment.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/testchart/templates/hpa.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/testchart/templates/ingress.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/testchart/templates/service.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/testchart/templates/serviceaccount.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/testchart/templates/tests/test-connection.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/testchart/values.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/with-api-version/.helmignore (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/with-api-version/Chart.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/with-api-version/templates/_helpers.tpl (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/with-api-version/templates/pdb.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/with-api-version/values.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/with-tarred-dep/.helmignore (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/with-tarred-dep/Chart.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/with-tarred-dep/LICENSE (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/with-tarred-dep/charts/common-1.16.1.tgz (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/with-tarred-dep/renovate.json (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/with-tarred-dep/templates/.gitkeep (100%) rename 
pkg/{ => iac}/scanners/helm/test/testdata/with-tarred-dep/templates/deployment.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/with-tarred-dep/templates/ingress.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-crdb-ca.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-dbconn.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/with-tarred-dep/templates/service.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/testdata/with-tarred-dep/values.yaml (100%) rename pkg/{ => iac}/scanners/helm/test/values/values.yaml (100%) rename pkg/{ => iac}/scanners/json/parser/parser.go (97%) rename pkg/{ => iac}/scanners/json/parser/parser_test.go (100%) rename pkg/{ => iac}/scanners/json/scanner.go (97%) rename pkg/{ => iac}/scanners/json/scanner_test.go (100%) rename pkg/{ => iac}/scanners/kubernetes/parser/manifest.go (100%) rename pkg/{ => iac}/scanners/kubernetes/parser/manifest_node.go (97%) rename pkg/{ => iac}/scanners/kubernetes/parser/parser.go (98%) rename pkg/{ => iac}/scanners/kubernetes/scanner.go (97%) rename pkg/{ => iac}/scanners/kubernetes/scanner_test.go (100%) rename pkg/{ => iac}/scanners/scanner.go (100%) rename pkg/{ => iac}/scanners/terraform/executor/executor.go (99%) rename pkg/{ => iac}/scanners/terraform/executor/executor_test.go (89%) rename pkg/{ => iac}/scanners/terraform/executor/option.go (100%) rename pkg/{ => iac}/scanners/terraform/executor/pool.go (100%) rename pkg/{ => iac}/scanners/terraform/executor/statistics.go (100%) rename pkg/{ => iac}/scanners/terraform/options.go (97%) rename pkg/{ => iac}/scanners/terraform/parser/evaluator.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/evaluator_test.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/funcs/cidr.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/funcs/collection.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/funcs/conversion.go (100%) rename 
pkg/{ => iac}/scanners/terraform/parser/funcs/crypto.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/funcs/datetime.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/funcs/defaults.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/funcs/encoding.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/funcs/filesystem.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/funcs/marks.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/funcs/number.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/funcs/sensitive.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/funcs/string.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/functions.go (52%) rename pkg/{ => iac}/scanners/terraform/parser/load_blocks.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/load_blocks_test.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/load_module.go (98%) rename pkg/{ => iac}/scanners/terraform/parser/load_module_metadata.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/load_vars.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/load_vars_test.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/module_retrieval.go (63%) rename pkg/{ => iac}/scanners/terraform/parser/option.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/parser.go (97%) rename pkg/{ => iac}/scanners/terraform/parser/parser_integration_test.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/parser_test.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/resolvers/cache.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/resolvers/local.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/resolvers/options.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/resolvers/registry.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/resolvers/remote.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/resolvers/writable.go (100%) rename pkg/{ => 
iac}/scanners/terraform/parser/resolvers/writable_windows.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/sort.go (100%) rename pkg/{ => iac}/scanners/terraform/parser/testdata/tfvars/terraform.tfvars (100%) rename pkg/{ => iac}/scanners/terraform/parser/testdata/tfvars/terraform.tfvars.json (100%) rename pkg/{ => iac}/scanners/terraform/scanner.go (92%) rename pkg/{ => iac}/scanners/terraform/scanner_integration_test.go (100%) rename pkg/{ => iac}/scanners/terraform/scanner_test.go (100%) rename pkg/{ => iac}/scanners/terraformplan/parser/option.go (100%) rename pkg/{ => iac}/scanners/terraformplan/parser/parser.go (100%) rename pkg/{ => iac}/scanners/terraformplan/parser/plan_file.go (100%) rename pkg/{ => iac}/scanners/terraformplan/scanner.go (94%) rename pkg/{ => iac}/scanners/terraformplan/scanner_test.go (100%) rename pkg/{ => iac}/scanners/terraformplan/test/parser_test.go (83%) rename pkg/{ => iac}/scanners/terraformplan/test/scanner_test.go (93%) rename pkg/{ => iac}/scanners/terraformplan/test/testdata/plan.json (100%) rename pkg/{ => iac}/scanners/toml/parser/parser.go (97%) rename pkg/{ => iac}/scanners/toml/parser/parser_test.go (100%) rename pkg/{ => iac}/scanners/toml/scanner.go (98%) rename pkg/{ => iac}/scanners/toml/scanner_test.go (100%) rename pkg/{ => iac}/scanners/universal/scanner.go (65%) rename pkg/{ => iac}/scanners/yaml/parser/parser.go (97%) rename pkg/{ => iac}/scanners/yaml/parser/parser_test.go (100%) rename pkg/{ => iac}/scanners/yaml/scanner.go (98%) rename pkg/{ => iac}/scanners/yaml/scanner_test.go (100%) diff --git a/internal/adapters/cloudformation/aws/adapt.go b/internal/adapters/cloudformation/aws/adapt.go deleted file mode 100644 index c92c0a276930..000000000000 --- a/internal/adapters/cloudformation/aws/adapt.go +++ /dev/null @@ -1,74 +0,0 @@ -package aws - -import ( - "github.com/aquasecurity/defsec/pkg/providers/aws" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/apigateway" 
- "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/athena" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/cloudfront" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/cloudtrail" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/cloudwatch" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/codebuild" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/config" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/documentdb" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/dynamodb" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/ec2" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/ecr" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/ecs" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/efs" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/eks" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/elasticache" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/elasticsearch" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/elb" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/iam" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/kinesis" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/lambda" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/mq" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/msk" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/neptune" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/rds" - 
"github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/redshift" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/s3" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/sam" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/sns" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/sqs" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/ssm" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/workspaces" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" -) - -// Adapt adapts a Cloudformation AWS instance -func Adapt(cfFile parser.FileContext) aws.AWS { - return aws.AWS{ - APIGateway: apigateway.Adapt(cfFile), - Athena: athena.Adapt(cfFile), - Cloudfront: cloudfront.Adapt(cfFile), - CloudTrail: cloudtrail.Adapt(cfFile), - CloudWatch: cloudwatch.Adapt(cfFile), - CodeBuild: codebuild.Adapt(cfFile), - Config: config.Adapt(cfFile), - DocumentDB: documentdb.Adapt(cfFile), - DynamoDB: dynamodb.Adapt(cfFile), - EC2: ec2.Adapt(cfFile), - ECR: ecr.Adapt(cfFile), - ECS: ecs.Adapt(cfFile), - EFS: efs.Adapt(cfFile), - IAM: iam.Adapt(cfFile), - EKS: eks.Adapt(cfFile), - ElastiCache: elasticache.Adapt(cfFile), - Elasticsearch: elasticsearch.Adapt(cfFile), - ELB: elb.Adapt(cfFile), - MSK: msk.Adapt(cfFile), - MQ: mq.Adapt(cfFile), - Kinesis: kinesis.Adapt(cfFile), - Lambda: lambda.Adapt(cfFile), - Neptune: neptune.Adapt(cfFile), - RDS: rds.Adapt(cfFile), - Redshift: redshift.Adapt(cfFile), - S3: s3.Adapt(cfFile), - SAM: sam.Adapt(cfFile), - SNS: sns.Adapt(cfFile), - SQS: sqs.Adapt(cfFile), - SSM: ssm.Adapt(cfFile), - WorkSpaces: workspaces.Adapt(cfFile), - } -} diff --git a/internal/adapters/terraform/adapt.go b/internal/adapters/terraform/adapt.go deleted file mode 100644 index bbd27e88279d..000000000000 --- a/internal/adapters/terraform/adapt.go +++ /dev/null @@ -1,31 +0,0 
@@ -package terraform - -import ( - "github.com/aquasecurity/defsec/pkg/state" - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws" - "github.com/aquasecurity/trivy/internal/adapters/terraform/azure" - "github.com/aquasecurity/trivy/internal/adapters/terraform/cloudstack" - "github.com/aquasecurity/trivy/internal/adapters/terraform/digitalocean" - "github.com/aquasecurity/trivy/internal/adapters/terraform/github" - "github.com/aquasecurity/trivy/internal/adapters/terraform/google" - "github.com/aquasecurity/trivy/internal/adapters/terraform/kubernetes" - "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud" - "github.com/aquasecurity/trivy/internal/adapters/terraform/openstack" - "github.com/aquasecurity/trivy/internal/adapters/terraform/oracle" -) - -func Adapt(modules terraform.Modules) *state.State { - return &state.State{ - AWS: aws.Adapt(modules), - Azure: azure.Adapt(modules), - CloudStack: cloudstack.Adapt(modules), - DigitalOcean: digitalocean.Adapt(modules), - GitHub: github.Adapt(modules), - Google: google.Adapt(modules), - Kubernetes: kubernetes.Adapt(modules), - Nifcloud: nifcloud.Adapt(modules), - OpenStack: openstack.Adapt(modules), - Oracle: oracle.Adapt(modules), - } -} diff --git a/internal/adapters/terraform/aws/adapt.go b/internal/adapters/terraform/aws/adapt.go deleted file mode 100644 index e18ec4dc1633..000000000000 --- a/internal/adapters/terraform/aws/adapt.go +++ /dev/null @@ -1,79 +0,0 @@ -package aws - -import ( - "github.com/aquasecurity/defsec/pkg/providers/aws" - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/apigateway" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/athena" - 
"github.com/aquasecurity/trivy/internal/adapters/terraform/aws/cloudfront" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/cloudtrail" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/cloudwatch" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/codebuild" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/config" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/documentdb" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/dynamodb" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/ec2" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/ecr" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/ecs" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/efs" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/eks" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/elasticache" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/elasticsearch" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/elb" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/emr" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/iam" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/kinesis" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/kms" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/lambda" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/mq" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/msk" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/neptune" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/provider" - 
"github.com/aquasecurity/trivy/internal/adapters/terraform/aws/rds" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/redshift" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/s3" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/sns" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/sqs" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/ssm" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/workspaces" -) - -func Adapt(modules terraform.Modules) aws.AWS { - return aws.AWS{ - Meta: aws.Meta{ - TFProviders: provider.Adapt(modules), - }, - APIGateway: apigateway.Adapt(modules), - Athena: athena.Adapt(modules), - Cloudfront: cloudfront.Adapt(modules), - CloudTrail: cloudtrail.Adapt(modules), - CloudWatch: cloudwatch.Adapt(modules), - CodeBuild: codebuild.Adapt(modules), - Config: config.Adapt(modules), - DocumentDB: documentdb.Adapt(modules), - DynamoDB: dynamodb.Adapt(modules), - EC2: ec2.Adapt(modules), - ECR: ecr.Adapt(modules), - ECS: ecs.Adapt(modules), - EFS: efs.Adapt(modules), - EKS: eks.Adapt(modules), - ElastiCache: elasticache.Adapt(modules), - Elasticsearch: elasticsearch.Adapt(modules), - ELB: elb.Adapt(modules), - EMR: emr.Adapt(modules), - IAM: iam.Adapt(modules), - Kinesis: kinesis.Adapt(modules), - KMS: kms.Adapt(modules), - Lambda: lambda.Adapt(modules), - MQ: mq.Adapt(modules), - MSK: msk.Adapt(modules), - Neptune: neptune.Adapt(modules), - RDS: rds.Adapt(modules), - Redshift: redshift.Adapt(modules), - S3: s3.Adapt(modules), - SNS: sns.Adapt(modules), - SQS: sqs.Adapt(modules), - SSM: ssm.Adapt(modules), - WorkSpaces: workspaces.Adapt(modules), - } -} diff --git a/internal/adapters/terraform/azure/adapt.go b/internal/adapters/terraform/azure/adapt.go deleted file mode 100644 index 1aa5198e1ca3..000000000000 --- a/internal/adapters/terraform/azure/adapt.go +++ /dev/null @@ -1,37 +0,0 @@ -package azure - 
-import ( - "github.com/aquasecurity/defsec/pkg/providers/azure" - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/trivy/internal/adapters/terraform/azure/appservice" - "github.com/aquasecurity/trivy/internal/adapters/terraform/azure/authorization" - "github.com/aquasecurity/trivy/internal/adapters/terraform/azure/compute" - "github.com/aquasecurity/trivy/internal/adapters/terraform/azure/container" - "github.com/aquasecurity/trivy/internal/adapters/terraform/azure/database" - "github.com/aquasecurity/trivy/internal/adapters/terraform/azure/datafactory" - "github.com/aquasecurity/trivy/internal/adapters/terraform/azure/datalake" - "github.com/aquasecurity/trivy/internal/adapters/terraform/azure/keyvault" - "github.com/aquasecurity/trivy/internal/adapters/terraform/azure/monitor" - "github.com/aquasecurity/trivy/internal/adapters/terraform/azure/network" - "github.com/aquasecurity/trivy/internal/adapters/terraform/azure/securitycenter" - "github.com/aquasecurity/trivy/internal/adapters/terraform/azure/storage" - "github.com/aquasecurity/trivy/internal/adapters/terraform/azure/synapse" -) - -func Adapt(modules terraform.Modules) azure.Azure { - return azure.Azure{ - AppService: appservice.Adapt(modules), - Authorization: authorization.Adapt(modules), - Compute: compute.Adapt(modules), - Container: container.Adapt(modules), - Database: database.Adapt(modules), - DataFactory: datafactory.Adapt(modules), - DataLake: datalake.Adapt(modules), - KeyVault: keyvault.Adapt(modules), - Monitor: monitor.Adapt(modules), - Network: network.Adapt(modules), - SecurityCenter: securitycenter.Adapt(modules), - Storage: storage.Adapt(modules), - Synapse: synapse.Adapt(modules), - } -} diff --git a/internal/adapters/terraform/google/adapt.go b/internal/adapters/terraform/google/adapt.go deleted file mode 100644 index 1c0d8b0f1c16..000000000000 
--- a/internal/adapters/terraform/google/adapt.go +++ /dev/null @@ -1,27 +0,0 @@ -package google - -import ( - "github.com/aquasecurity/defsec/pkg/providers/google" - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/trivy/internal/adapters/terraform/google/bigquery" - "github.com/aquasecurity/trivy/internal/adapters/terraform/google/compute" - "github.com/aquasecurity/trivy/internal/adapters/terraform/google/dns" - "github.com/aquasecurity/trivy/internal/adapters/terraform/google/gke" - "github.com/aquasecurity/trivy/internal/adapters/terraform/google/iam" - "github.com/aquasecurity/trivy/internal/adapters/terraform/google/kms" - "github.com/aquasecurity/trivy/internal/adapters/terraform/google/sql" - "github.com/aquasecurity/trivy/internal/adapters/terraform/google/storage" -) - -func Adapt(modules terraform.Modules) google.Google { - return google.Google{ - BigQuery: bigquery.Adapt(modules), - Compute: compute.Adapt(modules), - DNS: dns.Adapt(modules), - GKE: gke.Adapt(modules), - KMS: kms.Adapt(modules), - IAM: iam.Adapt(modules), - SQL: sql.Adapt(modules), - Storage: storage.Adapt(modules), - } -} diff --git a/pkg/fanal/analyzer/config/terraform/terraform.go b/pkg/fanal/analyzer/config/terraform/terraform.go index 96fb3bb47a07..363d35de87fe 100644 --- a/pkg/fanal/analyzer/config/terraform/terraform.go +++ b/pkg/fanal/analyzer/config/terraform/terraform.go @@ -3,9 +3,9 @@ package terraform import ( "os" - "github.com/aquasecurity/trivy/pkg/detection" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config" + "github.com/aquasecurity/trivy/pkg/iac/detection" "github.com/aquasecurity/trivy/pkg/misconf" ) diff --git a/pkg/fanal/analyzer/const.go b/pkg/fanal/analyzer/const.go index 0cd02c4290f3..93ad337ec83d 100644 --- a/pkg/fanal/analyzer/const.go +++ 
b/pkg/fanal/analyzer/const.go @@ -1,6 +1,8 @@ package analyzer -import "github.com/aquasecurity/trivy/pkg/detection" +import ( + "github.com/aquasecurity/trivy/pkg/iac/detection" +) type Type string diff --git a/internal/adapters/arm/adapt.go b/pkg/iac/adapters/arm/adapt.go similarity index 52% rename from internal/adapters/arm/adapt.go rename to pkg/iac/adapters/arm/adapt.go index c5d36af965bd..f8985a51d61f 100644 --- a/internal/adapters/arm/adapt.go +++ b/pkg/iac/adapters/arm/adapt.go @@ -5,20 +5,20 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/azure" "github.com/aquasecurity/defsec/pkg/state" - "github.com/aquasecurity/trivy/internal/adapters/arm/appservice" - "github.com/aquasecurity/trivy/internal/adapters/arm/authorization" - "github.com/aquasecurity/trivy/internal/adapters/arm/compute" - "github.com/aquasecurity/trivy/internal/adapters/arm/container" - "github.com/aquasecurity/trivy/internal/adapters/arm/database" - "github.com/aquasecurity/trivy/internal/adapters/arm/datafactory" - "github.com/aquasecurity/trivy/internal/adapters/arm/datalake" - "github.com/aquasecurity/trivy/internal/adapters/arm/keyvault" - "github.com/aquasecurity/trivy/internal/adapters/arm/monitor" - "github.com/aquasecurity/trivy/internal/adapters/arm/network" - "github.com/aquasecurity/trivy/internal/adapters/arm/securitycenter" - "github.com/aquasecurity/trivy/internal/adapters/arm/storage" - "github.com/aquasecurity/trivy/internal/adapters/arm/synapse" - scanner "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/adapters/arm/appservice" + "github.com/aquasecurity/trivy/pkg/iac/adapters/arm/authorization" + "github.com/aquasecurity/trivy/pkg/iac/adapters/arm/compute" + "github.com/aquasecurity/trivy/pkg/iac/adapters/arm/container" + 
"github.com/aquasecurity/trivy/pkg/iac/adapters/arm/database" + "github.com/aquasecurity/trivy/pkg/iac/adapters/arm/datafactory" + "github.com/aquasecurity/trivy/pkg/iac/adapters/arm/datalake" + "github.com/aquasecurity/trivy/pkg/iac/adapters/arm/keyvault" + "github.com/aquasecurity/trivy/pkg/iac/adapters/arm/monitor" + "github.com/aquasecurity/trivy/pkg/iac/adapters/arm/network" + "github.com/aquasecurity/trivy/pkg/iac/adapters/arm/securitycenter" + "github.com/aquasecurity/trivy/pkg/iac/adapters/arm/storage" + "github.com/aquasecurity/trivy/pkg/iac/adapters/arm/synapse" + scanner "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) // Adapt adapts an azure arm instance diff --git a/internal/adapters/arm/appservice/adapt.go b/pkg/iac/adapters/arm/appservice/adapt.go similarity index 97% rename from internal/adapters/arm/appservice/adapt.go rename to pkg/iac/adapters/arm/appservice/adapt.go index dbcf431f7b6e..78922d30f281 100644 --- a/internal/adapters/arm/appservice/adapt.go +++ b/pkg/iac/adapters/arm/appservice/adapt.go @@ -3,7 +3,7 @@ package appservice import ( "github.com/aquasecurity/defsec/pkg/providers/azure/appservice" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func Adapt(deployment azure.Deployment) appservice.AppService { diff --git a/internal/adapters/arm/authorization/adapt.go b/pkg/iac/adapters/arm/authorization/adapt.go similarity index 95% rename from internal/adapters/arm/authorization/adapt.go rename to pkg/iac/adapters/arm/authorization/adapt.go index 12b08e45ce43..aa5a2e80d642 100644 --- a/internal/adapters/arm/authorization/adapt.go +++ b/pkg/iac/adapters/arm/authorization/adapt.go @@ -2,7 +2,7 @@ package authorization import ( "github.com/aquasecurity/defsec/pkg/providers/azure/authorization" - 
"github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func Adapt(deployment azure.Deployment) authorization.Authorization { diff --git a/internal/adapters/arm/compute/adapt.go b/pkg/iac/adapters/arm/compute/adapt.go similarity index 98% rename from internal/adapters/arm/compute/adapt.go rename to pkg/iac/adapters/arm/compute/adapt.go index 97940d367670..bc072571c7f3 100644 --- a/internal/adapters/arm/compute/adapt.go +++ b/pkg/iac/adapters/arm/compute/adapt.go @@ -3,7 +3,7 @@ package compute import ( "github.com/aquasecurity/defsec/pkg/providers/azure/compute" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func Adapt(deployment azure.Deployment) compute.Compute { diff --git a/internal/adapters/arm/compute/adapt_test.go b/pkg/iac/adapters/arm/compute/adapt_test.go similarity index 50% rename from internal/adapters/arm/compute/adapt_test.go rename to pkg/iac/adapters/arm/compute/adapt_test.go index 18b2f8528c83..03cd1a52d504 100644 --- a/internal/adapters/arm/compute/adapt_test.go +++ b/pkg/iac/adapters/arm/compute/adapt_test.go @@ -4,8 +4,7 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/azure" - + azure2 "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -13,14 +12,14 @@ import ( func Test_AdaptLinuxVM(t *testing.T) { - input := azure.Deployment{ - Resources: []azure.Resource{ + input := azure2.Deployment{ + Resources: []azure2.Resource{ { - Type: azure.NewValue("Microsoft.Compute/virtualMachines", types.NewTestMetadata()), - Properties: azure.NewValue(map[string]azure.Value{ - "osProfile": azure.NewValue(map[string]azure.Value{ - "linuxConfiguration": 
azure.NewValue(map[string]azure.Value{ - "disablePasswordAuthentication": azure.NewValue(true, types.NewTestMetadata()), + Type: azure2.NewValue("Microsoft.Compute/virtualMachines", types.NewTestMetadata()), + Properties: azure2.NewValue(map[string]azure2.Value{ + "osProfile": azure2.NewValue(map[string]azure2.Value{ + "linuxConfiguration": azure2.NewValue(map[string]azure2.Value{ + "disablePasswordAuthentication": azure2.NewValue(true, types.NewTestMetadata()), }, types.NewTestMetadata()), }, types.NewTestMetadata()), }, types.NewTestMetadata()), @@ -40,13 +39,13 @@ func Test_AdaptLinuxVM(t *testing.T) { func Test_AdaptWindowsVM(t *testing.T) { - input := azure.Deployment{ - Resources: []azure.Resource{ + input := azure2.Deployment{ + Resources: []azure2.Resource{ { - Type: azure.NewValue("Microsoft.Compute/virtualMachines", types.NewTestMetadata()), - Properties: azure.NewValue(map[string]azure.Value{ - "osProfile": azure.NewValue(map[string]azure.Value{ - "windowsConfiguration": azure.NewValue(map[string]azure.Value{}, types.NewTestMetadata()), + Type: azure2.NewValue("Microsoft.Compute/virtualMachines", types.NewTestMetadata()), + Properties: azure2.NewValue(map[string]azure2.Value{ + "osProfile": azure2.NewValue(map[string]azure2.Value{ + "windowsConfiguration": azure2.NewValue(map[string]azure2.Value{}, types.NewTestMetadata()), }, types.NewTestMetadata()), }, types.NewTestMetadata()), }, diff --git a/internal/adapters/arm/container/adapt.go b/pkg/iac/adapters/arm/container/adapt.go similarity index 86% rename from internal/adapters/arm/container/adapt.go rename to pkg/iac/adapters/arm/container/adapt.go index 90acaf2c5539..0748411ad282 100644 --- a/internal/adapters/arm/container/adapt.go +++ b/pkg/iac/adapters/arm/container/adapt.go @@ -2,7 +2,7 @@ package container import ( "github.com/aquasecurity/defsec/pkg/providers/azure/container" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func Adapt(deployment azure.Deployment) container.Container { diff --git a/internal/adapters/arm/database/adapt.go b/pkg/iac/adapters/arm/database/adapt.go similarity index 96% rename from internal/adapters/arm/database/adapt.go rename to pkg/iac/adapters/arm/database/adapt.go index 84606f53359f..7c32428847a6 100644 --- a/internal/adapters/arm/database/adapt.go +++ b/pkg/iac/adapters/arm/database/adapt.go @@ -2,7 +2,7 @@ package database import ( "github.com/aquasecurity/defsec/pkg/providers/azure/database" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func Adapt(deployment azure.Deployment) database.Database { diff --git a/internal/adapters/arm/database/firewall.go b/pkg/iac/adapters/arm/database/firewall.go similarity index 90% rename from internal/adapters/arm/database/firewall.go rename to pkg/iac/adapters/arm/database/firewall.go index 3f4ca50f272e..c25412871a30 100644 --- a/internal/adapters/arm/database/firewall.go +++ b/pkg/iac/adapters/arm/database/firewall.go @@ -2,7 +2,7 @@ package database import ( "github.com/aquasecurity/defsec/pkg/providers/azure/database" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func addFirewallRule(resource azure.Resource) []database.FirewallRule { diff --git a/internal/adapters/arm/database/maria.go b/pkg/iac/adapters/arm/database/maria.go similarity index 95% rename from internal/adapters/arm/database/maria.go rename to pkg/iac/adapters/arm/database/maria.go index e645c3fe2230..853426ad84bd 100644 --- a/internal/adapters/arm/database/maria.go +++ b/pkg/iac/adapters/arm/database/maria.go @@ -2,7 +2,7 @@ package database import ( "github.com/aquasecurity/defsec/pkg/providers/azure/database" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func adaptMariaDBServers(deployment azure.Deployment) (mariaDbServers []database.MariaDBServer) { diff --git a/internal/adapters/arm/database/mssql.go b/pkg/iac/adapters/arm/database/mssql.go similarity index 77% rename from internal/adapters/arm/database/mssql.go rename to pkg/iac/adapters/arm/database/mssql.go index 6e598b5439b9..08b2ccdc2bec 100644 --- a/internal/adapters/arm/database/mssql.go +++ b/pkg/iac/adapters/arm/database/mssql.go @@ -3,17 +3,17 @@ package database import ( "github.com/aquasecurity/defsec/pkg/providers/azure/database" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + azure2 "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) -func adaptMSSQLServers(deployment azure.Deployment) (msSQlServers []database.MSSQLServer) { +func adaptMSSQLServers(deployment azure2.Deployment) (msSQlServers []database.MSSQLServer) { for _, resource := range deployment.GetResourcesByType("Microsoft.Sql/servers") { msSQlServers = append(msSQlServers, adaptMSSQLServer(resource, deployment)) } return msSQlServers } -func adaptMSSQLServer(resource azure.Resource, deployment azure.Deployment) database.MSSQLServer { +func adaptMSSQLServer(resource azure2.Resource, deployment azure2.Deployment) database.MSSQLServer { return database.MSSQLServer{ Metadata: resource.Metadata, Server: database.Server{ @@ -28,7 +28,7 @@ func adaptMSSQLServer(resource azure.Resource, deployment azure.Deployment) data } } -func adaptExtendedAuditingPolicies(resource azure.Resource, deployment azure.Deployment) (policies []database.ExtendedAuditingPolicy) { +func adaptExtendedAuditingPolicies(resource azure2.Resource, deployment azure2.Deployment) (policies []database.ExtendedAuditingPolicy) { for _, policy := range deployment.GetResourcesByType("Microsoft.Sql/servers/extendedAuditingSettings") { policies = append(policies, 
database.ExtendedAuditingPolicy{ @@ -40,7 +40,7 @@ func adaptExtendedAuditingPolicies(resource azure.Resource, deployment azure.Dep return policies } -func adaptSecurityAlertPolicies(resource azure.Resource, deployment azure.Deployment) (policies []database.SecurityAlertPolicy) { +func adaptSecurityAlertPolicies(resource azure2.Resource, deployment azure2.Deployment) (policies []database.SecurityAlertPolicy) { for _, policy := range deployment.GetResourcesByType("Microsoft.Sql/servers/securityAlertPolicies") { policies = append(policies, database.SecurityAlertPolicy{ Metadata: policy.Metadata, @@ -52,7 +52,7 @@ func adaptSecurityAlertPolicies(resource azure.Resource, deployment azure.Deploy return policies } -func adaptStringList(value azure.Value) []defsecTypes.StringValue { +func adaptStringList(value azure2.Value) []defsecTypes.StringValue { var list []defsecTypes.StringValue for _, v := range value.AsList() { list = append(list, v.AsStringValue("", value.Metadata)) diff --git a/internal/adapters/arm/database/postgresql.go b/pkg/iac/adapters/arm/database/postgresql.go similarity index 97% rename from internal/adapters/arm/database/postgresql.go rename to pkg/iac/adapters/arm/database/postgresql.go index d7847eae334a..b4e37e7c43c2 100644 --- a/internal/adapters/arm/database/postgresql.go +++ b/pkg/iac/adapters/arm/database/postgresql.go @@ -6,7 +6,7 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/azure/database" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func adaptPostgreSQLServers(deployment azure.Deployment) (databases []database.PostgreSQLServer) { diff --git a/internal/adapters/arm/datafactory/adapt.go b/pkg/iac/adapters/arm/datafactory/adapt.go similarity index 93% rename from internal/adapters/arm/datafactory/adapt.go rename to pkg/iac/adapters/arm/datafactory/adapt.go index 
f39bdf39a433..dfc94b537311 100644 --- a/internal/adapters/arm/datafactory/adapt.go +++ b/pkg/iac/adapters/arm/datafactory/adapt.go @@ -2,7 +2,7 @@ package datafactory import ( "github.com/aquasecurity/defsec/pkg/providers/azure/datafactory" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func Adapt(deployment azure.Deployment) datafactory.DataFactory { diff --git a/internal/adapters/arm/datalake/adapt.go b/pkg/iac/adapters/arm/datalake/adapt.go similarity index 92% rename from internal/adapters/arm/datalake/adapt.go rename to pkg/iac/adapters/arm/datalake/adapt.go index facb5d5d0be3..b11d43618f25 100644 --- a/internal/adapters/arm/datalake/adapt.go +++ b/pkg/iac/adapters/arm/datalake/adapt.go @@ -2,7 +2,7 @@ package datalake import ( "github.com/aquasecurity/defsec/pkg/providers/azure/datalake" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func Adapt(deployment azure.Deployment) datalake.DataLake { diff --git a/internal/adapters/arm/keyvault/adapt.go b/pkg/iac/adapters/arm/keyvault/adapt.go similarity index 97% rename from internal/adapters/arm/keyvault/adapt.go rename to pkg/iac/adapters/arm/keyvault/adapt.go index a64da026af3e..b1eef216cf85 100644 --- a/internal/adapters/arm/keyvault/adapt.go +++ b/pkg/iac/adapters/arm/keyvault/adapt.go @@ -2,7 +2,7 @@ package keyvault import ( "github.com/aquasecurity/defsec/pkg/providers/azure/keyvault" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func Adapt(deployment azure.Deployment) keyvault.KeyVault { diff --git a/internal/adapters/arm/monitor/adapt.go b/pkg/iac/adapters/arm/monitor/adapt.go similarity index 96% rename from internal/adapters/arm/monitor/adapt.go rename to pkg/iac/adapters/arm/monitor/adapt.go index e6a2afb341b3..271bc2ea58d8 100644 --- 
a/internal/adapters/arm/monitor/adapt.go +++ b/pkg/iac/adapters/arm/monitor/adapt.go @@ -3,7 +3,7 @@ package monitor import ( "github.com/aquasecurity/defsec/pkg/providers/azure/monitor" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func Adapt(deployment azure.Deployment) monitor.Monitor { diff --git a/internal/adapters/arm/network/adapt.go b/pkg/iac/adapters/arm/network/adapt.go similarity index 98% rename from internal/adapters/arm/network/adapt.go rename to pkg/iac/adapters/arm/network/adapt.go index fe92618143b9..2ed036c193d2 100644 --- a/internal/adapters/arm/network/adapt.go +++ b/pkg/iac/adapters/arm/network/adapt.go @@ -6,7 +6,7 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/azure/network" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func Adapt(deployment azure.Deployment) network.Network { diff --git a/internal/adapters/arm/securitycenter/adapt.go b/pkg/iac/adapters/arm/securitycenter/adapt.go similarity index 96% rename from internal/adapters/arm/securitycenter/adapt.go rename to pkg/iac/adapters/arm/securitycenter/adapt.go index ee3f73cad064..dfa44e943cf8 100644 --- a/internal/adapters/arm/securitycenter/adapt.go +++ b/pkg/iac/adapters/arm/securitycenter/adapt.go @@ -2,7 +2,7 @@ package securitycenter import ( "github.com/aquasecurity/defsec/pkg/providers/azure/securitycenter" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func Adapt(deployment azure.Deployment) securitycenter.SecurityCenter { diff --git a/internal/adapters/arm/storage/adapt.go b/pkg/iac/adapters/arm/storage/adapt.go similarity index 97% rename from internal/adapters/arm/storage/adapt.go rename to 
pkg/iac/adapters/arm/storage/adapt.go index 5d2387a85d3e..10d60b1f3cd0 100644 --- a/internal/adapters/arm/storage/adapt.go +++ b/pkg/iac/adapters/arm/storage/adapt.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/azure/storage" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func Adapt(deployment azure.Deployment) storage.Storage { diff --git a/internal/adapters/arm/storage/adapt_test.go b/pkg/iac/adapters/arm/storage/adapt_test.go similarity index 51% rename from internal/adapters/arm/storage/adapt_test.go rename to pkg/iac/adapters/arm/storage/adapt_test.go index 494863b607a9..4735af9cdf47 100644 --- a/internal/adapters/arm/storage/adapt_test.go +++ b/pkg/iac/adapters/arm/storage/adapt_test.go @@ -3,8 +3,7 @@ package storage import ( "testing" - "github.com/aquasecurity/trivy/pkg/scanners/azure" - + azure2 "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" "github.com/stretchr/testify/assert" "github.com/aquasecurity/defsec/pkg/types" @@ -14,11 +13,11 @@ import ( func Test_AdaptStorageDefaults(t *testing.T) { - input := azure.Deployment{ - Resources: []azure.Resource{ + input := azure2.Deployment{ + Resources: []azure2.Resource{ { - Type: azure.NewValue("Microsoft.Storage/storageAccounts", types.NewTestMetadata()), - Properties: azure.NewValue(map[string]azure.Value{}, types.NewTestMetadata()), + Type: azure2.NewValue("Microsoft.Storage/storageAccounts", types.NewTestMetadata()), + Properties: azure2.NewValue(map[string]azure2.Value{}, types.NewTestMetadata()), }, }, } @@ -35,14 +34,14 @@ func Test_AdaptStorageDefaults(t *testing.T) { func Test_AdaptStorage(t *testing.T) { - input := azure.Deployment{ - Resources: []azure.Resource{ + input := azure2.Deployment{ + Resources: []azure2.Resource{ { - Type: azure.NewValue("Microsoft.Storage/storageAccounts", 
types.NewTestMetadata()), - Name: azure.Value{}, - Properties: azure.NewValue(map[string]azure.Value{ - "minimumTlsVersion": azure.NewValue("TLS1_2", types.NewTestMetadata()), - "supportsHttpsTrafficOnly": azure.NewValue(true, types.NewTestMetadata()), + Type: azure2.NewValue("Microsoft.Storage/storageAccounts", types.NewTestMetadata()), + Name: azure2.Value{}, + Properties: azure2.NewValue(map[string]azure2.Value{ + "minimumTlsVersion": azure2.NewValue("TLS1_2", types.NewTestMetadata()), + "supportsHttpsTrafficOnly": azure2.NewValue(true, types.NewTestMetadata()), }, types.NewTestMetadata()), }, }, diff --git a/internal/adapters/arm/synapse/adapt.go b/pkg/iac/adapters/arm/synapse/adapt.go similarity index 94% rename from internal/adapters/arm/synapse/adapt.go rename to pkg/iac/adapters/arm/synapse/adapt.go index f67dbaf5bfef..e295772091bc 100644 --- a/internal/adapters/arm/synapse/adapt.go +++ b/pkg/iac/adapters/arm/synapse/adapt.go @@ -3,7 +3,7 @@ package synapse import ( "github.com/aquasecurity/defsec/pkg/providers/azure/synapse" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func Adapt(deployment azure.Deployment) synapse.Synapse { diff --git a/internal/adapters/cloudformation/adapt.go b/pkg/iac/adapters/cloudformation/adapt.go similarity index 61% rename from internal/adapters/cloudformation/adapt.go rename to pkg/iac/adapters/cloudformation/adapt.go index 7fc5fe2c81ee..8b7eb58e4933 100644 --- a/internal/adapters/cloudformation/adapt.go +++ b/pkg/iac/adapters/cloudformation/adapt.go @@ -2,8 +2,8 @@ package cloudformation import ( "github.com/aquasecurity/defsec/pkg/state" - "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts the Cloudformation instance diff --git a/internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go b/pkg/iac/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go similarity index 80% rename from internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go rename to pkg/iac/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go index dac3695f1270..3afc0b146756 100644 --- a/internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go +++ b/pkg/iac/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go @@ -2,7 +2,7 @@ package accessanalyzer import ( "github.com/aquasecurity/defsec/pkg/providers/aws/accessanalyzer" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an AccessAnalyzer instance diff --git a/internal/adapters/cloudformation/aws/accessanalyzer/analyzer.go b/pkg/iac/adapters/cloudformation/aws/accessanalyzer/analyzer.go similarity index 89% rename from internal/adapters/cloudformation/aws/accessanalyzer/analyzer.go rename to pkg/iac/adapters/cloudformation/aws/accessanalyzer/analyzer.go index 50cdd9c920b0..c592f1348ad7 100644 --- a/internal/adapters/cloudformation/aws/accessanalyzer/analyzer.go +++ b/pkg/iac/adapters/cloudformation/aws/accessanalyzer/analyzer.go @@ -3,7 +3,7 @@ package accessanalyzer import ( "github.com/aquasecurity/defsec/pkg/providers/aws/accessanalyzer" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getAccessAnalyzer(ctx parser.FileContext) (analyzers []accessanalyzer.Analyzer) { diff --git a/pkg/iac/adapters/cloudformation/aws/adapt.go b/pkg/iac/adapters/cloudformation/aws/adapt.go new 
file mode 100644 index 000000000000..18f36e2fa9ed --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/adapt.go @@ -0,0 +1,74 @@ +package aws + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/apigateway" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/athena" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/cloudfront" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/cloudtrail" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/cloudwatch" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/codebuild" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/config" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/documentdb" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/dynamodb" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/ec2" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/ecr" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/ecs" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/efs" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/eks" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/elasticache" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/elasticsearch" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/elb" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/iam" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/kinesis" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/lambda" + 
"github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/mq" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/msk" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/neptune" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/rds" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/redshift" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/s3" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/sam" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/sns" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/sqs" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/ssm" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/aws/workspaces" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt adapts a Cloudformation AWS instance +func Adapt(cfFile parser.FileContext) aws.AWS { + return aws.AWS{ + APIGateway: apigateway.Adapt(cfFile), + Athena: athena.Adapt(cfFile), + Cloudfront: cloudfront.Adapt(cfFile), + CloudTrail: cloudtrail.Adapt(cfFile), + CloudWatch: cloudwatch.Adapt(cfFile), + CodeBuild: codebuild.Adapt(cfFile), + Config: config.Adapt(cfFile), + DocumentDB: documentdb.Adapt(cfFile), + DynamoDB: dynamodb.Adapt(cfFile), + EC2: ec2.Adapt(cfFile), + ECR: ecr.Adapt(cfFile), + ECS: ecs.Adapt(cfFile), + EFS: efs.Adapt(cfFile), + IAM: iam.Adapt(cfFile), + EKS: eks.Adapt(cfFile), + ElastiCache: elasticache.Adapt(cfFile), + Elasticsearch: elasticsearch.Adapt(cfFile), + ELB: elb.Adapt(cfFile), + MSK: msk.Adapt(cfFile), + MQ: mq.Adapt(cfFile), + Kinesis: kinesis.Adapt(cfFile), + Lambda: lambda.Adapt(cfFile), + Neptune: neptune.Adapt(cfFile), + RDS: rds.Adapt(cfFile), + Redshift: redshift.Adapt(cfFile), + S3: s3.Adapt(cfFile), + SAM: sam.Adapt(cfFile), + SNS: 
sns.Adapt(cfFile), + SQS: sqs.Adapt(cfFile), + SSM: ssm.Adapt(cfFile), + WorkSpaces: workspaces.Adapt(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/apigateway/apigateway.go b/pkg/iac/adapters/cloudformation/aws/apigateway/apigateway.go similarity index 87% rename from internal/adapters/cloudformation/aws/apigateway/apigateway.go rename to pkg/iac/adapters/cloudformation/aws/apigateway/apigateway.go index 0004eff9a096..fbe540024123 100644 --- a/internal/adapters/cloudformation/aws/apigateway/apigateway.go +++ b/pkg/iac/adapters/cloudformation/aws/apigateway/apigateway.go @@ -4,7 +4,7 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway" v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an APIGateway instance diff --git a/internal/adapters/cloudformation/aws/apigateway/stage.go b/pkg/iac/adapters/cloudformation/aws/apigateway/stage.go similarity index 85% rename from internal/adapters/cloudformation/aws/apigateway/stage.go rename to pkg/iac/adapters/cloudformation/aws/apigateway/stage.go index 550df03de608..7c8360899a4b 100644 --- a/internal/adapters/cloudformation/aws/apigateway/stage.go +++ b/pkg/iac/adapters/cloudformation/aws/apigateway/stage.go @@ -3,10 +3,10 @@ package apigateway import ( v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getApis(cfFile parser.FileContext) (apis []v2.API) { +func getApis(cfFile parser2.FileContext) (apis []v2.API) { apiResources := 
cfFile.GetResourcesByType("AWS::ApiGatewayV2::Api") for _, apiRes := range apiResources { @@ -22,7 +22,7 @@ func getApis(cfFile parser.FileContext) (apis []v2.API) { return apis } -func getStages(apiId string, cfFile parser.FileContext) []v2.Stage { +func getStages(apiId string, cfFile parser2.FileContext) []v2.Stage { var apiStages []v2.Stage stageResources := cfFile.GetResourcesByType("AWS::ApiGatewayV2::Stage") @@ -43,7 +43,7 @@ func getStages(apiId string, cfFile parser.FileContext) []v2.Stage { return apiStages } -func getAccessLogging(r *parser.Resource) v2.AccessLogging { +func getAccessLogging(r *parser2.Resource) v2.AccessLogging { loggingProp := r.GetProperty("AccessLogSettings") if loggingProp.IsNil() { diff --git a/internal/adapters/cloudformation/aws/athena/athena.go b/pkg/iac/adapters/cloudformation/aws/athena/athena.go similarity index 78% rename from internal/adapters/cloudformation/aws/athena/athena.go rename to pkg/iac/adapters/cloudformation/aws/athena/athena.go index 37f42512567c..14c8254e01a9 100644 --- a/internal/adapters/cloudformation/aws/athena/athena.go +++ b/pkg/iac/adapters/cloudformation/aws/athena/athena.go @@ -2,7 +2,7 @@ package athena import ( "github.com/aquasecurity/defsec/pkg/providers/aws/athena" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an Athena instance diff --git a/internal/adapters/cloudformation/aws/athena/workgroup.go b/pkg/iac/adapters/cloudformation/aws/athena/workgroup.go similarity index 91% rename from internal/adapters/cloudformation/aws/athena/workgroup.go rename to pkg/iac/adapters/cloudformation/aws/athena/workgroup.go index fa4f2219b82f..b62eef37566a 100644 --- a/internal/adapters/cloudformation/aws/athena/workgroup.go +++ b/pkg/iac/adapters/cloudformation/aws/athena/workgroup.go @@ -2,7 +2,7 @@ package athena import ( 
"github.com/aquasecurity/defsec/pkg/providers/aws/athena" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getWorkGroups(cfFile parser.FileContext) []athena.Workgroup { diff --git a/internal/adapters/cloudformation/aws/cloudfront/cloudfront.go b/pkg/iac/adapters/cloudformation/aws/cloudfront/cloudfront.go similarity index 79% rename from internal/adapters/cloudformation/aws/cloudfront/cloudfront.go rename to pkg/iac/adapters/cloudformation/aws/cloudfront/cloudfront.go index 6f647ccdff7e..dad0b1b6ed3b 100644 --- a/internal/adapters/cloudformation/aws/cloudfront/cloudfront.go +++ b/pkg/iac/adapters/cloudformation/aws/cloudfront/cloudfront.go @@ -2,7 +2,7 @@ package cloudfront import ( "github.com/aquasecurity/defsec/pkg/providers/aws/cloudfront" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts a CloudFront instance diff --git a/internal/adapters/cloudformation/aws/cloudfront/distribution.go b/pkg/iac/adapters/cloudformation/aws/cloudfront/distribution.go similarity index 86% rename from internal/adapters/cloudformation/aws/cloudfront/distribution.go rename to pkg/iac/adapters/cloudformation/aws/cloudfront/distribution.go index e1b23c80794f..c0687d4cffb0 100644 --- a/internal/adapters/cloudformation/aws/cloudfront/distribution.go +++ b/pkg/iac/adapters/cloudformation/aws/cloudfront/distribution.go @@ -3,10 +3,10 @@ package cloudfront import ( "github.com/aquasecurity/defsec/pkg/providers/aws/cloudfront" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getDistributions(ctx parser.FileContext) (distributions []cloudfront.Distribution) 
{ +func getDistributions(ctx parser2.FileContext) (distributions []cloudfront.Distribution) { distributionResources := ctx.GetResourcesByType("AWS::CloudFront::Distribution") @@ -32,7 +32,7 @@ func getDistributions(ctx parser.FileContext) (distributions []cloudfront.Distri return distributions } -func getDefaultCacheBehaviour(r *parser.Resource) cloudfront.CacheBehaviour { +func getDefaultCacheBehaviour(r *parser2.Resource) cloudfront.CacheBehaviour { defaultCache := r.GetProperty("DistributionConfig.DefaultCacheBehavior") if defaultCache.IsNil() { return cloudfront.CacheBehaviour{ diff --git a/internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go b/pkg/iac/adapters/cloudformation/aws/cloudtrail/cloudtrail.go similarity index 78% rename from internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go rename to pkg/iac/adapters/cloudformation/aws/cloudtrail/cloudtrail.go index 7521eeaf57c9..982c68ca5ed4 100644 --- a/internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go +++ b/pkg/iac/adapters/cloudformation/aws/cloudtrail/cloudtrail.go @@ -2,7 +2,7 @@ package cloudtrail import ( "github.com/aquasecurity/defsec/pkg/providers/aws/cloudtrail" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts a CloudTrail instance diff --git a/internal/adapters/cloudformation/aws/cloudtrail/trails.go b/pkg/iac/adapters/cloudformation/aws/cloudtrail/trails.go similarity index 92% rename from internal/adapters/cloudformation/aws/cloudtrail/trails.go rename to pkg/iac/adapters/cloudformation/aws/cloudtrail/trails.go index ebaf6250cda4..60c8f4417187 100644 --- a/internal/adapters/cloudformation/aws/cloudtrail/trails.go +++ b/pkg/iac/adapters/cloudformation/aws/cloudtrail/trails.go @@ -2,7 +2,7 @@ package cloudtrail import ( "github.com/aquasecurity/defsec/pkg/providers/aws/cloudtrail" - 
"github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getCloudTrails(ctx parser.FileContext) (trails []cloudtrail.Trail) { diff --git a/internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go b/pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch.go similarity index 79% rename from internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go rename to pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch.go index 6ad600648a0d..dcc7074008fd 100644 --- a/internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go +++ b/pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch.go @@ -2,7 +2,7 @@ package cloudwatch import ( "github.com/aquasecurity/defsec/pkg/providers/aws/cloudwatch" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts a Cloudwatch instance diff --git a/internal/adapters/cloudformation/aws/cloudwatch/log_group.go b/pkg/iac/adapters/cloudformation/aws/cloudwatch/log_group.go similarity index 90% rename from internal/adapters/cloudformation/aws/cloudwatch/log_group.go rename to pkg/iac/adapters/cloudformation/aws/cloudwatch/log_group.go index bffb0fa361ef..0f513a314006 100644 --- a/internal/adapters/cloudformation/aws/cloudwatch/log_group.go +++ b/pkg/iac/adapters/cloudformation/aws/cloudwatch/log_group.go @@ -3,7 +3,7 @@ package cloudwatch import ( "github.com/aquasecurity/defsec/pkg/providers/aws/cloudwatch" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getLogGroups(ctx parser.FileContext) (logGroups []cloudwatch.LogGroup) { diff --git a/internal/adapters/cloudformation/aws/codebuild/codebuild.go 
b/pkg/iac/adapters/cloudformation/aws/codebuild/codebuild.go similarity index 78% rename from internal/adapters/cloudformation/aws/codebuild/codebuild.go rename to pkg/iac/adapters/cloudformation/aws/codebuild/codebuild.go index 7874f42292b8..1a36b5d1406f 100644 --- a/internal/adapters/cloudformation/aws/codebuild/codebuild.go +++ b/pkg/iac/adapters/cloudformation/aws/codebuild/codebuild.go @@ -2,7 +2,7 @@ package codebuild import ( "github.com/aquasecurity/defsec/pkg/providers/aws/codebuild" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts a CodeBuild instance diff --git a/internal/adapters/cloudformation/aws/codebuild/project.go b/pkg/iac/adapters/cloudformation/aws/codebuild/project.go similarity index 83% rename from internal/adapters/cloudformation/aws/codebuild/project.go rename to pkg/iac/adapters/cloudformation/aws/codebuild/project.go index 7d72c4760c4e..431a369218b1 100644 --- a/internal/adapters/cloudformation/aws/codebuild/project.go +++ b/pkg/iac/adapters/cloudformation/aws/codebuild/project.go @@ -3,10 +3,10 @@ package codebuild import ( "github.com/aquasecurity/defsec/pkg/providers/aws/codebuild" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getProjects(ctx parser.FileContext) (projects []codebuild.Project) { +func getProjects(ctx parser2.FileContext) (projects []codebuild.Project) { projectResources := ctx.GetResourcesByType("AWS::CodeBuild::Project") @@ -23,7 +23,7 @@ func getProjects(ctx parser.FileContext) (projects []codebuild.Project) { return projects } -func getSecondaryArtifactSettings(r *parser.Resource) (secondaryArtifacts []codebuild.ArtifactSettings) { +func getSecondaryArtifactSettings(r *parser2.Resource) 
(secondaryArtifacts []codebuild.ArtifactSettings) { secondaryArtifactsList := r.GetProperty("SecondaryArtifacts") if secondaryArtifactsList.IsNil() || !secondaryArtifactsList.IsList() { return @@ -44,7 +44,7 @@ func getSecondaryArtifactSettings(r *parser.Resource) (secondaryArtifacts []code return secondaryArtifacts } -func getArtifactSettings(r *parser.Resource) codebuild.ArtifactSettings { +func getArtifactSettings(r *parser2.Resource) codebuild.ArtifactSettings { settings := codebuild.ArtifactSettings{ Metadata: r.Metadata(), diff --git a/internal/adapters/cloudformation/aws/config/adapt_test.go b/pkg/iac/adapters/cloudformation/aws/config/adapt_test.go similarity index 95% rename from internal/adapters/cloudformation/aws/config/adapt_test.go rename to pkg/iac/adapters/cloudformation/aws/config/adapt_test.go index 05972445eb12..fdf878d98a26 100644 --- a/internal/adapters/cloudformation/aws/config/adapt_test.go +++ b/pkg/iac/adapters/cloudformation/aws/config/adapt_test.go @@ -6,9 +6,9 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws/config" "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/cloudformation/aws/config/aggregator.go b/pkg/iac/adapters/cloudformation/aws/config/aggregator.go similarity index 80% rename from internal/adapters/cloudformation/aws/config/aggregator.go rename to pkg/iac/adapters/cloudformation/aws/config/aggregator.go index cecea4af59f1..ee29524c22dc 100644 --- a/internal/adapters/cloudformation/aws/config/aggregator.go +++ b/pkg/iac/adapters/cloudformation/aws/config/aggregator.go @@ -3,10 +3,10 @@ package config import ( "github.com/aquasecurity/defsec/pkg/providers/aws/config" defsecTypes 
"github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getConfigurationAggregator(ctx parser.FileContext) config.ConfigurationAggregrator { +func getConfigurationAggregator(ctx parser2.FileContext) config.ConfigurationAggregrator { aggregator := config.ConfigurationAggregrator{ Metadata: defsecTypes.NewUnmanagedMetadata(), @@ -25,7 +25,7 @@ func getConfigurationAggregator(ctx parser.FileContext) config.ConfigurationAggr } } -func isSourcingAllRegions(r *parser.Resource) defsecTypes.BoolValue { +func isSourcingAllRegions(r *parser2.Resource) defsecTypes.BoolValue { accountProp := r.GetProperty("AccountAggregationSources") if accountProp.IsNotNil() && accountProp.IsList() { diff --git a/internal/adapters/cloudformation/aws/config/config.go b/pkg/iac/adapters/cloudformation/aws/config/config.go similarity index 79% rename from internal/adapters/cloudformation/aws/config/config.go rename to pkg/iac/adapters/cloudformation/aws/config/config.go index 26aecd6568ec..164b6ed22f87 100644 --- a/internal/adapters/cloudformation/aws/config/config.go +++ b/pkg/iac/adapters/cloudformation/aws/config/config.go @@ -2,7 +2,7 @@ package config import ( "github.com/aquasecurity/defsec/pkg/providers/aws/config" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts a configurationaggregator instance diff --git a/internal/adapters/cloudformation/aws/documentdb/cluster.go b/pkg/iac/adapters/cloudformation/aws/documentdb/cluster.go similarity index 85% rename from internal/adapters/cloudformation/aws/documentdb/cluster.go rename to pkg/iac/adapters/cloudformation/aws/documentdb/cluster.go index 8f19241e68da..762e90e54e47 100644 --- a/internal/adapters/cloudformation/aws/documentdb/cluster.go +++ 
b/pkg/iac/adapters/cloudformation/aws/documentdb/cluster.go @@ -3,10 +3,10 @@ package documentdb import ( "github.com/aquasecurity/defsec/pkg/providers/aws/documentdb" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getClusters(ctx parser.FileContext) (clusters []documentdb.Cluster) { +func getClusters(ctx parser2.FileContext) (clusters []documentdb.Cluster) { clusterResources := ctx.GetResourcesByType("AWS::DocDB::DBCluster") @@ -28,7 +28,7 @@ func getClusters(ctx parser.FileContext) (clusters []documentdb.Cluster) { return clusters } -func updateInstancesOnCluster(cluster *documentdb.Cluster, ctx parser.FileContext) { +func updateInstancesOnCluster(cluster *documentdb.Cluster, ctx parser2.FileContext) { instanceResources := ctx.GetResourcesByType("AWS::DocDB::DBInstance") @@ -43,7 +43,7 @@ func updateInstancesOnCluster(cluster *documentdb.Cluster, ctx parser.FileContex } } -func getLogExports(r *parser.Resource) (logExports []types.StringValue) { +func getLogExports(r *parser2.Resource) (logExports []types.StringValue) { exportsList := r.GetProperty("EnableCloudwatchLogsExports") diff --git a/internal/adapters/cloudformation/aws/documentdb/documentdb.go b/pkg/iac/adapters/cloudformation/aws/documentdb/documentdb.go similarity index 78% rename from internal/adapters/cloudformation/aws/documentdb/documentdb.go rename to pkg/iac/adapters/cloudformation/aws/documentdb/documentdb.go index 220131b0473a..23e5c7087d9a 100644 --- a/internal/adapters/cloudformation/aws/documentdb/documentdb.go +++ b/pkg/iac/adapters/cloudformation/aws/documentdb/documentdb.go @@ -2,7 +2,7 @@ package documentdb import ( "github.com/aquasecurity/defsec/pkg/providers/aws/documentdb" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adaps a documentDB instance diff --git a/internal/adapters/cloudformation/aws/dynamodb/cluster.go b/pkg/iac/adapters/cloudformation/aws/dynamodb/cluster.go similarity index 93% rename from internal/adapters/cloudformation/aws/dynamodb/cluster.go rename to pkg/iac/adapters/cloudformation/aws/dynamodb/cluster.go index d2a38b59c39b..76e28c971939 100644 --- a/internal/adapters/cloudformation/aws/dynamodb/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/dynamodb/cluster.go @@ -3,7 +3,7 @@ package dynamodb import ( "github.com/aquasecurity/defsec/pkg/providers/aws/dynamodb" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getClusters(file parser.FileContext) (clusters []dynamodb.DAXCluster) { diff --git a/internal/adapters/cloudformation/aws/dynamodb/dynamodb.go b/pkg/iac/adapters/cloudformation/aws/dynamodb/dynamodb.go similarity index 77% rename from internal/adapters/cloudformation/aws/dynamodb/dynamodb.go rename to pkg/iac/adapters/cloudformation/aws/dynamodb/dynamodb.go index 20cf041f6f0c..6129be5b0cb6 100644 --- a/internal/adapters/cloudformation/aws/dynamodb/dynamodb.go +++ b/pkg/iac/adapters/cloudformation/aws/dynamodb/dynamodb.go @@ -2,7 +2,7 @@ package dynamodb import ( "github.com/aquasecurity/defsec/pkg/providers/aws/dynamodb" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts a dynamodb instance diff --git a/internal/adapters/cloudformation/aws/ec2/adapt_test.go b/pkg/iac/adapters/cloudformation/aws/ec2/adapt_test.go similarity index 98% rename from internal/adapters/cloudformation/aws/ec2/adapt_test.go rename to 
pkg/iac/adapters/cloudformation/aws/ec2/adapt_test.go index 3f89796f9601..451915f873a8 100644 --- a/internal/adapters/cloudformation/aws/ec2/adapt_test.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/adapt_test.go @@ -6,9 +6,9 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/cloudformation/aws/ec2/ec2.go b/pkg/iac/adapters/cloudformation/aws/ec2/ec2.go similarity index 88% rename from internal/adapters/cloudformation/aws/ec2/ec2.go rename to pkg/iac/adapters/cloudformation/aws/ec2/ec2.go index 2e01a57079a6..38157fbd3ea4 100644 --- a/internal/adapters/cloudformation/aws/ec2/ec2.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/ec2.go @@ -2,7 +2,7 @@ package ec2 import ( "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an EC2 instance diff --git a/internal/adapters/cloudformation/aws/ec2/instance.go b/pkg/iac/adapters/cloudformation/aws/ec2/instance.go similarity index 85% rename from internal/adapters/cloudformation/aws/ec2/instance.go rename to pkg/iac/adapters/cloudformation/aws/ec2/instance.go index 7bb3637e9bfe..93d9e2eb3783 100644 --- a/internal/adapters/cloudformation/aws/ec2/instance.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/instance.go @@ -3,10 +3,10 @@ package ec2 import ( "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 
"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getInstances(ctx parser.FileContext) (instances []ec2.Instance) { +func getInstances(ctx parser2.FileContext) (instances []ec2.Instance) { instanceResources := ctx.GetResourcesByType("AWS::EC2::Instance") for _, r := range instanceResources { @@ -48,7 +48,7 @@ func getInstances(ctx parser.FileContext) (instances []ec2.Instance) { return instances } -func findRelatedLaunchTemplate(fctx parser.FileContext, r *parser.Resource) (ec2.LaunchTemplate, bool) { +func findRelatedLaunchTemplate(fctx parser2.FileContext, r *parser2.Resource) (ec2.LaunchTemplate, bool) { launchTemplateRef := r.GetProperty("LaunchTemplate.LaunchTemplateName") if launchTemplateRef.IsString() { res := findLaunchTemplateByName(fctx, launchTemplateRef) @@ -69,7 +69,7 @@ func findRelatedLaunchTemplate(fctx parser.FileContext, r *parser.Resource) (ec2 return adaptLaunchTemplate(resource), true } -func findLaunchTemplateByName(fctx parser.FileContext, prop *parser.Property) *parser.Resource { +func findLaunchTemplateByName(fctx parser2.FileContext, prop *parser2.Property) *parser2.Resource { for _, res := range fctx.GetResourcesByType("AWS::EC2::LaunchTemplate") { templateName := res.GetProperty("LaunchTemplateName") if templateName.IsNotString() { @@ -84,7 +84,7 @@ func findLaunchTemplateByName(fctx parser.FileContext, prop *parser.Property) *p return nil } -func getBlockDevices(r *parser.Resource) []*ec2.BlockDevice { +func getBlockDevices(r *parser2.Resource) []*ec2.BlockDevice { var blockDevices []*ec2.BlockDevice devicesProp := r.GetProperty("BlockDeviceMappings") diff --git a/internal/adapters/cloudformation/aws/ec2/launch_configuration.go b/pkg/iac/adapters/cloudformation/aws/ec2/launch_configuration.go similarity index 95% rename from internal/adapters/cloudformation/aws/ec2/launch_configuration.go rename to pkg/iac/adapters/cloudformation/aws/ec2/launch_configuration.go index d9a9137d2deb..21051ad1e7e2 
100644 --- a/internal/adapters/cloudformation/aws/ec2/launch_configuration.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/launch_configuration.go @@ -3,7 +3,7 @@ package ec2 import ( "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getLaunchConfigurations(file parser.FileContext) (launchConfigurations []ec2.LaunchConfiguration) { diff --git a/internal/adapters/cloudformation/aws/ec2/launch_template.go b/pkg/iac/adapters/cloudformation/aws/ec2/launch_template.go similarity index 86% rename from internal/adapters/cloudformation/aws/ec2/launch_template.go rename to pkg/iac/adapters/cloudformation/aws/ec2/launch_template.go index 08c899576f68..5069107c17a3 100644 --- a/internal/adapters/cloudformation/aws/ec2/launch_template.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/launch_template.go @@ -3,10 +3,10 @@ package ec2 import ( "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getLaunchTemplates(file parser.FileContext) (templates []ec2.LaunchTemplate) { +func getLaunchTemplates(file parser2.FileContext) (templates []ec2.LaunchTemplate) { launchConfigResources := file.GetResourcesByType("AWS::EC2::LaunchTemplate") for _, r := range launchConfigResources { @@ -15,7 +15,7 @@ func getLaunchTemplates(file parser.FileContext) (templates []ec2.LaunchTemplate return templates } -func adaptLaunchTemplate(r *parser.Resource) ec2.LaunchTemplate { +func adaptLaunchTemplate(r *parser2.Resource) ec2.LaunchTemplate { launchTemplate := ec2.LaunchTemplate{ Metadata: r.Metadata(), Name: 
r.GetStringProperty("LaunchTemplateName", ""), diff --git a/internal/adapters/cloudformation/aws/ec2/nacl.go b/pkg/iac/adapters/cloudformation/aws/ec2/nacl.go similarity index 97% rename from internal/adapters/cloudformation/aws/ec2/nacl.go rename to pkg/iac/adapters/cloudformation/aws/ec2/nacl.go index c6add109bb7c..6c6de06e9559 100644 --- a/internal/adapters/cloudformation/aws/ec2/nacl.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/nacl.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getNetworkACLs(ctx parser.FileContext) (acls []ec2.NetworkACL) { diff --git a/internal/adapters/cloudformation/aws/ec2/security_group.go b/pkg/iac/adapters/cloudformation/aws/ec2/security_group.go similarity index 87% rename from internal/adapters/cloudformation/aws/ec2/security_group.go rename to pkg/iac/adapters/cloudformation/aws/ec2/security_group.go index 7989c62fab2d..c6447f38a3fa 100644 --- a/internal/adapters/cloudformation/aws/ec2/security_group.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/security_group.go @@ -3,10 +3,10 @@ package ec2 import ( "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getSecurityGroups(ctx parser.FileContext) (groups []ec2.SecurityGroup) { +func getSecurityGroups(ctx parser2.FileContext) (groups []ec2.SecurityGroup) { for _, r := range ctx.GetResourcesByType("AWS::EC2::SecurityGroup") { group := ec2.SecurityGroup{ Metadata: r.Metadata(), @@ -22,7 +22,7 @@ func getSecurityGroups(ctx parser.FileContext) (groups []ec2.SecurityGroup) { return 
groups } -func getIngressRules(r *parser.Resource) (sgRules []ec2.SecurityGroupRule) { +func getIngressRules(r *parser2.Resource) (sgRules []ec2.SecurityGroupRule) { if ingressProp := r.GetProperty("SecurityGroupIngress"); ingressProp.IsList() { for _, ingress := range ingressProp.AsList() { rule := ec2.SecurityGroupRule{ @@ -45,7 +45,7 @@ func getIngressRules(r *parser.Resource) (sgRules []ec2.SecurityGroupRule) { return sgRules } -func getEgressRules(r *parser.Resource) (sgRules []ec2.SecurityGroupRule) { +func getEgressRules(r *parser2.Resource) (sgRules []ec2.SecurityGroupRule) { if egressProp := r.GetProperty("SecurityGroupEgress"); egressProp.IsList() { for _, egress := range egressProp.AsList() { rule := ec2.SecurityGroupRule{ diff --git a/internal/adapters/cloudformation/aws/ec2/subnet.go b/pkg/iac/adapters/cloudformation/aws/ec2/subnet.go similarity index 85% rename from internal/adapters/cloudformation/aws/ec2/subnet.go rename to pkg/iac/adapters/cloudformation/aws/ec2/subnet.go index 364a904a3d24..be75af836593 100644 --- a/internal/adapters/cloudformation/aws/ec2/subnet.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/subnet.go @@ -2,7 +2,7 @@ package ec2 import ( "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getSubnets(ctx parser.FileContext) (subnets []ec2.Subnet) { diff --git a/internal/adapters/cloudformation/aws/ec2/volume.go b/pkg/iac/adapters/cloudformation/aws/ec2/volume.go similarity index 87% rename from internal/adapters/cloudformation/aws/ec2/volume.go rename to pkg/iac/adapters/cloudformation/aws/ec2/volume.go index d45913f01e22..b1e48835a0a6 100644 --- a/internal/adapters/cloudformation/aws/ec2/volume.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/volume.go @@ -2,7 +2,7 @@ package ec2 import ( "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" - 
"github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getVolumes(ctx parser.FileContext) (volumes []ec2.Volume) { diff --git a/internal/adapters/cloudformation/aws/ecr/ecr.go b/pkg/iac/adapters/cloudformation/aws/ecr/ecr.go similarity index 75% rename from internal/adapters/cloudformation/aws/ecr/ecr.go rename to pkg/iac/adapters/cloudformation/aws/ecr/ecr.go index 841911d82280..80119315af78 100644 --- a/internal/adapters/cloudformation/aws/ecr/ecr.go +++ b/pkg/iac/adapters/cloudformation/aws/ecr/ecr.go @@ -2,7 +2,7 @@ package ecr import ( "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an ECR instance diff --git a/internal/adapters/cloudformation/aws/ecr/repository.go b/pkg/iac/adapters/cloudformation/aws/ecr/repository.go similarity index 89% rename from internal/adapters/cloudformation/aws/ecr/repository.go rename to pkg/iac/adapters/cloudformation/aws/ecr/repository.go index 029feab3f877..7e4a3710c70d 100644 --- a/internal/adapters/cloudformation/aws/ecr/repository.go +++ b/pkg/iac/adapters/cloudformation/aws/ecr/repository.go @@ -3,15 +3,15 @@ package ecr import ( "fmt" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -func getRepositories(ctx parser.FileContext) (repositories []ecr.Repository) { +func getRepositories(ctx parser2.FileContext) (repositories []ecr.Repository) { repositoryResources := 
ctx.GetResourcesByType("AWS::ECR::Repository") @@ -57,7 +57,7 @@ func getRepositories(ctx parser.FileContext) (repositories []ecr.Repository) { return repositories } -func getPolicy(r *parser.Resource) (*iam.Policy, error) { +func getPolicy(r *parser2.Resource) (*iam.Policy, error) { policyProp := r.GetProperty("RepositoryPolicyText") if policyProp.IsNil() { return nil, fmt.Errorf("missing policy") @@ -79,7 +79,7 @@ func getPolicy(r *parser.Resource) (*iam.Policy, error) { }, nil } -func hasImmutableImageTags(r *parser.Resource) defsecTypes.BoolValue { +func hasImmutableImageTags(r *parser2.Resource) defsecTypes.BoolValue { mutabilityProp := r.GetProperty("ImageTagMutability") if mutabilityProp.IsNil() { return defsecTypes.BoolDefault(false, r.Metadata()) diff --git a/internal/adapters/cloudformation/aws/ecs/cluster.go b/pkg/iac/adapters/cloudformation/aws/ecs/cluster.go similarity index 79% rename from internal/adapters/cloudformation/aws/ecs/cluster.go rename to pkg/iac/adapters/cloudformation/aws/ecs/cluster.go index 1a512eee4226..c2c584ddb92f 100644 --- a/internal/adapters/cloudformation/aws/ecs/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/ecs/cluster.go @@ -3,10 +3,10 @@ package ecs import ( "github.com/aquasecurity/defsec/pkg/providers/aws/ecs" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getClusters(ctx parser.FileContext) (clusters []ecs.Cluster) { +func getClusters(ctx parser2.FileContext) (clusters []ecs.Cluster) { clusterResources := ctx.GetResourcesByType("AWS::ECS::Cluster") @@ -24,7 +24,7 @@ func getClusters(ctx parser.FileContext) (clusters []ecs.Cluster) { return clusters } -func getClusterSettings(r *parser.Resource) ecs.ClusterSettings { +func getClusterSettings(r *parser2.Resource) ecs.ClusterSettings { clusterSettings := ecs.ClusterSettings{ Metadata: 
r.Metadata(), @@ -45,7 +45,7 @@ func getClusterSettings(r *parser.Resource) ecs.ClusterSettings { return clusterSettings } -func checkProperty(setting *parser.Property, clusterSettings *ecs.ClusterSettings) { +func checkProperty(setting *parser2.Property, clusterSettings *ecs.ClusterSettings) { settingMap := setting.AsMap() name := settingMap["Name"] if name.IsNotNil() && name.EqualTo("containerInsights") { diff --git a/internal/adapters/cloudformation/aws/ecs/ecs.go b/pkg/iac/adapters/cloudformation/aws/ecs/ecs.go similarity index 78% rename from internal/adapters/cloudformation/aws/ecs/ecs.go rename to pkg/iac/adapters/cloudformation/aws/ecs/ecs.go index 350e2e0dd61d..a41930f9e439 100644 --- a/internal/adapters/cloudformation/aws/ecs/ecs.go +++ b/pkg/iac/adapters/cloudformation/aws/ecs/ecs.go @@ -2,7 +2,7 @@ package ecs import ( "github.com/aquasecurity/defsec/pkg/providers/aws/ecs" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an ECS instance diff --git a/internal/adapters/cloudformation/aws/ecs/task_definition.go b/pkg/iac/adapters/cloudformation/aws/ecs/task_definition.go similarity index 88% rename from internal/adapters/cloudformation/aws/ecs/task_definition.go rename to pkg/iac/adapters/cloudformation/aws/ecs/task_definition.go index e1855353a228..fbee3e51c284 100644 --- a/internal/adapters/cloudformation/aws/ecs/task_definition.go +++ b/pkg/iac/adapters/cloudformation/aws/ecs/task_definition.go @@ -3,10 +3,10 @@ package ecs import ( "github.com/aquasecurity/defsec/pkg/providers/aws/ecs" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getTaskDefinitions(ctx parser.FileContext) (taskDefinitions []ecs.TaskDefinition) { +func 
getTaskDefinitions(ctx parser2.FileContext) (taskDefinitions []ecs.TaskDefinition) { taskDefResources := ctx.GetResourcesByType("AWS::ECS::TaskDefinition") @@ -23,7 +23,7 @@ func getTaskDefinitions(ctx parser.FileContext) (taskDefinitions []ecs.TaskDefin return taskDefinitions } -func getContainerDefinitions(r *parser.Resource) ([]ecs.ContainerDefinition, error) { +func getContainerDefinitions(r *parser2.Resource) ([]ecs.ContainerDefinition, error) { var definitions []ecs.ContainerDefinition containerDefs := r.GetProperty("ContainerDefinitions") if containerDefs.IsNil() || containerDefs.IsNotList() { @@ -60,7 +60,7 @@ func getContainerDefinitions(r *parser.Resource) ([]ecs.ContainerDefinition, err return definitions, nil } -func getVolumes(r *parser.Resource) (volumes []ecs.Volume) { +func getVolumes(r *parser2.Resource) (volumes []ecs.Volume) { volumesList := r.GetProperty("Volumes") if volumesList.IsNil() || volumesList.IsNotList() { @@ -76,7 +76,7 @@ func getVolumes(r *parser.Resource) (volumes []ecs.Volume) { }, } transitProp := v.GetProperty("EFSVolumeConfiguration.TransitEncryption") - if transitProp.IsNotNil() && transitProp.EqualTo("enabled", parser.IgnoreCase) { + if transitProp.IsNotNil() && transitProp.EqualTo("enabled", parser2.IgnoreCase) { volume.EFSVolumeConfiguration.TransitEncryptionEnabled = types.Bool(true, transitProp.Metadata()) } diff --git a/internal/adapters/cloudformation/aws/efs/efs.go b/pkg/iac/adapters/cloudformation/aws/efs/efs.go similarity index 75% rename from internal/adapters/cloudformation/aws/efs/efs.go rename to pkg/iac/adapters/cloudformation/aws/efs/efs.go index e7b02370a646..78026eacdf6f 100644 --- a/internal/adapters/cloudformation/aws/efs/efs.go +++ b/pkg/iac/adapters/cloudformation/aws/efs/efs.go @@ -2,7 +2,7 @@ package efs import ( "github.com/aquasecurity/defsec/pkg/providers/aws/efs" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an EFS instance diff --git a/internal/adapters/cloudformation/aws/efs/filesystem.go b/pkg/iac/adapters/cloudformation/aws/efs/filesystem.go similarity index 86% rename from internal/adapters/cloudformation/aws/efs/filesystem.go rename to pkg/iac/adapters/cloudformation/aws/efs/filesystem.go index 372a6fd9e03f..9ef3c6d13fb1 100644 --- a/internal/adapters/cloudformation/aws/efs/filesystem.go +++ b/pkg/iac/adapters/cloudformation/aws/efs/filesystem.go @@ -2,7 +2,7 @@ package efs import ( "github.com/aquasecurity/defsec/pkg/providers/aws/efs" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getFileSystems(ctx parser.FileContext) (filesystems []efs.FileSystem) { diff --git a/internal/adapters/cloudformation/aws/eks/cluster.go b/pkg/iac/adapters/cloudformation/aws/eks/cluster.go similarity index 88% rename from internal/adapters/cloudformation/aws/eks/cluster.go rename to pkg/iac/adapters/cloudformation/aws/eks/cluster.go index ae814c7529e0..56cbba5f04a0 100644 --- a/internal/adapters/cloudformation/aws/eks/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/eks/cluster.go @@ -3,10 +3,10 @@ package eks import ( "github.com/aquasecurity/defsec/pkg/providers/aws/eks" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getClusters(ctx parser.FileContext) (clusters []eks.Cluster) { +func getClusters(ctx parser2.FileContext) (clusters []eks.Cluster) { clusterResources := ctx.GetResourcesByType("AWS::EKS::Cluster") @@ -33,7 +33,7 @@ func getClusters(ctx parser.FileContext) (clusters []eks.Cluster) { return clusters } -func getEncryptionConfig(r *parser.Resource) 
eks.Encryption { +func getEncryptionConfig(r *parser2.Resource) eks.Encryption { encryption := eks.Encryption{ Metadata: r.Metadata(), diff --git a/internal/adapters/cloudformation/aws/eks/eks.go b/pkg/iac/adapters/cloudformation/aws/eks/eks.go similarity index 75% rename from internal/adapters/cloudformation/aws/eks/eks.go rename to pkg/iac/adapters/cloudformation/aws/eks/eks.go index d1b13a865e24..5f6e4a987a91 100644 --- a/internal/adapters/cloudformation/aws/eks/eks.go +++ b/pkg/iac/adapters/cloudformation/aws/eks/eks.go @@ -2,7 +2,7 @@ package eks import ( "github.com/aquasecurity/defsec/pkg/providers/aws/eks" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an EKS instance diff --git a/internal/adapters/cloudformation/aws/elasticache/cluster.go b/pkg/iac/adapters/cloudformation/aws/elasticache/cluster.go similarity index 89% rename from internal/adapters/cloudformation/aws/elasticache/cluster.go rename to pkg/iac/adapters/cloudformation/aws/elasticache/cluster.go index 572fe4c02043..28414a62348a 100644 --- a/internal/adapters/cloudformation/aws/elasticache/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/elasticache/cluster.go @@ -2,7 +2,7 @@ package elasticache import ( "github.com/aquasecurity/defsec/pkg/providers/aws/elasticache" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getClusterGroups(ctx parser.FileContext) (clusters []elasticache.Cluster) { diff --git a/internal/adapters/cloudformation/aws/elasticache/elasticache.go b/pkg/iac/adapters/cloudformation/aws/elasticache/elasticache.go similarity index 84% rename from internal/adapters/cloudformation/aws/elasticache/elasticache.go rename to pkg/iac/adapters/cloudformation/aws/elasticache/elasticache.go index 856c45b25667..a84322e221e8 100644 
--- a/internal/adapters/cloudformation/aws/elasticache/elasticache.go +++ b/pkg/iac/adapters/cloudformation/aws/elasticache/elasticache.go @@ -2,7 +2,7 @@ package elasticache import ( "github.com/aquasecurity/defsec/pkg/providers/aws/elasticache" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an ElasticCache instance diff --git a/internal/adapters/cloudformation/aws/elasticache/replication_group.go b/pkg/iac/adapters/cloudformation/aws/elasticache/replication_group.go similarity index 90% rename from internal/adapters/cloudformation/aws/elasticache/replication_group.go rename to pkg/iac/adapters/cloudformation/aws/elasticache/replication_group.go index 12e22ff31314..3910a377ebf4 100644 --- a/internal/adapters/cloudformation/aws/elasticache/replication_group.go +++ b/pkg/iac/adapters/cloudformation/aws/elasticache/replication_group.go @@ -2,7 +2,7 @@ package elasticache import ( "github.com/aquasecurity/defsec/pkg/providers/aws/elasticache" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getReplicationGroups(ctx parser.FileContext) (replicationGroups []elasticache.ReplicationGroup) { diff --git a/internal/adapters/cloudformation/aws/elasticache/security_group.go b/pkg/iac/adapters/cloudformation/aws/elasticache/security_group.go similarity index 87% rename from internal/adapters/cloudformation/aws/elasticache/security_group.go rename to pkg/iac/adapters/cloudformation/aws/elasticache/security_group.go index c25b02f158b2..6e51796ff935 100644 --- a/internal/adapters/cloudformation/aws/elasticache/security_group.go +++ b/pkg/iac/adapters/cloudformation/aws/elasticache/security_group.go @@ -2,7 +2,7 @@ package elasticache import ( "github.com/aquasecurity/defsec/pkg/providers/aws/elasticache" - 
"github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getSecurityGroups(ctx parser.FileContext) (securityGroups []elasticache.SecurityGroup) { diff --git a/internal/adapters/cloudformation/aws/elasticsearch/domain.go b/pkg/iac/adapters/cloudformation/aws/elasticsearch/domain.go similarity index 97% rename from internal/adapters/cloudformation/aws/elasticsearch/domain.go rename to pkg/iac/adapters/cloudformation/aws/elasticsearch/domain.go index f78df7882bdf..93b0300744c0 100644 --- a/internal/adapters/cloudformation/aws/elasticsearch/domain.go +++ b/pkg/iac/adapters/cloudformation/aws/elasticsearch/domain.go @@ -3,7 +3,7 @@ package elasticsearch import ( "github.com/aquasecurity/defsec/pkg/providers/aws/elasticsearch" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getDomains(ctx parser.FileContext) (domains []elasticsearch.Domain) { diff --git a/internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go b/pkg/iac/adapters/cloudformation/aws/elasticsearch/elasticsearch.go similarity index 79% rename from internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go rename to pkg/iac/adapters/cloudformation/aws/elasticsearch/elasticsearch.go index 94515a5a0484..b54b7fa1b33f 100644 --- a/internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go +++ b/pkg/iac/adapters/cloudformation/aws/elasticsearch/elasticsearch.go @@ -2,7 +2,7 @@ package elasticsearch import ( "github.com/aquasecurity/defsec/pkg/providers/aws/elasticsearch" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an ElasticSearch instance diff --git 
a/internal/adapters/cloudformation/aws/elb/adapt_test.go b/pkg/iac/adapters/cloudformation/aws/elb/adapt_test.go similarity index 96% rename from internal/adapters/cloudformation/aws/elb/adapt_test.go rename to pkg/iac/adapters/cloudformation/aws/elb/adapt_test.go index 607e07f94238..7d2dc5b9b5af 100644 --- a/internal/adapters/cloudformation/aws/elb/adapt_test.go +++ b/pkg/iac/adapters/cloudformation/aws/elb/adapt_test.go @@ -6,9 +6,9 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws/elb" "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/cloudformation/aws/elb/elb.go b/pkg/iac/adapters/cloudformation/aws/elb/elb.go similarity index 75% rename from internal/adapters/cloudformation/aws/elb/elb.go rename to pkg/iac/adapters/cloudformation/aws/elb/elb.go index 8e72304e75ab..e9df99b919c9 100644 --- a/internal/adapters/cloudformation/aws/elb/elb.go +++ b/pkg/iac/adapters/cloudformation/aws/elb/elb.go @@ -2,7 +2,7 @@ package elb import ( "github.com/aquasecurity/defsec/pkg/providers/aws/elb" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an ELB instance diff --git a/internal/adapters/cloudformation/aws/elb/loadbalancer.go b/pkg/iac/adapters/cloudformation/aws/elb/loadbalancer.go similarity index 77% rename from internal/adapters/cloudformation/aws/elb/loadbalancer.go rename to pkg/iac/adapters/cloudformation/aws/elb/loadbalancer.go index 51c62ef31d7d..007a0c90a422 100644 --- a/internal/adapters/cloudformation/aws/elb/loadbalancer.go +++ b/pkg/iac/adapters/cloudformation/aws/elb/loadbalancer.go @@ -3,10 +3,10 @@ package elb import ( 
"github.com/aquasecurity/defsec/pkg/providers/aws/elb" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getLoadBalancers(ctx parser.FileContext) (loadbalancers []elb.LoadBalancer) { +func getLoadBalancers(ctx parser2.FileContext) (loadbalancers []elb.LoadBalancer) { loadBalanacerResources := ctx.GetResourcesByType("AWS::ElasticLoadBalancingV2::LoadBalancer") @@ -24,7 +24,7 @@ func getLoadBalancers(ctx parser.FileContext) (loadbalancers []elb.LoadBalancer) return loadbalancers } -func getListeners(lbr *parser.Resource, ctx parser.FileContext) (listeners []elb.Listener) { +func getListeners(lbr *parser2.Resource, ctx parser2.FileContext) (listeners []elb.Listener) { listenerResources := ctx.GetResourcesByType("AWS::ElasticLoadBalancingV2::Listener") @@ -43,7 +43,7 @@ func getListeners(lbr *parser.Resource, ctx parser.FileContext) (listeners []elb return listeners } -func getDefaultListenerActions(r *parser.Resource) (actions []elb.Action) { +func getDefaultListenerActions(r *parser2.Resource) (actions []elb.Action) { defaultActionsProp := r.GetProperty("DefaultActions") if defaultActionsProp.IsNotList() { return actions @@ -57,15 +57,15 @@ func getDefaultListenerActions(r *parser.Resource) (actions []elb.Action) { return actions } -func isInternal(r *parser.Resource) types.BoolValue { +func isInternal(r *parser2.Resource) types.BoolValue { schemeProp := r.GetProperty("Scheme") if schemeProp.IsNotString() { return r.BoolDefault(false) } - return types.Bool(schemeProp.EqualTo("internal", parser.IgnoreCase), schemeProp.Metadata()) + return types.Bool(schemeProp.EqualTo("internal", parser2.IgnoreCase), schemeProp.Metadata()) } -func checkForDropInvalidHeaders(r *parser.Resource) types.BoolValue { +func checkForDropInvalidHeaders(r *parser2.Resource) types.BoolValue { attributesProp := 
r.GetProperty("LoadBalancerAttributes") if attributesProp.IsNotList() { return types.BoolDefault(false, r.Metadata()) diff --git a/internal/adapters/cloudformation/aws/iam/iam.go b/pkg/iac/adapters/cloudformation/aws/iam/iam.go similarity index 93% rename from internal/adapters/cloudformation/aws/iam/iam.go rename to pkg/iac/adapters/cloudformation/aws/iam/iam.go index f0d2329e61c9..27a257b736e0 100644 --- a/internal/adapters/cloudformation/aws/iam/iam.go +++ b/pkg/iac/adapters/cloudformation/aws/iam/iam.go @@ -3,7 +3,7 @@ package iam import ( "github.com/aquasecurity/defsec/pkg/providers/aws/iam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an IAM instance diff --git a/internal/adapters/cloudformation/aws/iam/policy.go b/pkg/iac/adapters/cloudformation/aws/iam/policy.go similarity index 87% rename from internal/adapters/cloudformation/aws/iam/policy.go rename to pkg/iac/adapters/cloudformation/aws/iam/policy.go index 4487b70cefb7..20fd1b9c7e38 100644 --- a/internal/adapters/cloudformation/aws/iam/policy.go +++ b/pkg/iac/adapters/cloudformation/aws/iam/policy.go @@ -1,14 +1,14 @@ package iam import ( + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -func getPolicies(ctx parser.FileContext) (policies []iam.Policy) { +func getPolicies(ctx parser2.FileContext) (policies []iam.Policy) { for _, policyResource := range ctx.GetResourcesByType("AWS::IAM::Policy") { policy := iam.Policy{ @@ -34,7 +34,7 @@ func getPolicies(ctx parser.FileContext) (policies []iam.Policy) { return policies } -func 
getRoles(ctx parser.FileContext) (roles []iam.Role) { +func getRoles(ctx parser2.FileContext) (roles []iam.Role) { for _, roleResource := range ctx.GetResourcesByType("AWS::IAM::Role") { policyProp := roleResource.GetProperty("Policies") roleName := roleResource.GetStringProperty("RoleName") @@ -48,7 +48,7 @@ func getRoles(ctx parser.FileContext) (roles []iam.Role) { return roles } -func getUsers(ctx parser.FileContext) (users []iam.User) { +func getUsers(ctx parser2.FileContext) (users []iam.User) { for _, userResource := range ctx.GetResourcesByType("AWS::IAM::User") { policyProp := userResource.GetProperty("Policies") userName := userResource.GetStringProperty("GroupName") @@ -64,7 +64,7 @@ func getUsers(ctx parser.FileContext) (users []iam.User) { return users } -func getAccessKeys(ctx parser.FileContext, username string) (accessKeys []iam.AccessKey) { +func getAccessKeys(ctx parser2.FileContext, username string) (accessKeys []iam.AccessKey) { for _, keyResource := range ctx.GetResourcesByType("AWS::IAM::AccessKey") { keyUsername := keyResource.GetStringProperty("UserName") if !keyUsername.EqualTo(username) { @@ -86,7 +86,7 @@ func getAccessKeys(ctx parser.FileContext, username string) (accessKeys []iam.Ac return accessKeys } -func getGroups(ctx parser.FileContext) (groups []iam.Group) { +func getGroups(ctx parser2.FileContext) (groups []iam.Group) { for _, groupResource := range ctx.GetResourcesByType("AWS::IAM::Group") { policyProp := groupResource.GetProperty("Policies") groupName := groupResource.GetStringProperty("GroupName") @@ -100,7 +100,7 @@ func getGroups(ctx parser.FileContext) (groups []iam.Group) { return groups } -func getPoliciesDocs(policiesProp *parser.Property) []iam.Policy { +func getPoliciesDocs(policiesProp *parser2.Property) []iam.Policy { var policies []iam.Policy for _, policy := range policiesProp.AsList() { diff --git a/internal/adapters/cloudformation/aws/kinesis/kinesis.go b/pkg/iac/adapters/cloudformation/aws/kinesis/kinesis.go 
similarity index 76% rename from internal/adapters/cloudformation/aws/kinesis/kinesis.go rename to pkg/iac/adapters/cloudformation/aws/kinesis/kinesis.go index 1348a8a5de83..cae954a39731 100644 --- a/internal/adapters/cloudformation/aws/kinesis/kinesis.go +++ b/pkg/iac/adapters/cloudformation/aws/kinesis/kinesis.go @@ -2,7 +2,7 @@ package kinesis import ( "github.com/aquasecurity/defsec/pkg/providers/aws/kinesis" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts a Kinesis instance diff --git a/internal/adapters/cloudformation/aws/kinesis/stream.go b/pkg/iac/adapters/cloudformation/aws/kinesis/stream.go similarity index 92% rename from internal/adapters/cloudformation/aws/kinesis/stream.go rename to pkg/iac/adapters/cloudformation/aws/kinesis/stream.go index 41a0889332a6..57c16dec985c 100644 --- a/internal/adapters/cloudformation/aws/kinesis/stream.go +++ b/pkg/iac/adapters/cloudformation/aws/kinesis/stream.go @@ -3,7 +3,7 @@ package kinesis import ( "github.com/aquasecurity/defsec/pkg/providers/aws/kinesis" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getStreams(ctx parser.FileContext) (streams []kinesis.Stream) { diff --git a/internal/adapters/cloudformation/aws/lambda/function.go b/pkg/iac/adapters/cloudformation/aws/lambda/function.go similarity index 81% rename from internal/adapters/cloudformation/aws/lambda/function.go rename to pkg/iac/adapters/cloudformation/aws/lambda/function.go index e3ba43fbffff..3d76c5d1c488 100644 --- a/internal/adapters/cloudformation/aws/lambda/function.go +++ b/pkg/iac/adapters/cloudformation/aws/lambda/function.go @@ -3,10 +3,10 @@ package lambda import ( "github.com/aquasecurity/defsec/pkg/providers/aws/lambda" 
"github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getFunctions(ctx parser.FileContext) (functions []lambda.Function) { +func getFunctions(ctx parser2.FileContext) (functions []lambda.Function) { functionResources := ctx.GetResourcesByType("AWS::Lambda::Function") @@ -34,7 +34,7 @@ func getFunctions(ctx parser.FileContext) (functions []lambda.Function) { return functions } -func getPermissions(funcR *parser.Resource, ctx parser.FileContext) (perms []lambda.Permission) { +func getPermissions(funcR *parser2.Resource, ctx parser2.FileContext) (perms []lambda.Permission) { permissionResources := ctx.GetResourcesByType("AWS::Lambda::Permission") diff --git a/internal/adapters/cloudformation/aws/lambda/lambda.go b/pkg/iac/adapters/cloudformation/aws/lambda/lambda.go similarity index 76% rename from internal/adapters/cloudformation/aws/lambda/lambda.go rename to pkg/iac/adapters/cloudformation/aws/lambda/lambda.go index b17c9d6015e1..54d7f6dab3e7 100644 --- a/internal/adapters/cloudformation/aws/lambda/lambda.go +++ b/pkg/iac/adapters/cloudformation/aws/lambda/lambda.go @@ -2,7 +2,7 @@ package lambda import ( "github.com/aquasecurity/defsec/pkg/providers/aws/lambda" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts a lambda instance diff --git a/internal/adapters/cloudformation/aws/mq/broker.go b/pkg/iac/adapters/cloudformation/aws/mq/broker.go similarity index 91% rename from internal/adapters/cloudformation/aws/mq/broker.go rename to pkg/iac/adapters/cloudformation/aws/mq/broker.go index aed216e74291..a25944780572 100644 --- a/internal/adapters/cloudformation/aws/mq/broker.go +++ b/pkg/iac/adapters/cloudformation/aws/mq/broker.go @@ -3,7 +3,7 @@ package mq import ( 
"github.com/aquasecurity/defsec/pkg/providers/aws/mq" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getBrokers(ctx parser.FileContext) (brokers []mq.Broker) { diff --git a/internal/adapters/cloudformation/aws/mq/mq.go b/pkg/iac/adapters/cloudformation/aws/mq/mq.go similarity index 74% rename from internal/adapters/cloudformation/aws/mq/mq.go rename to pkg/iac/adapters/cloudformation/aws/mq/mq.go index 90d31994c5bb..34d879d22d07 100644 --- a/internal/adapters/cloudformation/aws/mq/mq.go +++ b/pkg/iac/adapters/cloudformation/aws/mq/mq.go @@ -2,7 +2,7 @@ package mq import ( "github.com/aquasecurity/defsec/pkg/providers/aws/mq" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an MQ instance diff --git a/internal/adapters/cloudformation/aws/msk/cluster.go b/pkg/iac/adapters/cloudformation/aws/msk/cluster.go similarity index 97% rename from internal/adapters/cloudformation/aws/msk/cluster.go rename to pkg/iac/adapters/cloudformation/aws/msk/cluster.go index 7de4d181a50c..d2b7a192d478 100644 --- a/internal/adapters/cloudformation/aws/msk/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/msk/cluster.go @@ -3,7 +3,7 @@ package msk import ( "github.com/aquasecurity/defsec/pkg/providers/aws/msk" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getClusters(ctx parser.FileContext) (clusters []msk.Cluster) { diff --git a/internal/adapters/cloudformation/aws/msk/msk.go b/pkg/iac/adapters/cloudformation/aws/msk/msk.go similarity index 75% rename from 
internal/adapters/cloudformation/aws/msk/msk.go rename to pkg/iac/adapters/cloudformation/aws/msk/msk.go index 9b7cc9eb5b20..3a53fca389e3 100644 --- a/internal/adapters/cloudformation/aws/msk/msk.go +++ b/pkg/iac/adapters/cloudformation/aws/msk/msk.go @@ -2,7 +2,7 @@ package msk import ( "github.com/aquasecurity/defsec/pkg/providers/aws/msk" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an MSK instance diff --git a/internal/adapters/cloudformation/aws/neptune/cluster.go b/pkg/iac/adapters/cloudformation/aws/neptune/cluster.go similarity index 78% rename from internal/adapters/cloudformation/aws/neptune/cluster.go rename to pkg/iac/adapters/cloudformation/aws/neptune/cluster.go index b24353881bfa..f4aefefbfdc6 100644 --- a/internal/adapters/cloudformation/aws/neptune/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/neptune/cluster.go @@ -3,10 +3,10 @@ package neptune import ( "github.com/aquasecurity/defsec/pkg/providers/aws/neptune" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getClusters(ctx parser.FileContext) (clusters []neptune.Cluster) { +func getClusters(ctx parser2.FileContext) (clusters []neptune.Cluster) { for _, r := range ctx.GetResourcesByType("AWS::Neptune::DBCluster") { cluster := neptune.Cluster{ @@ -23,7 +23,7 @@ func getClusters(ctx parser.FileContext) (clusters []neptune.Cluster) { return clusters } -func getAuditLog(r *parser.Resource) types.BoolValue { +func getAuditLog(r *parser2.Resource) types.BoolValue { if logsProp := r.GetProperty("EnableCloudwatchLogsExports"); logsProp.IsList() { if logsProp.Contains("audit") { return types.Bool(true, logsProp.Metadata()) diff --git 
a/internal/adapters/cloudformation/aws/neptune/neptune.go b/pkg/iac/adapters/cloudformation/aws/neptune/neptune.go similarity index 77% rename from internal/adapters/cloudformation/aws/neptune/neptune.go rename to pkg/iac/adapters/cloudformation/aws/neptune/neptune.go index 528fd111041e..8956e48ba196 100644 --- a/internal/adapters/cloudformation/aws/neptune/neptune.go +++ b/pkg/iac/adapters/cloudformation/aws/neptune/neptune.go @@ -2,7 +2,7 @@ package neptune import ( "github.com/aquasecurity/defsec/pkg/providers/aws/neptune" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts a Neptune instance diff --git a/internal/adapters/cloudformation/aws/rds/adapt_test.go b/pkg/iac/adapters/cloudformation/aws/rds/adapt_test.go similarity index 98% rename from internal/adapters/cloudformation/aws/rds/adapt_test.go rename to pkg/iac/adapters/cloudformation/aws/rds/adapt_test.go index 178b47230843..8a74c1fd1f4c 100644 --- a/internal/adapters/cloudformation/aws/rds/adapt_test.go +++ b/pkg/iac/adapters/cloudformation/aws/rds/adapt_test.go @@ -6,9 +6,9 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws/rds" "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/cloudformation/aws/rds/cluster.go b/pkg/iac/adapters/cloudformation/aws/rds/cluster.go similarity index 96% rename from internal/adapters/cloudformation/aws/rds/cluster.go rename to pkg/iac/adapters/cloudformation/aws/rds/cluster.go index 79457c57d3a3..83f614b67e53 100644 --- a/internal/adapters/cloudformation/aws/rds/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/rds/cluster.go @@ -3,7 
+3,7 @@ package rds import ( "github.com/aquasecurity/defsec/pkg/providers/aws/rds" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getClusters(ctx parser.FileContext) (clusters map[string]rds.Cluster) { diff --git a/internal/adapters/cloudformation/aws/rds/instance.go b/pkg/iac/adapters/cloudformation/aws/rds/instance.go similarity index 88% rename from internal/adapters/cloudformation/aws/rds/instance.go rename to pkg/iac/adapters/cloudformation/aws/rds/instance.go index 7f651bb6d154..e3d1b444be7b 100644 --- a/internal/adapters/cloudformation/aws/rds/instance.go +++ b/pkg/iac/adapters/cloudformation/aws/rds/instance.go @@ -3,10 +3,10 @@ package rds import ( "github.com/aquasecurity/defsec/pkg/providers/aws/rds" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getClustersAndInstances(ctx parser.FileContext) ([]rds.Cluster, []rds.Instance) { +func getClustersAndInstances(ctx parser2.FileContext) ([]rds.Cluster, []rds.Instance) { clusterMap := getClusters(ctx) @@ -68,7 +68,7 @@ func getClustersAndInstances(ctx parser.FileContext) ([]rds.Cluster, []rds.Insta return clusters, orphans } -func getDBParameterGroups(ctx parser.FileContext, r *parser.Resource) (dbParameterGroup []rds.DBParameterGroupsList) { +func getDBParameterGroups(ctx parser2.FileContext, r *parser2.Resource) (dbParameterGroup []rds.DBParameterGroupsList) { dbParameterGroupName := r.GetStringProperty("DBParameterGroupName") @@ -88,7 +88,7 @@ func getDBParameterGroups(ctx parser.FileContext, r *parser.Resource) (dbParamet return dbParameterGroup } -func getEnabledCloudwatchLogsExports(r *parser.Resource) (enabledcloudwatchlogexportslist 
[]types.StringValue) { +func getEnabledCloudwatchLogsExports(r *parser2.Resource) (enabledcloudwatchlogexportslist []types.StringValue) { enabledCloudwatchLogExportList := r.GetProperty("EnableCloudwatchLogsExports") if enabledCloudwatchLogExportList.IsNil() || enabledCloudwatchLogExportList.IsNotList() { @@ -101,7 +101,7 @@ func getEnabledCloudwatchLogsExports(r *parser.Resource) (enabledcloudwatchlogex return enabledcloudwatchlogexportslist } -func getTagList(r *parser.Resource) (taglist []rds.TagList) { +func getTagList(r *parser2.Resource) (taglist []rds.TagList) { tagLists := r.GetProperty("Tags") if tagLists.IsNil() || tagLists.IsNotList() { @@ -116,7 +116,7 @@ func getTagList(r *parser.Resource) (taglist []rds.TagList) { return taglist } -func getReadReplicaDBInstanceIdentifiers(r *parser.Resource) (readreplicadbidentifier []types.StringValue) { +func getReadReplicaDBInstanceIdentifiers(r *parser2.Resource) (readreplicadbidentifier []types.StringValue) { readReplicaDBIdentifier := r.GetProperty("SourceDBInstanceIdentifier") if readReplicaDBIdentifier.IsNil() || readReplicaDBIdentifier.IsNotList() { diff --git a/internal/adapters/cloudformation/aws/rds/parameter_groups.go b/pkg/iac/adapters/cloudformation/aws/rds/parameter_groups.go similarity index 79% rename from internal/adapters/cloudformation/aws/rds/parameter_groups.go rename to pkg/iac/adapters/cloudformation/aws/rds/parameter_groups.go index fda94da89c36..f8bc488a5fd1 100644 --- a/internal/adapters/cloudformation/aws/rds/parameter_groups.go +++ b/pkg/iac/adapters/cloudformation/aws/rds/parameter_groups.go @@ -3,10 +3,10 @@ package rds import ( "github.com/aquasecurity/defsec/pkg/providers/aws/rds" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getParameterGroups(ctx parser.FileContext) (parametergroups 
[]rds.ParameterGroups) { +func getParameterGroups(ctx parser2.FileContext) (parametergroups []rds.ParameterGroups) { for _, r := range ctx.GetResourcesByType("AWS::RDS::DBParameterGroup") { @@ -23,7 +23,7 @@ func getParameterGroups(ctx parser.FileContext) (parametergroups []rds.Parameter return parametergroups } -func getParameters(r *parser.Resource) (parameters []rds.Parameters) { +func getParameters(r *parser2.Resource) (parameters []rds.Parameters) { dBParam := r.GetProperty("Parameters") diff --git a/internal/adapters/cloudformation/aws/rds/rds.go b/pkg/iac/adapters/cloudformation/aws/rds/rds.go similarity index 84% rename from internal/adapters/cloudformation/aws/rds/rds.go rename to pkg/iac/adapters/cloudformation/aws/rds/rds.go index e5db62ccc77a..05f4babc7d59 100644 --- a/internal/adapters/cloudformation/aws/rds/rds.go +++ b/pkg/iac/adapters/cloudformation/aws/rds/rds.go @@ -2,7 +2,7 @@ package rds import ( "github.com/aquasecurity/defsec/pkg/providers/aws/rds" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an RDS instance diff --git a/internal/adapters/cloudformation/aws/redshift/cluster.go b/pkg/iac/adapters/cloudformation/aws/redshift/cluster.go similarity index 96% rename from internal/adapters/cloudformation/aws/redshift/cluster.go rename to pkg/iac/adapters/cloudformation/aws/redshift/cluster.go index 9624849326f1..ebdee435b262 100644 --- a/internal/adapters/cloudformation/aws/redshift/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/redshift/cluster.go @@ -3,7 +3,7 @@ package redshift import ( "github.com/aquasecurity/defsec/pkg/providers/aws/redshift" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getClusters(ctx parser.FileContext) 
(clusters []redshift.Cluster) { diff --git a/internal/adapters/cloudformation/aws/redshift/redshift.go b/pkg/iac/adapters/cloudformation/aws/redshift/redshift.go similarity index 83% rename from internal/adapters/cloudformation/aws/redshift/redshift.go rename to pkg/iac/adapters/cloudformation/aws/redshift/redshift.go index 601a67043f72..a4358393199d 100644 --- a/internal/adapters/cloudformation/aws/redshift/redshift.go +++ b/pkg/iac/adapters/cloudformation/aws/redshift/redshift.go @@ -2,7 +2,7 @@ package redshift import ( "github.com/aquasecurity/defsec/pkg/providers/aws/redshift" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts a RedShift instance diff --git a/internal/adapters/cloudformation/aws/redshift/security_group.go b/pkg/iac/adapters/cloudformation/aws/redshift/security_group.go similarity index 86% rename from internal/adapters/cloudformation/aws/redshift/security_group.go rename to pkg/iac/adapters/cloudformation/aws/redshift/security_group.go index 345631e1a61b..bdd069044e78 100644 --- a/internal/adapters/cloudformation/aws/redshift/security_group.go +++ b/pkg/iac/adapters/cloudformation/aws/redshift/security_group.go @@ -2,7 +2,7 @@ package redshift import ( "github.com/aquasecurity/defsec/pkg/providers/aws/redshift" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getSecurityGroups(ctx parser.FileContext) (groups []redshift.SecurityGroup) { diff --git a/internal/adapters/cloudformation/aws/s3/bucket.go b/pkg/iac/adapters/cloudformation/aws/s3/bucket.go similarity index 89% rename from internal/adapters/cloudformation/aws/s3/bucket.go rename to pkg/iac/adapters/cloudformation/aws/s3/bucket.go index 8514d4a7c6e8..e33a8e01db2d 100644 --- a/internal/adapters/cloudformation/aws/s3/bucket.go +++ 
b/pkg/iac/adapters/cloudformation/aws/s3/bucket.go @@ -6,12 +6,12 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws/s3" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) var aclConvertRegex = regexp.MustCompile(`[A-Z][^A-Z]*`) -func getBuckets(cfFile parser.FileContext) []s3.Bucket { +func getBuckets(cfFile parser2.FileContext) []s3.Bucket { var buckets []s3.Bucket bucketResources := cfFile.GetResourcesByType("AWS::S3::Bucket") @@ -40,7 +40,7 @@ func getBuckets(cfFile parser.FileContext) []s3.Bucket { return buckets } -func getPublicAccessBlock(r *parser.Resource) *s3.PublicAccessBlock { +func getPublicAccessBlock(r *parser2.Resource) *s3.PublicAccessBlock { if block := r.GetProperty("PublicAccessBlockConfiguration"); block.IsNil() { return nil } @@ -60,7 +60,7 @@ func convertAclValue(aclValue defsecTypes.StringValue) defsecTypes.StringValue { return defsecTypes.String(strings.ToLower(strings.Join(matches, "-")), aclValue.GetMetadata()) } -func getLogging(r *parser.Resource) s3.Logging { +func getLogging(r *parser2.Resource) s3.Logging { logging := s3.Logging{ Metadata: r.Metadata(), @@ -77,7 +77,7 @@ func getLogging(r *parser.Resource) s3.Logging { return logging } -func hasVersioning(r *parser.Resource) defsecTypes.BoolValue { +func hasVersioning(r *parser2.Resource) defsecTypes.BoolValue { versioningProp := r.GetProperty("VersioningConfiguration.Status") if versioningProp.IsNil() { @@ -92,7 +92,7 @@ func hasVersioning(r *parser.Resource) defsecTypes.BoolValue { return defsecTypes.Bool(versioningEnabled, versioningProp.Metadata()) } -func getEncryption(r *parser.Resource, _ parser.FileContext) s3.Encryption { +func getEncryption(r *parser2.Resource, _ parser2.FileContext) s3.Encryption { encryption := s3.Encryption{ Metadata: r.Metadata(), @@ -117,7 +117,7 @@ func 
getEncryption(r *parser.Resource, _ parser.FileContext) s3.Encryption { return encryption } -func getLifecycle(resource *parser.Resource) []s3.Rules { +func getLifecycle(resource *parser2.Resource) []s3.Rules { LifecycleProp := resource.GetProperty("LifecycleConfiguration") RuleProp := LifecycleProp.GetProperty("Rules") @@ -136,7 +136,7 @@ func getLifecycle(resource *parser.Resource) []s3.Rules { return rule } -func getWebsite(r *parser.Resource) *s3.Website { +func getWebsite(r *parser2.Resource) *s3.Website { if block := r.GetProperty("WebsiteConfiguration"); block.IsNil() { return nil } else { diff --git a/internal/adapters/cloudformation/aws/s3/s3.go b/pkg/iac/adapters/cloudformation/aws/s3/s3.go similarity index 74% rename from internal/adapters/cloudformation/aws/s3/s3.go rename to pkg/iac/adapters/cloudformation/aws/s3/s3.go index d3f322475a51..783116246ef3 100644 --- a/internal/adapters/cloudformation/aws/s3/s3.go +++ b/pkg/iac/adapters/cloudformation/aws/s3/s3.go @@ -2,7 +2,7 @@ package s3 import ( "github.com/aquasecurity/defsec/pkg/providers/aws/s3" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an S3 instance diff --git a/internal/adapters/cloudformation/aws/sam/api.go b/pkg/iac/adapters/cloudformation/aws/sam/api.go similarity index 86% rename from internal/adapters/cloudformation/aws/sam/api.go rename to pkg/iac/adapters/cloudformation/aws/sam/api.go index ac123f28770e..84ec41ee7f7e 100644 --- a/internal/adapters/cloudformation/aws/sam/api.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/api.go @@ -3,10 +3,10 @@ package sam import ( "github.com/aquasecurity/defsec/pkg/providers/aws/sam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 
"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getApis(cfFile parser.FileContext) (apis []sam.API) { +func getApis(cfFile parser2.FileContext) (apis []sam.API) { apiResources := cfFile.GetResourcesByType("AWS::Serverless::Api") for _, r := range apiResources { @@ -25,7 +25,7 @@ func getApis(cfFile parser.FileContext) (apis []sam.API) { return apis } -func getRestMethodSettings(r *parser.Resource) sam.RESTMethodSettings { +func getRestMethodSettings(r *parser2.Resource) sam.RESTMethodSettings { settings := sam.RESTMethodSettings{ Metadata: r.Metadata(), @@ -47,7 +47,7 @@ func getRestMethodSettings(r *parser.Resource) sam.RESTMethodSettings { } if loggingLevel := settingsProp.GetProperty("LoggingLevel"); loggingLevel.IsNotNil() { - if loggingLevel.EqualTo("OFF", parser.IgnoreCase) { + if loggingLevel.EqualTo("OFF", parser2.IgnoreCase) { settings.LoggingEnabled = defsecTypes.Bool(false, loggingLevel.Metadata()) } else { settings.LoggingEnabled = defsecTypes.Bool(true, loggingLevel.Metadata()) @@ -58,7 +58,7 @@ func getRestMethodSettings(r *parser.Resource) sam.RESTMethodSettings { return settings } -func getAccessLogging(r *parser.Resource) sam.AccessLogging { +func getAccessLogging(r *parser2.Resource) sam.AccessLogging { logging := sam.AccessLogging{ Metadata: r.Metadata(), @@ -75,7 +75,7 @@ func getAccessLogging(r *parser.Resource) sam.AccessLogging { return logging } -func getDomainConfiguration(r *parser.Resource) sam.DomainConfiguration { +func getDomainConfiguration(r *parser2.Resource) sam.DomainConfiguration { domainConfig := sam.DomainConfiguration{ Metadata: r.Metadata(), diff --git a/internal/adapters/cloudformation/aws/sam/function.go b/pkg/iac/adapters/cloudformation/aws/sam/function.go similarity index 87% rename from internal/adapters/cloudformation/aws/sam/function.go rename to pkg/iac/adapters/cloudformation/aws/sam/function.go index 5ab9ca1b06fe..84db2ecc6c8f 100644 --- 
a/internal/adapters/cloudformation/aws/sam/function.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/function.go @@ -1,15 +1,15 @@ package sam import ( + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/sam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -func getFunctions(cfFile parser.FileContext) (functions []sam.Function) { +func getFunctions(cfFile parser2.FileContext) (functions []sam.Function) { functionResources := cfFile.GetResourcesByType("AWS::Serverless::Function") for _, r := range functionResources { @@ -28,7 +28,7 @@ func getFunctions(cfFile parser.FileContext) (functions []sam.Function) { return functions } -func setFunctionPolicies(r *parser.Resource, function *sam.Function) { +func setFunctionPolicies(r *parser2.Resource, function *sam.Function) { policies := r.GetProperty("Policies") if policies.IsNotNil() { if policies.IsString() { diff --git a/internal/adapters/cloudformation/aws/sam/http_api.go b/pkg/iac/adapters/cloudformation/aws/sam/http_api.go similarity index 85% rename from internal/adapters/cloudformation/aws/sam/http_api.go rename to pkg/iac/adapters/cloudformation/aws/sam/http_api.go index 075cff115596..b1412010df6f 100644 --- a/internal/adapters/cloudformation/aws/sam/http_api.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/http_api.go @@ -3,10 +3,10 @@ package sam import ( "github.com/aquasecurity/defsec/pkg/providers/aws/sam" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getHttpApis(cfFile parser.FileContext) (apis []sam.HttpAPI) { +func getHttpApis(cfFile 
parser2.FileContext) (apis []sam.HttpAPI) { apiResources := cfFile.GetResourcesByType("AWS::Serverless::HttpApi") for _, r := range apiResources { @@ -24,7 +24,7 @@ func getHttpApis(cfFile parser.FileContext) (apis []sam.HttpAPI) { return apis } -func getAccessLoggingV2(r *parser.Resource) sam.AccessLogging { +func getAccessLoggingV2(r *parser2.Resource) sam.AccessLogging { logging := sam.AccessLogging{ Metadata: r.Metadata(), @@ -41,7 +41,7 @@ func getAccessLoggingV2(r *parser.Resource) sam.AccessLogging { return logging } -func getRouteSettings(r *parser.Resource) sam.RouteSettings { +func getRouteSettings(r *parser2.Resource) sam.RouteSettings { routeSettings := sam.RouteSettings{ Metadata: r.Metadata(), diff --git a/internal/adapters/cloudformation/aws/sam/sam.go b/pkg/iac/adapters/cloudformation/aws/sam/sam.go similarity index 84% rename from internal/adapters/cloudformation/aws/sam/sam.go rename to pkg/iac/adapters/cloudformation/aws/sam/sam.go index 5ae61c53ca66..c928e901f308 100644 --- a/internal/adapters/cloudformation/aws/sam/sam.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/sam.go @@ -2,7 +2,7 @@ package sam import ( "github.com/aquasecurity/defsec/pkg/providers/aws/sam" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an SAM instance diff --git a/internal/adapters/cloudformation/aws/sam/state_machines.go b/pkg/iac/adapters/cloudformation/aws/sam/state_machines.go similarity index 87% rename from internal/adapters/cloudformation/aws/sam/state_machines.go rename to pkg/iac/adapters/cloudformation/aws/sam/state_machines.go index 917bd48a53f5..344df4006c3e 100644 --- a/internal/adapters/cloudformation/aws/sam/state_machines.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/state_machines.go @@ -1,15 +1,15 @@ package sam import ( + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" 
"github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/sam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) -func getStateMachines(cfFile parser.FileContext) (stateMachines []sam.StateMachine) { +func getStateMachines(cfFile parser2.FileContext) (stateMachines []sam.StateMachine) { stateMachineResources := cfFile.GetResourcesByType("AWS::Serverless::StateMachine") for _, r := range stateMachineResources { @@ -39,7 +39,7 @@ func getStateMachines(cfFile parser.FileContext) (stateMachines []sam.StateMachi return stateMachines } -func getTracingConfiguration(r *parser.Resource) sam.TracingConfiguration { +func getTracingConfiguration(r *parser2.Resource) sam.TracingConfiguration { tracing := r.GetProperty("Tracing") if tracing.IsNil() { return sam.TracingConfiguration{ @@ -54,7 +54,7 @@ func getTracingConfiguration(r *parser.Resource) sam.TracingConfiguration { } } -func setStateMachinePolicies(r *parser.Resource, stateMachine *sam.StateMachine) { +func setStateMachinePolicies(r *parser2.Resource, stateMachine *sam.StateMachine) { policies := r.GetProperty("Policies") if policies.IsNotNil() { if policies.IsString() { diff --git a/internal/adapters/cloudformation/aws/sam/tables.go b/pkg/iac/adapters/cloudformation/aws/sam/tables.go similarity index 80% rename from internal/adapters/cloudformation/aws/sam/tables.go rename to pkg/iac/adapters/cloudformation/aws/sam/tables.go index 1ee62a4ef90f..de6284966d9f 100644 --- a/internal/adapters/cloudformation/aws/sam/tables.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/tables.go @@ -3,10 +3,10 @@ package sam import ( "github.com/aquasecurity/defsec/pkg/providers/aws/sam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + parser2 
"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getSimpleTables(cfFile parser.FileContext) (tables []sam.SimpleTable) { +func getSimpleTables(cfFile parser2.FileContext) (tables []sam.SimpleTable) { tableResources := cfFile.GetResourcesByType("AWS::Serverless::SimpleTable") for _, r := range tableResources { @@ -22,7 +22,7 @@ func getSimpleTables(cfFile parser.FileContext) (tables []sam.SimpleTable) { return tables } -func getSSESpecification(r *parser.Resource) sam.SSESpecification { +func getSSESpecification(r *parser2.Resource) sam.SSESpecification { spec := sam.SSESpecification{ Metadata: r.Metadata(), diff --git a/internal/adapters/cloudformation/aws/sns/sns.go b/pkg/iac/adapters/cloudformation/aws/sns/sns.go similarity index 74% rename from internal/adapters/cloudformation/aws/sns/sns.go rename to pkg/iac/adapters/cloudformation/aws/sns/sns.go index 8c9e24bea91d..8e691b98d8e5 100644 --- a/internal/adapters/cloudformation/aws/sns/sns.go +++ b/pkg/iac/adapters/cloudformation/aws/sns/sns.go @@ -2,7 +2,7 @@ package sns import ( "github.com/aquasecurity/defsec/pkg/providers/aws/sns" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts a SNS instance diff --git a/internal/adapters/cloudformation/aws/sns/topic.go b/pkg/iac/adapters/cloudformation/aws/sns/topic.go similarity index 88% rename from internal/adapters/cloudformation/aws/sns/topic.go rename to pkg/iac/adapters/cloudformation/aws/sns/topic.go index 07fb62a35763..738248ccfd88 100644 --- a/internal/adapters/cloudformation/aws/sns/topic.go +++ b/pkg/iac/adapters/cloudformation/aws/sns/topic.go @@ -3,7 +3,7 @@ package sns import ( "github.com/aquasecurity/defsec/pkg/providers/aws/sns" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getTopics(ctx parser.FileContext) (topics []sns.Topic) { diff --git a/internal/adapters/cloudformation/aws/sqs/queue.go b/pkg/iac/adapters/cloudformation/aws/sqs/queue.go similarity index 96% rename from internal/adapters/cloudformation/aws/sqs/queue.go rename to pkg/iac/adapters/cloudformation/aws/sqs/queue.go index 396966b7db4d..72af950b5fa8 100644 --- a/internal/adapters/cloudformation/aws/sqs/queue.go +++ b/pkg/iac/adapters/cloudformation/aws/sqs/queue.go @@ -3,12 +3,12 @@ package sqs import ( "fmt" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" ) func getQueues(ctx parser.FileContext) (queues []sqs.Queue) { diff --git a/internal/adapters/cloudformation/aws/sqs/sqs.go b/pkg/iac/adapters/cloudformation/aws/sqs/sqs.go similarity index 74% rename from internal/adapters/cloudformation/aws/sqs/sqs.go rename to pkg/iac/adapters/cloudformation/aws/sqs/sqs.go index 974860f6a09a..e51ab59334ba 100644 --- a/internal/adapters/cloudformation/aws/sqs/sqs.go +++ b/pkg/iac/adapters/cloudformation/aws/sqs/sqs.go @@ -2,7 +2,7 @@ package sqs import ( "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an SQS instance diff --git a/internal/adapters/cloudformation/aws/ssm/secret.go b/pkg/iac/adapters/cloudformation/aws/ssm/secret.go similarity index 84% rename from internal/adapters/cloudformation/aws/ssm/secret.go rename to pkg/iac/adapters/cloudformation/aws/ssm/secret.go index 
6145c950f7f4..181799c68f8e 100644 --- a/internal/adapters/cloudformation/aws/ssm/secret.go +++ b/pkg/iac/adapters/cloudformation/aws/ssm/secret.go @@ -2,7 +2,7 @@ package ssm import ( "github.com/aquasecurity/defsec/pkg/providers/aws/ssm" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getSecrets(ctx parser.FileContext) (secrets []ssm.Secret) { diff --git a/internal/adapters/cloudformation/aws/ssm/ssm.go b/pkg/iac/adapters/cloudformation/aws/ssm/ssm.go similarity index 74% rename from internal/adapters/cloudformation/aws/ssm/ssm.go rename to pkg/iac/adapters/cloudformation/aws/ssm/ssm.go index 33899d7494b5..705ad63c3391 100644 --- a/internal/adapters/cloudformation/aws/ssm/ssm.go +++ b/pkg/iac/adapters/cloudformation/aws/ssm/ssm.go @@ -2,7 +2,7 @@ package ssm import ( "github.com/aquasecurity/defsec/pkg/providers/aws/ssm" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts an SSM instance diff --git a/internal/adapters/cloudformation/aws/workspaces/workspace.go b/pkg/iac/adapters/cloudformation/aws/workspaces/workspace.go similarity index 91% rename from internal/adapters/cloudformation/aws/workspaces/workspace.go rename to pkg/iac/adapters/cloudformation/aws/workspaces/workspace.go index 267c9aac46d3..8a896513a740 100644 --- a/internal/adapters/cloudformation/aws/workspaces/workspace.go +++ b/pkg/iac/adapters/cloudformation/aws/workspaces/workspace.go @@ -2,7 +2,7 @@ package workspaces import ( "github.com/aquasecurity/defsec/pkg/providers/aws/workspaces" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getWorkSpaces(ctx parser.FileContext) (workSpaces []workspaces.WorkSpace) { diff --git 
a/internal/adapters/cloudformation/aws/workspaces/workspaces.go b/pkg/iac/adapters/cloudformation/aws/workspaces/workspaces.go similarity index 78% rename from internal/adapters/cloudformation/aws/workspaces/workspaces.go rename to pkg/iac/adapters/cloudformation/aws/workspaces/workspaces.go index 9918ebdf4977..a7702a821466 100644 --- a/internal/adapters/cloudformation/aws/workspaces/workspaces.go +++ b/pkg/iac/adapters/cloudformation/aws/workspaces/workspaces.go @@ -2,7 +2,7 @@ package workspaces import ( "github.com/aquasecurity/defsec/pkg/providers/aws/workspaces" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) // Adapt adapts a Workspaces instance diff --git a/pkg/iac/adapters/terraform/adapt.go b/pkg/iac/adapters/terraform/adapt.go new file mode 100644 index 000000000000..108122d84b59 --- /dev/null +++ b/pkg/iac/adapters/terraform/adapt.go @@ -0,0 +1,31 @@ +package terraform + +import ( + "github.com/aquasecurity/defsec/pkg/state" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/azure" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/cloudstack" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/digitalocean" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/github" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/google" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/kubernetes" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/nifcloud" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/openstack" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/oracle" +) + +func Adapt(modules terraform.Modules) *state.State { + return &state.State{ + AWS: 
aws.Adapt(modules), + Azure: azure.Adapt(modules), + CloudStack: cloudstack.Adapt(modules), + DigitalOcean: digitalocean.Adapt(modules), + GitHub: github.Adapt(modules), + Google: google.Adapt(modules), + Kubernetes: kubernetes.Adapt(modules), + Nifcloud: nifcloud.Adapt(modules), + OpenStack: openstack.Adapt(modules), + Oracle: oracle.Adapt(modules), + } +} diff --git a/internal/adapters/terraform/aws/accessanalyzer/accessanalyzer.go b/pkg/iac/adapters/terraform/aws/accessanalyzer/accessanalyzer.go similarity index 100% rename from internal/adapters/terraform/aws/accessanalyzer/accessanalyzer.go rename to pkg/iac/adapters/terraform/aws/accessanalyzer/accessanalyzer.go diff --git a/pkg/iac/adapters/terraform/aws/adapt.go b/pkg/iac/adapters/terraform/aws/adapt.go new file mode 100644 index 000000000000..3e6366d698f1 --- /dev/null +++ b/pkg/iac/adapters/terraform/aws/adapt.go @@ -0,0 +1,79 @@ +package aws + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/apigateway" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/athena" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/cloudfront" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/cloudtrail" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/cloudwatch" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/codebuild" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/config" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/documentdb" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/dynamodb" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/ec2" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/ecr" + 
"github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/ecs" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/efs" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/eks" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/elasticache" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/elasticsearch" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/elb" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/emr" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/iam" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/kinesis" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/kms" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/lambda" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/mq" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/msk" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/neptune" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/provider" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/rds" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/redshift" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/s3" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/sns" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/sqs" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/ssm" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/workspaces" +) + +func Adapt(modules terraform.Modules) aws.AWS { + return aws.AWS{ + Meta: aws.Meta{ + TFProviders: provider.Adapt(modules), + }, + APIGateway: apigateway.Adapt(modules), + Athena: athena.Adapt(modules), + Cloudfront: cloudfront.Adapt(modules), + CloudTrail: 
cloudtrail.Adapt(modules), + CloudWatch: cloudwatch.Adapt(modules), + CodeBuild: codebuild.Adapt(modules), + Config: config.Adapt(modules), + DocumentDB: documentdb.Adapt(modules), + DynamoDB: dynamodb.Adapt(modules), + EC2: ec2.Adapt(modules), + ECR: ecr.Adapt(modules), + ECS: ecs.Adapt(modules), + EFS: efs.Adapt(modules), + EKS: eks.Adapt(modules), + ElastiCache: elasticache.Adapt(modules), + Elasticsearch: elasticsearch.Adapt(modules), + ELB: elb.Adapt(modules), + EMR: emr.Adapt(modules), + IAM: iam.Adapt(modules), + Kinesis: kinesis.Adapt(modules), + KMS: kms.Adapt(modules), + Lambda: lambda.Adapt(modules), + MQ: mq.Adapt(modules), + MSK: msk.Adapt(modules), + Neptune: neptune.Adapt(modules), + RDS: rds.Adapt(modules), + Redshift: redshift.Adapt(modules), + S3: s3.Adapt(modules), + SNS: sns.Adapt(modules), + SQS: sqs.Adapt(modules), + SSM: ssm.Adapt(modules), + WorkSpaces: workspaces.Adapt(modules), + } +} diff --git a/internal/adapters/terraform/aws/apigateway/adapt.go b/pkg/iac/adapters/terraform/aws/apigateway/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/apigateway/adapt.go rename to pkg/iac/adapters/terraform/aws/apigateway/adapt.go diff --git a/internal/adapters/terraform/aws/apigateway/adapt_test.go b/pkg/iac/adapters/terraform/aws/apigateway/adapt_test.go similarity index 99% rename from internal/adapters/terraform/aws/apigateway/adapt_test.go rename to pkg/iac/adapters/terraform/aws/apigateway/adapt_test.go index 1b116d0df8e5..b59ead0cabe5 100644 --- a/internal/adapters/terraform/aws/apigateway/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/apigateway/adapt_test.go @@ -7,7 +7,7 @@ import ( v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + 
"github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/apigateway/apiv1.go b/pkg/iac/adapters/terraform/aws/apigateway/apiv1.go similarity index 100% rename from internal/adapters/terraform/aws/apigateway/apiv1.go rename to pkg/iac/adapters/terraform/aws/apigateway/apiv1.go diff --git a/internal/adapters/terraform/aws/apigateway/apiv1_test.go b/pkg/iac/adapters/terraform/aws/apigateway/apiv1_test.go similarity index 97% rename from internal/adapters/terraform/aws/apigateway/apiv1_test.go rename to pkg/iac/adapters/terraform/aws/apigateway/apiv1_test.go index 1d1ae52dbd85..63dceafa0fb8 100644 --- a/internal/adapters/terraform/aws/apigateway/apiv1_test.go +++ b/pkg/iac/adapters/terraform/aws/apigateway/apiv1_test.go @@ -4,7 +4,7 @@ import ( "testing" v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/aws/apigateway/apiv2.go b/pkg/iac/adapters/terraform/aws/apigateway/apiv2.go similarity index 100% rename from internal/adapters/terraform/aws/apigateway/apiv2.go rename to pkg/iac/adapters/terraform/aws/apigateway/apiv2.go diff --git a/internal/adapters/terraform/aws/apigateway/apiv2_test.go b/pkg/iac/adapters/terraform/aws/apigateway/apiv2_test.go similarity index 96% rename from internal/adapters/terraform/aws/apigateway/apiv2_test.go rename to pkg/iac/adapters/terraform/aws/apigateway/apiv2_test.go index f5c728996746..db6db62c7b00 100644 --- a/internal/adapters/terraform/aws/apigateway/apiv2_test.go +++ b/pkg/iac/adapters/terraform/aws/apigateway/apiv2_test.go @@ -4,7 +4,7 @@ 
import ( "testing" v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/aws/apigateway/namesv1.go b/pkg/iac/adapters/terraform/aws/apigateway/namesv1.go similarity index 100% rename from internal/adapters/terraform/aws/apigateway/namesv1.go rename to pkg/iac/adapters/terraform/aws/apigateway/namesv1.go diff --git a/internal/adapters/terraform/aws/apigateway/namesv1_test.go b/pkg/iac/adapters/terraform/aws/apigateway/namesv1_test.go similarity index 93% rename from internal/adapters/terraform/aws/apigateway/namesv1_test.go rename to pkg/iac/adapters/terraform/aws/apigateway/namesv1_test.go index 8232d1271aea..54f430bc0fa6 100644 --- a/internal/adapters/terraform/aws/apigateway/namesv1_test.go +++ b/pkg/iac/adapters/terraform/aws/apigateway/namesv1_test.go @@ -4,7 +4,7 @@ import ( "testing" v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/aws/apigateway/namesv2.go b/pkg/iac/adapters/terraform/aws/apigateway/namesv2.go similarity index 100% rename from internal/adapters/terraform/aws/apigateway/namesv2.go rename to pkg/iac/adapters/terraform/aws/apigateway/namesv2.go diff --git a/internal/adapters/terraform/aws/apigateway/namesv2_test.go b/pkg/iac/adapters/terraform/aws/apigateway/namesv2_test.go similarity index 94% rename from internal/adapters/terraform/aws/apigateway/namesv2_test.go rename to pkg/iac/adapters/terraform/aws/apigateway/namesv2_test.go index c2c1c8da4c78..1d672204b804 100644 --- 
a/internal/adapters/terraform/aws/apigateway/namesv2_test.go +++ b/pkg/iac/adapters/terraform/aws/apigateway/namesv2_test.go @@ -4,7 +4,7 @@ import ( "testing" v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/aws/athena/adapt.go b/pkg/iac/adapters/terraform/aws/athena/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/athena/adapt.go rename to pkg/iac/adapters/terraform/aws/athena/adapt.go diff --git a/internal/adapters/terraform/aws/athena/adapt_test.go b/pkg/iac/adapters/terraform/aws/athena/adapt_test.go similarity index 98% rename from internal/adapters/terraform/aws/athena/adapt_test.go rename to pkg/iac/adapters/terraform/aws/athena/adapt_test.go index c4daaf9c9284..3e5d84daa830 100644 --- a/internal/adapters/terraform/aws/athena/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/athena/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/athena" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/cloudfront/adapt.go b/pkg/iac/adapters/terraform/aws/cloudfront/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/cloudfront/adapt.go rename to pkg/iac/adapters/terraform/aws/cloudfront/adapt.go diff --git a/internal/adapters/terraform/aws/cloudfront/adapt_test.go b/pkg/iac/adapters/terraform/aws/cloudfront/adapt_test.go 
similarity index 98% rename from internal/adapters/terraform/aws/cloudfront/adapt_test.go rename to pkg/iac/adapters/terraform/aws/cloudfront/adapt_test.go index 9131bd1a36d0..6235b0e33782 100644 --- a/internal/adapters/terraform/aws/cloudfront/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/cloudfront/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/cloudfront" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/cloudtrail/adapt.go b/pkg/iac/adapters/terraform/aws/cloudtrail/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/cloudtrail/adapt.go rename to pkg/iac/adapters/terraform/aws/cloudtrail/adapt.go diff --git a/internal/adapters/terraform/aws/cloudtrail/adapt_test.go b/pkg/iac/adapters/terraform/aws/cloudtrail/adapt_test.go similarity index 98% rename from internal/adapters/terraform/aws/cloudtrail/adapt_test.go rename to pkg/iac/adapters/terraform/aws/cloudtrail/adapt_test.go index c669d96f0010..363b63aeba37 100644 --- a/internal/adapters/terraform/aws/cloudtrail/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/cloudtrail/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/cloudtrail" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff 
--git a/internal/adapters/terraform/aws/cloudwatch/adapt.go b/pkg/iac/adapters/terraform/aws/cloudwatch/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/cloudwatch/adapt.go rename to pkg/iac/adapters/terraform/aws/cloudwatch/adapt.go diff --git a/internal/adapters/terraform/aws/cloudwatch/adapt_test.go b/pkg/iac/adapters/terraform/aws/cloudwatch/adapt_test.go similarity index 97% rename from internal/adapters/terraform/aws/cloudwatch/adapt_test.go rename to pkg/iac/adapters/terraform/aws/cloudwatch/adapt_test.go index 1486cfeb7d08..442aa24e2220 100644 --- a/internal/adapters/terraform/aws/cloudwatch/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/cloudwatch/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/cloudwatch" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/codebuild/adapt.go b/pkg/iac/adapters/terraform/aws/codebuild/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/codebuild/adapt.go rename to pkg/iac/adapters/terraform/aws/codebuild/adapt.go diff --git a/internal/adapters/terraform/aws/codebuild/adapt_test.go b/pkg/iac/adapters/terraform/aws/codebuild/adapt_test.go similarity index 97% rename from internal/adapters/terraform/aws/codebuild/adapt_test.go rename to pkg/iac/adapters/terraform/aws/codebuild/adapt_test.go index 0488a1441831..48b27fcc04ae 100644 --- a/internal/adapters/terraform/aws/codebuild/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/codebuild/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + 
"github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/codebuild" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/config/adapt.go b/pkg/iac/adapters/terraform/aws/config/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/config/adapt.go rename to pkg/iac/adapters/terraform/aws/config/adapt.go diff --git a/internal/adapters/terraform/aws/config/adapt_test.go b/pkg/iac/adapters/terraform/aws/config/adapt_test.go similarity index 96% rename from internal/adapters/terraform/aws/config/adapt_test.go rename to pkg/iac/adapters/terraform/aws/config/adapt_test.go index 5fd929b70689..273d46a39ca8 100644 --- a/internal/adapters/terraform/aws/config/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/config/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/config" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" ) diff --git a/internal/adapters/terraform/aws/documentdb/adapt.go b/pkg/iac/adapters/terraform/aws/documentdb/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/documentdb/adapt.go rename to pkg/iac/adapters/terraform/aws/documentdb/adapt.go diff --git a/internal/adapters/terraform/aws/documentdb/adapt_test.go b/pkg/iac/adapters/terraform/aws/documentdb/adapt_test.go similarity index 98% rename from internal/adapters/terraform/aws/documentdb/adapt_test.go rename to 
pkg/iac/adapters/terraform/aws/documentdb/adapt_test.go index 470d2992c189..06f12d5c0e73 100644 --- a/internal/adapters/terraform/aws/documentdb/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/documentdb/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/documentdb" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/dynamodb/adapt.go b/pkg/iac/adapters/terraform/aws/dynamodb/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/dynamodb/adapt.go rename to pkg/iac/adapters/terraform/aws/dynamodb/adapt.go diff --git a/internal/adapters/terraform/aws/dynamodb/adapt_test.go b/pkg/iac/adapters/terraform/aws/dynamodb/adapt_test.go similarity index 98% rename from internal/adapters/terraform/aws/dynamodb/adapt_test.go rename to pkg/iac/adapters/terraform/aws/dynamodb/adapt_test.go index 5c233021083d..223f5449e4ad 100644 --- a/internal/adapters/terraform/aws/dynamodb/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/dynamodb/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/dynamodb" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/ec2/adapt.go b/pkg/iac/adapters/terraform/aws/ec2/adapt.go similarity index 100% 
rename from internal/adapters/terraform/aws/ec2/adapt.go rename to pkg/iac/adapters/terraform/aws/ec2/adapt.go diff --git a/internal/adapters/terraform/aws/ec2/adapt_test.go b/pkg/iac/adapters/terraform/aws/ec2/adapt_test.go similarity index 99% rename from internal/adapters/terraform/aws/ec2/adapt_test.go rename to pkg/iac/adapters/terraform/aws/ec2/adapt_test.go index 12fc3f4c2b99..8238ec902d3d 100644 --- a/internal/adapters/terraform/aws/ec2/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/ec2/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/ec2/autoscaling.go b/pkg/iac/adapters/terraform/aws/ec2/autoscaling.go similarity index 100% rename from internal/adapters/terraform/aws/ec2/autoscaling.go rename to pkg/iac/adapters/terraform/aws/ec2/autoscaling.go diff --git a/internal/adapters/terraform/aws/ec2/autoscaling_test.go b/pkg/iac/adapters/terraform/aws/ec2/autoscaling_test.go similarity index 98% rename from internal/adapters/terraform/aws/ec2/autoscaling_test.go rename to pkg/iac/adapters/terraform/aws/ec2/autoscaling_test.go index 893fec8e63cd..2373c64e7068 100644 --- a/internal/adapters/terraform/aws/ec2/autoscaling_test.go +++ b/pkg/iac/adapters/terraform/aws/ec2/autoscaling_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" - 
"github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/ec2/subnet.go b/pkg/iac/adapters/terraform/aws/ec2/subnet.go similarity index 100% rename from internal/adapters/terraform/aws/ec2/subnet.go rename to pkg/iac/adapters/terraform/aws/ec2/subnet.go diff --git a/internal/adapters/terraform/aws/ec2/subnet_test.go b/pkg/iac/adapters/terraform/aws/ec2/subnet_test.go similarity index 96% rename from internal/adapters/terraform/aws/ec2/subnet_test.go rename to pkg/iac/adapters/terraform/aws/ec2/subnet_test.go index a18b4d768edb..91572ab9d3a9 100644 --- a/internal/adapters/terraform/aws/ec2/subnet_test.go +++ b/pkg/iac/adapters/terraform/aws/ec2/subnet_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/ec2/volume.go b/pkg/iac/adapters/terraform/aws/ec2/volume.go similarity index 100% rename from internal/adapters/terraform/aws/ec2/volume.go rename to pkg/iac/adapters/terraform/aws/ec2/volume.go diff --git a/internal/adapters/terraform/aws/ec2/volume_test.go b/pkg/iac/adapters/terraform/aws/ec2/volume_test.go similarity index 97% rename from internal/adapters/terraform/aws/ec2/volume_test.go rename to pkg/iac/adapters/terraform/aws/ec2/volume_test.go index c09d1ca2ed9b..c15e76b7f75b 100644 --- a/internal/adapters/terraform/aws/ec2/volume_test.go +++ 
b/pkg/iac/adapters/terraform/aws/ec2/volume_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/ec2/vpc.go b/pkg/iac/adapters/terraform/aws/ec2/vpc.go similarity index 100% rename from internal/adapters/terraform/aws/ec2/vpc.go rename to pkg/iac/adapters/terraform/aws/ec2/vpc.go diff --git a/internal/adapters/terraform/aws/ec2/vpc_test.go b/pkg/iac/adapters/terraform/aws/ec2/vpc_test.go similarity index 99% rename from internal/adapters/terraform/aws/ec2/vpc_test.go rename to pkg/iac/adapters/terraform/aws/ec2/vpc_test.go index cdde2f6c6fdc..1b9e062fe885 100644 --- a/internal/adapters/terraform/aws/ec2/vpc_test.go +++ b/pkg/iac/adapters/terraform/aws/ec2/vpc_test.go @@ -4,13 +4,13 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/aws/ecr/adapt.go b/pkg/iac/adapters/terraform/aws/ecr/adapt.go similarity index 98% rename from internal/adapters/terraform/aws/ecr/adapt.go rename to pkg/iac/adapters/terraform/aws/ecr/adapt.go index 0aca6c6da7cb..14f82f96046c 100644 --- a/internal/adapters/terraform/aws/ecr/adapt.go +++ b/pkg/iac/adapters/terraform/aws/ecr/adapt.go @@ 
-1,13 +1,13 @@ package ecr import ( + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/iam" "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" iamp "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/iam" ) func Adapt(modules terraform.Modules) ecr.ECR { diff --git a/internal/adapters/terraform/aws/ecr/adapt_test.go b/pkg/iac/adapters/terraform/aws/ecr/adapt_test.go similarity index 98% rename from internal/adapters/terraform/aws/ecr/adapt_test.go rename to pkg/iac/adapters/terraform/aws/ecr/adapt_test.go index d6110cb1960a..e83547c680bd 100644 --- a/internal/adapters/terraform/aws/ecr/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/ecr/adapt_test.go @@ -4,12 +4,11 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/liamg/iamgo" "github.com/stretchr/testify/assert" diff --git a/internal/adapters/terraform/aws/ecs/adapt.go b/pkg/iac/adapters/terraform/aws/ecs/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/ecs/adapt.go rename to pkg/iac/adapters/terraform/aws/ecs/adapt.go diff --git a/internal/adapters/terraform/aws/ecs/adapt_test.go b/pkg/iac/adapters/terraform/aws/ecs/adapt_test.go similarity index 98% rename from internal/adapters/terraform/aws/ecs/adapt_test.go rename to pkg/iac/adapters/terraform/aws/ecs/adapt_test.go index 293422d0cd88..a4a558966d2c 100644 --- 
a/internal/adapters/terraform/aws/ecs/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/ecs/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/ecs" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/efs/adapt.go b/pkg/iac/adapters/terraform/aws/efs/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/efs/adapt.go rename to pkg/iac/adapters/terraform/aws/efs/adapt.go diff --git a/internal/adapters/terraform/aws/efs/adapt_test.go b/pkg/iac/adapters/terraform/aws/efs/adapt_test.go similarity index 96% rename from internal/adapters/terraform/aws/efs/adapt_test.go rename to pkg/iac/adapters/terraform/aws/efs/adapt_test.go index cca5358ff01a..37debad07577 100644 --- a/internal/adapters/terraform/aws/efs/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/efs/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/efs" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/eks/adapt.go b/pkg/iac/adapters/terraform/aws/eks/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/eks/adapt.go rename to pkg/iac/adapters/terraform/aws/eks/adapt.go diff --git a/internal/adapters/terraform/aws/eks/adapt_test.go 
b/pkg/iac/adapters/terraform/aws/eks/adapt_test.go similarity index 98% rename from internal/adapters/terraform/aws/eks/adapt_test.go rename to pkg/iac/adapters/terraform/aws/eks/adapt_test.go index f303f2dfa3b1..f0b70144646a 100644 --- a/internal/adapters/terraform/aws/eks/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/eks/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/eks" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/elasticache/adapt.go b/pkg/iac/adapters/terraform/aws/elasticache/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/elasticache/adapt.go rename to pkg/iac/adapters/terraform/aws/elasticache/adapt.go diff --git a/internal/adapters/terraform/aws/elasticache/adapt_test.go b/pkg/iac/adapters/terraform/aws/elasticache/adapt_test.go similarity index 99% rename from internal/adapters/terraform/aws/elasticache/adapt_test.go rename to pkg/iac/adapters/terraform/aws/elasticache/adapt_test.go index 436142734b19..1045e1e81e74 100644 --- a/internal/adapters/terraform/aws/elasticache/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/elasticache/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/elasticache" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" 
"github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/elasticsearch/adapt.go b/pkg/iac/adapters/terraform/aws/elasticsearch/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/elasticsearch/adapt.go rename to pkg/iac/adapters/terraform/aws/elasticsearch/adapt.go diff --git a/internal/adapters/terraform/aws/elasticsearch/adapt_test.go b/pkg/iac/adapters/terraform/aws/elasticsearch/adapt_test.go similarity index 98% rename from internal/adapters/terraform/aws/elasticsearch/adapt_test.go rename to pkg/iac/adapters/terraform/aws/elasticsearch/adapt_test.go index bba77842d69a..6f82b06a4719 100644 --- a/internal/adapters/terraform/aws/elasticsearch/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/elasticsearch/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/elasticsearch" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/elb/adapt.go b/pkg/iac/adapters/terraform/aws/elb/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/elb/adapt.go rename to pkg/iac/adapters/terraform/aws/elb/adapt.go diff --git a/internal/adapters/terraform/aws/elb/adapt_test.go b/pkg/iac/adapters/terraform/aws/elb/adapt_test.go similarity index 98% rename from internal/adapters/terraform/aws/elb/adapt_test.go rename to pkg/iac/adapters/terraform/aws/elb/adapt_test.go index c0f4dae37115..a1d06b02f716 100644 --- a/internal/adapters/terraform/aws/elb/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/elb/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes 
"github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/elb" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/emr/adapt.go b/pkg/iac/adapters/terraform/aws/emr/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/emr/adapt.go rename to pkg/iac/adapters/terraform/aws/emr/adapt.go diff --git a/internal/adapters/terraform/aws/emr/adapt_test.go b/pkg/iac/adapters/terraform/aws/emr/adapt_test.go similarity index 97% rename from internal/adapters/terraform/aws/emr/adapt_test.go rename to pkg/iac/adapters/terraform/aws/emr/adapt_test.go index b38dcc5811fe..259e2257cf73 100644 --- a/internal/adapters/terraform/aws/emr/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/emr/adapt_test.go @@ -4,12 +4,12 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/emr" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/aws/iam/adapt.go b/pkg/iac/adapters/terraform/aws/iam/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/iam/adapt.go rename to pkg/iac/adapters/terraform/aws/iam/adapt.go diff --git a/internal/adapters/terraform/aws/iam/adapt_test.go b/pkg/iac/adapters/terraform/aws/iam/adapt_test.go similarity index 98% rename from internal/adapters/terraform/aws/iam/adapt_test.go rename to 
pkg/iac/adapters/terraform/aws/iam/adapt_test.go index 55ba7a725900..66387056736c 100644 --- a/internal/adapters/terraform/aws/iam/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/iam/adapt_test.go @@ -3,7 +3,7 @@ package iam import ( "testing" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/aws/iam/convert.go b/pkg/iac/adapters/terraform/aws/iam/convert.go similarity index 100% rename from internal/adapters/terraform/aws/iam/convert.go rename to pkg/iac/adapters/terraform/aws/iam/convert.go diff --git a/internal/adapters/terraform/aws/iam/groups.go b/pkg/iac/adapters/terraform/aws/iam/groups.go similarity index 100% rename from internal/adapters/terraform/aws/iam/groups.go rename to pkg/iac/adapters/terraform/aws/iam/groups.go diff --git a/internal/adapters/terraform/aws/iam/groups_test.go b/pkg/iac/adapters/terraform/aws/iam/groups_test.go similarity index 97% rename from internal/adapters/terraform/aws/iam/groups_test.go rename to pkg/iac/adapters/terraform/aws/iam/groups_test.go index 5e06c6974312..ba5b3d34781c 100644 --- a/internal/adapters/terraform/aws/iam/groups_test.go +++ b/pkg/iac/adapters/terraform/aws/iam/groups_test.go @@ -4,10 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/aws/iam/passwords.go b/pkg/iac/adapters/terraform/aws/iam/passwords.go similarity index 100% rename from internal/adapters/terraform/aws/iam/passwords.go rename to 
pkg/iac/adapters/terraform/aws/iam/passwords.go diff --git a/internal/adapters/terraform/aws/iam/passwords_test.go b/pkg/iac/adapters/terraform/aws/iam/passwords_test.go similarity index 95% rename from internal/adapters/terraform/aws/iam/passwords_test.go rename to pkg/iac/adapters/terraform/aws/iam/passwords_test.go index b6d920ba6ab3..56d4d25e12ce 100644 --- a/internal/adapters/terraform/aws/iam/passwords_test.go +++ b/pkg/iac/adapters/terraform/aws/iam/passwords_test.go @@ -4,10 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/aws/iam/policies.go b/pkg/iac/adapters/terraform/aws/iam/policies.go similarity index 100% rename from internal/adapters/terraform/aws/iam/policies.go rename to pkg/iac/adapters/terraform/aws/iam/policies.go diff --git a/internal/adapters/terraform/aws/iam/policies_test.go b/pkg/iac/adapters/terraform/aws/iam/policies_test.go similarity index 98% rename from internal/adapters/terraform/aws/iam/policies_test.go rename to pkg/iac/adapters/terraform/aws/iam/policies_test.go index 9d7706185b93..263e0e9f7947 100644 --- a/internal/adapters/terraform/aws/iam/policies_test.go +++ b/pkg/iac/adapters/terraform/aws/iam/policies_test.go @@ -4,11 +4,11 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/liamg/iamgo" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git 
a/internal/adapters/terraform/aws/iam/roles.go b/pkg/iac/adapters/terraform/aws/iam/roles.go similarity index 100% rename from internal/adapters/terraform/aws/iam/roles.go rename to pkg/iac/adapters/terraform/aws/iam/roles.go diff --git a/internal/adapters/terraform/aws/iam/roles_test.go b/pkg/iac/adapters/terraform/aws/iam/roles_test.go similarity index 98% rename from internal/adapters/terraform/aws/iam/roles_test.go rename to pkg/iac/adapters/terraform/aws/iam/roles_test.go index b1d6354017ff..c7347913166d 100644 --- a/internal/adapters/terraform/aws/iam/roles_test.go +++ b/pkg/iac/adapters/terraform/aws/iam/roles_test.go @@ -6,8 +6,8 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws/iam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/aws/iam/users.go b/pkg/iac/adapters/terraform/aws/iam/users.go similarity index 100% rename from internal/adapters/terraform/aws/iam/users.go rename to pkg/iac/adapters/terraform/aws/iam/users.go diff --git a/internal/adapters/terraform/aws/iam/users_test.go b/pkg/iac/adapters/terraform/aws/iam/users_test.go similarity index 98% rename from internal/adapters/terraform/aws/iam/users_test.go rename to pkg/iac/adapters/terraform/aws/iam/users_test.go index ef4e3606c041..563a9772ce71 100644 --- a/internal/adapters/terraform/aws/iam/users_test.go +++ b/pkg/iac/adapters/terraform/aws/iam/users_test.go @@ -5,8 +5,8 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws/iam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" 
"github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/aws/kinesis/adapt.go b/pkg/iac/adapters/terraform/aws/kinesis/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/kinesis/adapt.go rename to pkg/iac/adapters/terraform/aws/kinesis/adapt.go diff --git a/internal/adapters/terraform/aws/kinesis/adapt_test.go b/pkg/iac/adapters/terraform/aws/kinesis/adapt_test.go similarity index 96% rename from internal/adapters/terraform/aws/kinesis/adapt_test.go rename to pkg/iac/adapters/terraform/aws/kinesis/adapt_test.go index ff90ad5cfefa..97b0d14972d4 100644 --- a/internal/adapters/terraform/aws/kinesis/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/kinesis/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/kinesis" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/kms/adapt.go b/pkg/iac/adapters/terraform/aws/kms/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/kms/adapt.go rename to pkg/iac/adapters/terraform/aws/kms/adapt.go diff --git a/internal/adapters/terraform/aws/kms/adapt_test.go b/pkg/iac/adapters/terraform/aws/kms/adapt_test.go similarity index 96% rename from internal/adapters/terraform/aws/kms/adapt_test.go rename to pkg/iac/adapters/terraform/aws/kms/adapt_test.go index fc203f373042..fecee0cc2c0e 100644 --- a/internal/adapters/terraform/aws/kms/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/kms/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + 
"github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/kms" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/lambda/adapt.go b/pkg/iac/adapters/terraform/aws/lambda/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/lambda/adapt.go rename to pkg/iac/adapters/terraform/aws/lambda/adapt.go diff --git a/internal/adapters/terraform/aws/lambda/adapt_test.go b/pkg/iac/adapters/terraform/aws/lambda/adapt_test.go similarity index 98% rename from internal/adapters/terraform/aws/lambda/adapt_test.go rename to pkg/iac/adapters/terraform/aws/lambda/adapt_test.go index 64c884c02e8c..dd0f8e97f32c 100644 --- a/internal/adapters/terraform/aws/lambda/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/lambda/adapt_test.go @@ -4,12 +4,12 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/lambda" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/aws/mq/adapt.go b/pkg/iac/adapters/terraform/aws/mq/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/mq/adapt.go rename to pkg/iac/adapters/terraform/aws/mq/adapt.go diff --git a/internal/adapters/terraform/aws/mq/adapt_test.go b/pkg/iac/adapters/terraform/aws/mq/adapt_test.go similarity index 97% rename from internal/adapters/terraform/aws/mq/adapt_test.go rename to 
pkg/iac/adapters/terraform/aws/mq/adapt_test.go index a7e110c3fc82..6dc317ada21f 100644 --- a/internal/adapters/terraform/aws/mq/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/mq/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/mq" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/msk/adapt.go b/pkg/iac/adapters/terraform/aws/msk/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/msk/adapt.go rename to pkg/iac/adapters/terraform/aws/msk/adapt.go diff --git a/internal/adapters/terraform/aws/msk/adapt_test.go b/pkg/iac/adapters/terraform/aws/msk/adapt_test.go similarity index 98% rename from internal/adapters/terraform/aws/msk/adapt_test.go rename to pkg/iac/adapters/terraform/aws/msk/adapt_test.go index de3752b73991..03b8fdf4f4b1 100644 --- a/internal/adapters/terraform/aws/msk/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/msk/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/msk" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/neptune/adapt.go b/pkg/iac/adapters/terraform/aws/neptune/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/neptune/adapt.go rename to 
pkg/iac/adapters/terraform/aws/neptune/adapt.go diff --git a/internal/adapters/terraform/aws/neptune/adapt_test.go b/pkg/iac/adapters/terraform/aws/neptune/adapt_test.go similarity index 97% rename from internal/adapters/terraform/aws/neptune/adapt_test.go rename to pkg/iac/adapters/terraform/aws/neptune/adapt_test.go index 148b29f3b3e4..7619b80efceb 100644 --- a/internal/adapters/terraform/aws/neptune/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/neptune/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/neptune" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/provider/adapt.go b/pkg/iac/adapters/terraform/aws/provider/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/provider/adapt.go rename to pkg/iac/adapters/terraform/aws/provider/adapt.go diff --git a/internal/adapters/terraform/aws/provider/adapt_test.go b/pkg/iac/adapters/terraform/aws/provider/adapt_test.go similarity index 98% rename from internal/adapters/terraform/aws/provider/adapt_test.go rename to pkg/iac/adapters/terraform/aws/provider/adapt_test.go index acdd08ded4f2..261a48779fd6 100644 --- a/internal/adapters/terraform/aws/provider/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/provider/adapt_test.go @@ -5,8 +5,8 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws" "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) 
diff --git a/internal/adapters/terraform/aws/rds/adapt.go b/pkg/iac/adapters/terraform/aws/rds/adapt.go similarity index 86% rename from internal/adapters/terraform/aws/rds/adapt.go rename to pkg/iac/adapters/terraform/aws/rds/adapt.go index a03b3d124058..517c7635c77d 100644 --- a/internal/adapters/terraform/aws/rds/adapt.go +++ b/pkg/iac/adapters/terraform/aws/rds/adapt.go @@ -146,21 +146,21 @@ func adaptInstance(resource *terraform.Block, modules terraform.Modules) rds.Ins } } return rds.Instance{ - Metadata: resource.GetMetadata(), - BackupRetentionPeriodDays: resource.GetAttribute("backup_retention_period").AsIntValueOrDefault(0, resource), - ReplicationSourceARN: defsecTypes.StringExplicit(replicaSourceValue, resource.GetMetadata()), - PerformanceInsights: adaptPerformanceInsights(resource), - Encryption: adaptEncryption(resource), - PublicAccess: resource.GetAttribute("publicly_accessible").AsBoolValueOrDefault(false, resource), - Engine: resource.GetAttribute("engine").AsStringValueOrDefault(rds.EngineAurora, resource), - IAMAuthEnabled: resource.GetAttribute("iam_database_authentication_enabled").AsBoolValueOrDefault(false, resource), - DeletionProtection: resource.GetAttribute("deletion_protection").AsBoolValueOrDefault(false, resource), - DBInstanceArn: resource.GetAttribute("arn").AsStringValueOrDefault("", resource), - StorageEncrypted: resource.GetAttribute("storage_encrypted").AsBoolValueOrDefault(true, resource), - DBInstanceIdentifier: resource.GetAttribute("identifier").AsStringValueOrDefault("", resource), - EngineVersion: resource.GetAttribute("engine_version").AsStringValueOrDefault("", resource), - AutoMinorVersionUpgrade: resource.GetAttribute("auto_minor_version_upgrade").AsBoolValueOrDefault(false, resource), - MultiAZ: resource.GetAttribute("multi_az").AsBoolValueOrDefault(false, resource), + Metadata: resource.GetMetadata(), + BackupRetentionPeriodDays: resource.GetAttribute("backup_retention_period").AsIntValueOrDefault(0, resource), + 
ReplicationSourceARN: defsecTypes.StringExplicit(replicaSourceValue, resource.GetMetadata()), + PerformanceInsights: adaptPerformanceInsights(resource), + Encryption: adaptEncryption(resource), + PublicAccess: resource.GetAttribute("publicly_accessible").AsBoolValueOrDefault(false, resource), + Engine: resource.GetAttribute("engine").AsStringValueOrDefault(rds.EngineAurora, resource), + IAMAuthEnabled: resource.GetAttribute("iam_database_authentication_enabled").AsBoolValueOrDefault(false, resource), + DeletionProtection: resource.GetAttribute("deletion_protection").AsBoolValueOrDefault(false, resource), + DBInstanceArn: resource.GetAttribute("arn").AsStringValueOrDefault("", resource), + StorageEncrypted: resource.GetAttribute("storage_encrypted").AsBoolValueOrDefault(true, resource), + DBInstanceIdentifier: resource.GetAttribute("identifier").AsStringValueOrDefault("", resource), + EngineVersion: resource.GetAttribute("engine_version").AsStringValueOrDefault("", resource), + AutoMinorVersionUpgrade: resource.GetAttribute("auto_minor_version_upgrade").AsBoolValueOrDefault(false, resource), + MultiAZ: resource.GetAttribute("multi_az").AsBoolValueOrDefault(false, resource), PubliclyAccessible: resource.GetAttribute("publicly_accessible").AsBoolValueOrDefault(false, resource), LatestRestorableTime: defsecTypes.TimeUnresolvable(resource.GetMetadata()), ReadReplicaDBInstanceIdentifiers: ReadReplicaDBInstanceIdentifiers, diff --git a/internal/adapters/terraform/aws/rds/adapt_test.go b/pkg/iac/adapters/terraform/aws/rds/adapt_test.go similarity index 99% rename from internal/adapters/terraform/aws/rds/adapt_test.go rename to pkg/iac/adapters/terraform/aws/rds/adapt_test.go index c6e750b5277d..95e4b1960731 100644 --- a/internal/adapters/terraform/aws/rds/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/rds/adapt_test.go @@ -4,9 +4,9 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + 
"github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/rds" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/redshift/adapt.go b/pkg/iac/adapters/terraform/aws/redshift/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/redshift/adapt.go rename to pkg/iac/adapters/terraform/aws/redshift/adapt.go diff --git a/internal/adapters/terraform/aws/redshift/adapt_test.go b/pkg/iac/adapters/terraform/aws/redshift/adapt_test.go similarity index 99% rename from internal/adapters/terraform/aws/redshift/adapt_test.go rename to pkg/iac/adapters/terraform/aws/redshift/adapt_test.go index 3a852f5cb9a4..dbd2cfdce8bd 100644 --- a/internal/adapters/terraform/aws/redshift/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/redshift/adapt_test.go @@ -5,12 +5,12 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/redshift" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/aws/s3/adapt.go b/pkg/iac/adapters/terraform/aws/s3/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/s3/adapt.go rename to pkg/iac/adapters/terraform/aws/s3/adapt.go diff --git a/internal/adapters/terraform/aws/s3/adapt_test.go b/pkg/iac/adapters/terraform/aws/s3/adapt_test.go similarity index 99% rename from internal/adapters/terraform/aws/s3/adapt_test.go rename to pkg/iac/adapters/terraform/aws/s3/adapt_test.go index 
35d6a4e5aaca..1da59aa73b4c 100644 --- a/internal/adapters/terraform/aws/s3/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/s3/adapt_test.go @@ -4,10 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/s3" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" "github.com/liamg/iamgo" "github.com/stretchr/testify/assert" diff --git a/internal/adapters/terraform/aws/s3/bucket.go b/pkg/iac/adapters/terraform/aws/s3/bucket.go similarity index 100% rename from internal/adapters/terraform/aws/s3/bucket.go rename to pkg/iac/adapters/terraform/aws/s3/bucket.go diff --git a/internal/adapters/terraform/aws/s3/bucket_test.go b/pkg/iac/adapters/terraform/aws/s3/bucket_test.go similarity index 98% rename from internal/adapters/terraform/aws/s3/bucket_test.go rename to pkg/iac/adapters/terraform/aws/s3/bucket_test.go index 4fcdc2e50ef9..069d0b39c86d 100644 --- a/internal/adapters/terraform/aws/s3/bucket_test.go +++ b/pkg/iac/adapters/terraform/aws/s3/bucket_test.go @@ -3,10 +3,9 @@ package s3 import ( "testing" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/stretchr/testify/assert" ) diff --git a/internal/adapters/terraform/aws/s3/policies.go b/pkg/iac/adapters/terraform/aws/s3/policies.go similarity index 94% rename from internal/adapters/terraform/aws/s3/policies.go rename to pkg/iac/adapters/terraform/aws/s3/policies.go index dc3f39294b27..0f4423189849 100644 --- a/internal/adapters/terraform/aws/s3/policies.go +++ b/pkg/iac/adapters/terraform/aws/s3/policies.go @@ 
-3,7 +3,7 @@ package s3 import ( "github.com/aquasecurity/defsec/pkg/providers/aws/iam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - iamAdapter "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/iam" + iamAdapter "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/iam" ) func (a *adapter) adaptBucketPolicies() { diff --git a/internal/adapters/terraform/aws/s3/public_access_block.go b/pkg/iac/adapters/terraform/aws/s3/public_access_block.go similarity index 100% rename from internal/adapters/terraform/aws/s3/public_access_block.go rename to pkg/iac/adapters/terraform/aws/s3/public_access_block.go diff --git a/internal/adapters/terraform/aws/sns/adapt.go b/pkg/iac/adapters/terraform/aws/sns/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/sns/adapt.go rename to pkg/iac/adapters/terraform/aws/sns/adapt.go diff --git a/internal/adapters/terraform/aws/sns/adapt_test.go b/pkg/iac/adapters/terraform/aws/sns/adapt_test.go similarity index 96% rename from internal/adapters/terraform/aws/sns/adapt_test.go rename to pkg/iac/adapters/terraform/aws/sns/adapt_test.go index 1213829a1d11..d080defa5fe5 100644 --- a/internal/adapters/terraform/aws/sns/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/sns/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/sns" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/sqs/adapt.go b/pkg/iac/adapters/terraform/aws/sqs/adapt.go similarity index 98% rename from internal/adapters/terraform/aws/sqs/adapt.go rename to 
pkg/iac/adapters/terraform/aws/sqs/adapt.go index 84d28750cfaf..c90517ed8263 100644 --- a/internal/adapters/terraform/aws/sqs/adapt.go +++ b/pkg/iac/adapters/terraform/aws/sqs/adapt.go @@ -1,6 +1,7 @@ package sqs import ( + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/iam" "github.com/google/uuid" "github.com/liamg/iamgo" @@ -8,7 +9,6 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/iam" ) func Adapt(modules terraform.Modules) sqs.SQS { diff --git a/internal/adapters/terraform/aws/sqs/adapt_test.go b/pkg/iac/adapters/terraform/aws/sqs/adapt_test.go similarity index 97% rename from internal/adapters/terraform/aws/sqs/adapt_test.go rename to pkg/iac/adapters/terraform/aws/sqs/adapt_test.go index f6a191ec23eb..c3107e8ddd54 100644 --- a/internal/adapters/terraform/aws/sqs/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/sqs/adapt_test.go @@ -4,12 +4,11 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/liamg/iamgo" "github.com/stretchr/testify/assert" diff --git a/internal/adapters/terraform/aws/ssm/adapt.go b/pkg/iac/adapters/terraform/aws/ssm/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/ssm/adapt.go rename to pkg/iac/adapters/terraform/aws/ssm/adapt.go diff --git a/internal/adapters/terraform/aws/ssm/adapt_test.go b/pkg/iac/adapters/terraform/aws/ssm/adapt_test.go similarity index 97% 
rename from internal/adapters/terraform/aws/ssm/adapt_test.go rename to pkg/iac/adapters/terraform/aws/ssm/adapt_test.go index 73dc82015f9b..874100590297 100644 --- a/internal/adapters/terraform/aws/ssm/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/ssm/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/ssm" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/aws/workspaces/adapt.go b/pkg/iac/adapters/terraform/aws/workspaces/adapt.go similarity index 100% rename from internal/adapters/terraform/aws/workspaces/adapt.go rename to pkg/iac/adapters/terraform/aws/workspaces/adapt.go diff --git a/internal/adapters/terraform/aws/workspaces/adapt_test.go b/pkg/iac/adapters/terraform/aws/workspaces/adapt_test.go similarity index 97% rename from internal/adapters/terraform/aws/workspaces/adapt_test.go rename to pkg/iac/adapters/terraform/aws/workspaces/adapt_test.go index 0888dd1e020c..e77faebd64c7 100644 --- a/internal/adapters/terraform/aws/workspaces/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/workspaces/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/aws/workspaces" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/pkg/iac/adapters/terraform/azure/adapt.go 
b/pkg/iac/adapters/terraform/azure/adapt.go new file mode 100644 index 000000000000..783092082f72 --- /dev/null +++ b/pkg/iac/adapters/terraform/azure/adapt.go @@ -0,0 +1,37 @@ +package azure + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/azure/appservice" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/azure/authorization" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/azure/compute" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/azure/container" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/azure/database" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/azure/datafactory" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/azure/datalake" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/azure/keyvault" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/azure/monitor" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/azure/network" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/azure/securitycenter" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/azure/storage" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/azure/synapse" +) + +func Adapt(modules terraform.Modules) azure.Azure { + return azure.Azure{ + AppService: appservice.Adapt(modules), + Authorization: authorization.Adapt(modules), + Compute: compute.Adapt(modules), + Container: container.Adapt(modules), + Database: database.Adapt(modules), + DataFactory: datafactory.Adapt(modules), + DataLake: datalake.Adapt(modules), + KeyVault: keyvault.Adapt(modules), + Monitor: monitor.Adapt(modules), + Network: network.Adapt(modules), + SecurityCenter: securitycenter.Adapt(modules), + Storage: storage.Adapt(modules), + Synapse: 
synapse.Adapt(modules), + } +} diff --git a/internal/adapters/terraform/azure/appservice/adapt.go b/pkg/iac/adapters/terraform/azure/appservice/adapt.go similarity index 100% rename from internal/adapters/terraform/azure/appservice/adapt.go rename to pkg/iac/adapters/terraform/azure/appservice/adapt.go diff --git a/internal/adapters/terraform/azure/appservice/adapt_test.go b/pkg/iac/adapters/terraform/azure/appservice/adapt_test.go similarity index 98% rename from internal/adapters/terraform/azure/appservice/adapt_test.go rename to pkg/iac/adapters/terraform/azure/appservice/adapt_test.go index a0432279774b..1894222c0207 100644 --- a/internal/adapters/terraform/azure/appservice/adapt_test.go +++ b/pkg/iac/adapters/terraform/azure/appservice/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/azure/appservice" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/azure/authorization/adapt.go b/pkg/iac/adapters/terraform/azure/authorization/adapt.go similarity index 100% rename from internal/adapters/terraform/azure/authorization/adapt.go rename to pkg/iac/adapters/terraform/azure/authorization/adapt.go diff --git a/internal/adapters/terraform/azure/authorization/adapt_test.go b/pkg/iac/adapters/terraform/azure/authorization/adapt_test.go similarity index 97% rename from internal/adapters/terraform/azure/authorization/adapt_test.go rename to pkg/iac/adapters/terraform/azure/authorization/adapt_test.go index 74c8602edebc..fdfbc6725641 100644 --- a/internal/adapters/terraform/azure/authorization/adapt_test.go +++ 
b/pkg/iac/adapters/terraform/azure/authorization/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/azure/authorization" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/azure/compute/adapt.go b/pkg/iac/adapters/terraform/azure/compute/adapt.go similarity index 100% rename from internal/adapters/terraform/azure/compute/adapt.go rename to pkg/iac/adapters/terraform/azure/compute/adapt.go diff --git a/internal/adapters/terraform/azure/compute/adapt_test.go b/pkg/iac/adapters/terraform/azure/compute/adapt_test.go similarity index 98% rename from internal/adapters/terraform/azure/compute/adapt_test.go rename to pkg/iac/adapters/terraform/azure/compute/adapt_test.go index 3af617db9685..3b7063bc2216 100644 --- a/internal/adapters/terraform/azure/compute/adapt_test.go +++ b/pkg/iac/adapters/terraform/azure/compute/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/azure/compute" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/azure/container/adapt.go b/pkg/iac/adapters/terraform/azure/container/adapt.go similarity index 100% rename from internal/adapters/terraform/azure/container/adapt.go rename to pkg/iac/adapters/terraform/azure/container/adapt.go 
diff --git a/internal/adapters/terraform/azure/container/adapt_test.go b/pkg/iac/adapters/terraform/azure/container/adapt_test.go similarity index 99% rename from internal/adapters/terraform/azure/container/adapt_test.go rename to pkg/iac/adapters/terraform/azure/container/adapt_test.go index a43fcdf35f97..039bd9b0ef0d 100644 --- a/internal/adapters/terraform/azure/container/adapt_test.go +++ b/pkg/iac/adapters/terraform/azure/container/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/azure/container" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/azure/database/adapt.go b/pkg/iac/adapters/terraform/azure/database/adapt.go similarity index 100% rename from internal/adapters/terraform/azure/database/adapt.go rename to pkg/iac/adapters/terraform/azure/database/adapt.go diff --git a/internal/adapters/terraform/azure/database/adapt_test.go b/pkg/iac/adapters/terraform/azure/database/adapt_test.go similarity index 99% rename from internal/adapters/terraform/azure/database/adapt_test.go rename to pkg/iac/adapters/terraform/azure/database/adapt_test.go index 401b8603fa7d..c0fb7d3c5ddf 100644 --- a/internal/adapters/terraform/azure/database/adapt_test.go +++ b/pkg/iac/adapters/terraform/azure/database/adapt_test.go @@ -4,9 +4,9 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/azure/database" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" 
"github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/azure/datafactory/adapt.go b/pkg/iac/adapters/terraform/azure/datafactory/adapt.go similarity index 100% rename from internal/adapters/terraform/azure/datafactory/adapt.go rename to pkg/iac/adapters/terraform/azure/datafactory/adapt.go diff --git a/internal/adapters/terraform/azure/datafactory/adapt_test.go b/pkg/iac/adapters/terraform/azure/datafactory/adapt_test.go similarity index 96% rename from internal/adapters/terraform/azure/datafactory/adapt_test.go rename to pkg/iac/adapters/terraform/azure/datafactory/adapt_test.go index acd13315d904..4bc12c231b8f 100644 --- a/internal/adapters/terraform/azure/datafactory/adapt_test.go +++ b/pkg/iac/adapters/terraform/azure/datafactory/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/azure/datafactory" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/azure/datalake/adapt.go b/pkg/iac/adapters/terraform/azure/datalake/adapt.go similarity index 100% rename from internal/adapters/terraform/azure/datalake/adapt.go rename to pkg/iac/adapters/terraform/azure/datalake/adapt.go diff --git a/internal/adapters/terraform/azure/datalake/adapt_test.go b/pkg/iac/adapters/terraform/azure/datalake/adapt_test.go similarity index 96% rename from internal/adapters/terraform/azure/datalake/adapt_test.go rename to pkg/iac/adapters/terraform/azure/datalake/adapt_test.go index 41fd476522cf..4c465b329e88 100644 --- 
a/internal/adapters/terraform/azure/datalake/adapt_test.go +++ b/pkg/iac/adapters/terraform/azure/datalake/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/azure/datalake" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/azure/keyvault/adapt.go b/pkg/iac/adapters/terraform/azure/keyvault/adapt.go similarity index 100% rename from internal/adapters/terraform/azure/keyvault/adapt.go rename to pkg/iac/adapters/terraform/azure/keyvault/adapt.go diff --git a/internal/adapters/terraform/azure/keyvault/adapt_test.go b/pkg/iac/adapters/terraform/azure/keyvault/adapt_test.go similarity index 99% rename from internal/adapters/terraform/azure/keyvault/adapt_test.go rename to pkg/iac/adapters/terraform/azure/keyvault/adapt_test.go index b7e668712697..da78a56d3365 100644 --- a/internal/adapters/terraform/azure/keyvault/adapt_test.go +++ b/pkg/iac/adapters/terraform/azure/keyvault/adapt_test.go @@ -5,11 +5,10 @@ import ( "time" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/azure/keyvault" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/azure/monitor/adapt.go b/pkg/iac/adapters/terraform/azure/monitor/adapt.go similarity index 100% rename from internal/adapters/terraform/azure/monitor/adapt.go rename to 
pkg/iac/adapters/terraform/azure/monitor/adapt.go diff --git a/internal/adapters/terraform/azure/monitor/adapt_test.go b/pkg/iac/adapters/terraform/azure/monitor/adapt_test.go similarity index 97% rename from internal/adapters/terraform/azure/monitor/adapt_test.go rename to pkg/iac/adapters/terraform/azure/monitor/adapt_test.go index f7894c53da59..d2297515aa7e 100644 --- a/internal/adapters/terraform/azure/monitor/adapt_test.go +++ b/pkg/iac/adapters/terraform/azure/monitor/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/azure/monitor" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/azure/network/adapt.go b/pkg/iac/adapters/terraform/azure/network/adapt.go similarity index 100% rename from internal/adapters/terraform/azure/network/adapt.go rename to pkg/iac/adapters/terraform/azure/network/adapt.go diff --git a/internal/adapters/terraform/azure/network/adapt_test.go b/pkg/iac/adapters/terraform/azure/network/adapt_test.go similarity index 99% rename from internal/adapters/terraform/azure/network/adapt_test.go rename to pkg/iac/adapters/terraform/azure/network/adapt_test.go index 74e1f3f26aca..f345c2a7c5c1 100644 --- a/internal/adapters/terraform/azure/network/adapt_test.go +++ b/pkg/iac/adapters/terraform/azure/network/adapt_test.go @@ -4,12 +4,12 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/azure/network" "github.com/stretchr/testify/assert" 
"github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/azure/securitycenter/adapt.go b/pkg/iac/adapters/terraform/azure/securitycenter/adapt.go similarity index 100% rename from internal/adapters/terraform/azure/securitycenter/adapt.go rename to pkg/iac/adapters/terraform/azure/securitycenter/adapt.go diff --git a/internal/adapters/terraform/azure/securitycenter/adapt_test.go b/pkg/iac/adapters/terraform/azure/securitycenter/adapt_test.go similarity index 98% rename from internal/adapters/terraform/azure/securitycenter/adapt_test.go rename to pkg/iac/adapters/terraform/azure/securitycenter/adapt_test.go index 1454259aa3d5..09977c3e27d2 100644 --- a/internal/adapters/terraform/azure/securitycenter/adapt_test.go +++ b/pkg/iac/adapters/terraform/azure/securitycenter/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/azure/securitycenter" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/azure/storage/adapt.go b/pkg/iac/adapters/terraform/azure/storage/adapt.go similarity index 100% rename from internal/adapters/terraform/azure/storage/adapt.go rename to pkg/iac/adapters/terraform/azure/storage/adapt.go diff --git a/internal/adapters/terraform/azure/storage/adapt_test.go b/pkg/iac/adapters/terraform/azure/storage/adapt_test.go similarity index 99% rename from internal/adapters/terraform/azure/storage/adapt_test.go rename to pkg/iac/adapters/terraform/azure/storage/adapt_test.go index 
c0e3b85f3c99..a02089ea106f 100644 --- a/internal/adapters/terraform/azure/storage/adapt_test.go +++ b/pkg/iac/adapters/terraform/azure/storage/adapt_test.go @@ -4,12 +4,12 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/azure/storage" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/azure/synapse/adapt.go b/pkg/iac/adapters/terraform/azure/synapse/adapt.go similarity index 100% rename from internal/adapters/terraform/azure/synapse/adapt.go rename to pkg/iac/adapters/terraform/azure/synapse/adapt.go diff --git a/internal/adapters/terraform/azure/synapse/adapt_test.go b/pkg/iac/adapters/terraform/azure/synapse/adapt_test.go similarity index 96% rename from internal/adapters/terraform/azure/synapse/adapt_test.go rename to pkg/iac/adapters/terraform/azure/synapse/adapt_test.go index 8c1146733c4d..26619fe99c7a 100644 --- a/internal/adapters/terraform/azure/synapse/adapt_test.go +++ b/pkg/iac/adapters/terraform/azure/synapse/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/azure/synapse" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/cloudstack/adapt.go b/pkg/iac/adapters/terraform/cloudstack/adapt.go similarity index 77% rename from 
internal/adapters/terraform/cloudstack/adapt.go rename to pkg/iac/adapters/terraform/cloudstack/adapt.go index 6be5887cf6b5..095931d405d4 100644 --- a/internal/adapters/terraform/cloudstack/adapt.go +++ b/pkg/iac/adapters/terraform/cloudstack/adapt.go @@ -3,7 +3,7 @@ package cloudstack import ( "github.com/aquasecurity/defsec/pkg/providers/cloudstack" "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/trivy/internal/adapters/terraform/cloudstack/compute" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/cloudstack/compute" ) func Adapt(modules terraform.Modules) cloudstack.CloudStack { diff --git a/internal/adapters/terraform/cloudstack/compute/adapt.go b/pkg/iac/adapters/terraform/cloudstack/compute/adapt.go similarity index 100% rename from internal/adapters/terraform/cloudstack/compute/adapt.go rename to pkg/iac/adapters/terraform/cloudstack/compute/adapt.go diff --git a/internal/adapters/terraform/cloudstack/compute/adapt_test.go b/pkg/iac/adapters/terraform/cloudstack/compute/adapt_test.go similarity index 96% rename from internal/adapters/terraform/cloudstack/compute/adapt_test.go rename to pkg/iac/adapters/terraform/cloudstack/compute/adapt_test.go index b94cd90983a9..3ec4c57e59b1 100644 --- a/internal/adapters/terraform/cloudstack/compute/adapt_test.go +++ b/pkg/iac/adapters/terraform/cloudstack/compute/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/cloudstack/compute" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/digitalocean/adapt.go 
b/pkg/iac/adapters/terraform/digitalocean/adapt.go similarity index 66% rename from internal/adapters/terraform/digitalocean/adapt.go rename to pkg/iac/adapters/terraform/digitalocean/adapt.go index 1792d1be2fd9..bef8e6ce8137 100644 --- a/internal/adapters/terraform/digitalocean/adapt.go +++ b/pkg/iac/adapters/terraform/digitalocean/adapt.go @@ -3,8 +3,8 @@ package digitalocean import ( "github.com/aquasecurity/defsec/pkg/providers/digitalocean" "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/trivy/internal/adapters/terraform/digitalocean/compute" - "github.com/aquasecurity/trivy/internal/adapters/terraform/digitalocean/spaces" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/digitalocean/compute" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/digitalocean/spaces" ) func Adapt(modules terraform.Modules) digitalocean.DigitalOcean { diff --git a/internal/adapters/terraform/digitalocean/compute/adapt.go b/pkg/iac/adapters/terraform/digitalocean/compute/adapt.go similarity index 100% rename from internal/adapters/terraform/digitalocean/compute/adapt.go rename to pkg/iac/adapters/terraform/digitalocean/compute/adapt.go diff --git a/internal/adapters/terraform/digitalocean/compute/adapt_test.go b/pkg/iac/adapters/terraform/digitalocean/compute/adapt_test.go similarity index 99% rename from internal/adapters/terraform/digitalocean/compute/adapt_test.go rename to pkg/iac/adapters/terraform/digitalocean/compute/adapt_test.go index f39685ba22c1..8b2ec6b15e8b 100644 --- a/internal/adapters/terraform/digitalocean/compute/adapt_test.go +++ b/pkg/iac/adapters/terraform/digitalocean/compute/adapt_test.go @@ -4,12 +4,12 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/digitalocean/compute" 
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/digitalocean/spaces/adapt.go b/pkg/iac/adapters/terraform/digitalocean/spaces/adapt.go similarity index 100% rename from internal/adapters/terraform/digitalocean/spaces/adapt.go rename to pkg/iac/adapters/terraform/digitalocean/spaces/adapt.go diff --git a/internal/adapters/terraform/digitalocean/spaces/adapt_test.go b/pkg/iac/adapters/terraform/digitalocean/spaces/adapt_test.go similarity index 98% rename from internal/adapters/terraform/digitalocean/spaces/adapt_test.go rename to pkg/iac/adapters/terraform/digitalocean/spaces/adapt_test.go index c1b0378c0d98..162cc2f7cc13 100644 --- a/internal/adapters/terraform/digitalocean/spaces/adapt_test.go +++ b/pkg/iac/adapters/terraform/digitalocean/spaces/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/digitalocean/spaces" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/github/adapt.go b/pkg/iac/adapters/terraform/github/adapt.go similarity index 60% rename from internal/adapters/terraform/github/adapt.go rename to pkg/iac/adapters/terraform/github/adapt.go index f5d6dc5eb131..7ef290e1768d 100644 --- a/internal/adapters/terraform/github/adapt.go +++ b/pkg/iac/adapters/terraform/github/adapt.go @@ -3,9 +3,9 @@ package github import ( "github.com/aquasecurity/defsec/pkg/providers/github" 
"github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/trivy/internal/adapters/terraform/github/branch_protections" - "github.com/aquasecurity/trivy/internal/adapters/terraform/github/repositories" - "github.com/aquasecurity/trivy/internal/adapters/terraform/github/secrets" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/github/branch_protections" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/github/repositories" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/github/secrets" ) func Adapt(modules terraform.Modules) github.GitHub { diff --git a/internal/adapters/terraform/github/branch_protections/adapt.go b/pkg/iac/adapters/terraform/github/branch_protections/adapt.go similarity index 100% rename from internal/adapters/terraform/github/branch_protections/adapt.go rename to pkg/iac/adapters/terraform/github/branch_protections/adapt.go diff --git a/internal/adapters/terraform/github/branch_protections/adapt_test.go b/pkg/iac/adapters/terraform/github/branch_protections/adapt_test.go similarity index 95% rename from internal/adapters/terraform/github/branch_protections/adapt_test.go rename to pkg/iac/adapters/terraform/github/branch_protections/adapt_test.go index 54258ee49b78..4317f84bde9e 100644 --- a/internal/adapters/terraform/github/branch_protections/adapt_test.go +++ b/pkg/iac/adapters/terraform/github/branch_protections/adapt_test.go @@ -3,7 +3,7 @@ package branch_protections import ( "testing" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/github/repositories/adapt.go b/pkg/iac/adapters/terraform/github/repositories/adapt.go similarity index 100% rename from 
internal/adapters/terraform/github/repositories/adapt.go rename to pkg/iac/adapters/terraform/github/repositories/adapt.go diff --git a/internal/adapters/terraform/github/repositories/adapt_test.go b/pkg/iac/adapters/terraform/github/repositories/adapt_test.go similarity index 97% rename from internal/adapters/terraform/github/repositories/adapt_test.go rename to pkg/iac/adapters/terraform/github/repositories/adapt_test.go index 9b0d91221360..2199744bbde9 100644 --- a/internal/adapters/terraform/github/repositories/adapt_test.go +++ b/pkg/iac/adapters/terraform/github/repositories/adapt_test.go @@ -3,7 +3,7 @@ package repositories import ( "testing" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/github/secrets/adapt.go b/pkg/iac/adapters/terraform/github/secrets/adapt.go similarity index 100% rename from internal/adapters/terraform/github/secrets/adapt.go rename to pkg/iac/adapters/terraform/github/secrets/adapt.go diff --git a/internal/adapters/terraform/github/secrets/adapt_test.go b/pkg/iac/adapters/terraform/github/secrets/adapt_test.go similarity index 96% rename from internal/adapters/terraform/github/secrets/adapt_test.go rename to pkg/iac/adapters/terraform/github/secrets/adapt_test.go index d45748dc87d2..491ace454e45 100644 --- a/internal/adapters/terraform/github/secrets/adapt_test.go +++ b/pkg/iac/adapters/terraform/github/secrets/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/github" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - 
"github.com/aquasecurity/trivy/test/testutil" ) diff --git a/pkg/iac/adapters/terraform/google/adapt.go b/pkg/iac/adapters/terraform/google/adapt.go new file mode 100644 index 000000000000..f1289a0c2fc3 --- /dev/null +++ b/pkg/iac/adapters/terraform/google/adapt.go @@ -0,0 +1,27 @@ +package google + +import ( + "github.com/aquasecurity/defsec/pkg/providers/google" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/google/bigquery" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/google/compute" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/google/dns" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/google/gke" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/google/iam" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/google/kms" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/google/sql" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/google/storage" +) + +func Adapt(modules terraform.Modules) google.Google { + return google.Google{ + BigQuery: bigquery.Adapt(modules), + Compute: compute.Adapt(modules), + DNS: dns.Adapt(modules), + GKE: gke.Adapt(modules), + KMS: kms.Adapt(modules), + IAM: iam.Adapt(modules), + SQL: sql.Adapt(modules), + Storage: storage.Adapt(modules), + } +} diff --git a/internal/adapters/terraform/google/bigquery/adapt.go b/pkg/iac/adapters/terraform/google/bigquery/adapt.go similarity index 100% rename from internal/adapters/terraform/google/bigquery/adapt.go rename to pkg/iac/adapters/terraform/google/bigquery/adapt.go diff --git a/internal/adapters/terraform/google/bigquery/adapt_test.go b/pkg/iac/adapters/terraform/google/bigquery/adapt_test.go similarity index 98% rename from internal/adapters/terraform/google/bigquery/adapt_test.go rename to pkg/iac/adapters/terraform/google/bigquery/adapt_test.go index 
320a5f478752..d49ab9c308b3 100644 --- a/internal/adapters/terraform/google/bigquery/adapt_test.go +++ b/pkg/iac/adapters/terraform/google/bigquery/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/google/bigquery" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/google/compute/adapt.go b/pkg/iac/adapters/terraform/google/compute/adapt.go similarity index 100% rename from internal/adapters/terraform/google/compute/adapt.go rename to pkg/iac/adapters/terraform/google/compute/adapt.go diff --git a/internal/adapters/terraform/google/compute/adapt_test.go b/pkg/iac/adapters/terraform/google/compute/adapt_test.go similarity index 99% rename from internal/adapters/terraform/google/compute/adapt_test.go rename to pkg/iac/adapters/terraform/google/compute/adapt_test.go index ff09ae036b98..a30fa260281b 100644 --- a/internal/adapters/terraform/google/compute/adapt_test.go +++ b/pkg/iac/adapters/terraform/google/compute/adapt_test.go @@ -3,7 +3,7 @@ package compute import ( "testing" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/internal/adapters/terraform/google/compute/disks.go b/pkg/iac/adapters/terraform/google/compute/disks.go similarity index 100% rename from internal/adapters/terraform/google/compute/disks.go rename to pkg/iac/adapters/terraform/google/compute/disks.go diff --git a/internal/adapters/terraform/google/compute/disks_test.go 
b/pkg/iac/adapters/terraform/google/compute/disks_test.go similarity index 97% rename from internal/adapters/terraform/google/compute/disks_test.go rename to pkg/iac/adapters/terraform/google/compute/disks_test.go index da80933c326d..d6d47074e26b 100644 --- a/internal/adapters/terraform/google/compute/disks_test.go +++ b/pkg/iac/adapters/terraform/google/compute/disks_test.go @@ -4,10 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/google/compute" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/google/compute/instances.go b/pkg/iac/adapters/terraform/google/compute/instances.go similarity index 100% rename from internal/adapters/terraform/google/compute/instances.go rename to pkg/iac/adapters/terraform/google/compute/instances.go diff --git a/internal/adapters/terraform/google/compute/instances_test.go b/pkg/iac/adapters/terraform/google/compute/instances_test.go similarity index 98% rename from internal/adapters/terraform/google/compute/instances_test.go rename to pkg/iac/adapters/terraform/google/compute/instances_test.go index 8dc61c0d173c..3bd595386100 100644 --- a/internal/adapters/terraform/google/compute/instances_test.go +++ b/pkg/iac/adapters/terraform/google/compute/instances_test.go @@ -4,10 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/google/compute" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/google/compute/metadata.go 
b/pkg/iac/adapters/terraform/google/compute/metadata.go similarity index 100% rename from internal/adapters/terraform/google/compute/metadata.go rename to pkg/iac/adapters/terraform/google/compute/metadata.go diff --git a/internal/adapters/terraform/google/compute/metadata_test.go b/pkg/iac/adapters/terraform/google/compute/metadata_test.go similarity index 94% rename from internal/adapters/terraform/google/compute/metadata_test.go rename to pkg/iac/adapters/terraform/google/compute/metadata_test.go index 3067eef746fb..1dc80360d51e 100644 --- a/internal/adapters/terraform/google/compute/metadata_test.go +++ b/pkg/iac/adapters/terraform/google/compute/metadata_test.go @@ -4,10 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/google/compute" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/google/compute/networks.go b/pkg/iac/adapters/terraform/google/compute/networks.go similarity index 100% rename from internal/adapters/terraform/google/compute/networks.go rename to pkg/iac/adapters/terraform/google/compute/networks.go diff --git a/internal/adapters/terraform/google/compute/networks_test.go b/pkg/iac/adapters/terraform/google/compute/networks_test.go similarity index 98% rename from internal/adapters/terraform/google/compute/networks_test.go rename to pkg/iac/adapters/terraform/google/compute/networks_test.go index 2bae86539afb..cf60d99a03f6 100644 --- a/internal/adapters/terraform/google/compute/networks_test.go +++ b/pkg/iac/adapters/terraform/google/compute/networks_test.go @@ -4,10 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" 
"github.com/aquasecurity/defsec/pkg/providers/google/compute" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/google/compute/ssl.go b/pkg/iac/adapters/terraform/google/compute/ssl.go similarity index 100% rename from internal/adapters/terraform/google/compute/ssl.go rename to pkg/iac/adapters/terraform/google/compute/ssl.go diff --git a/internal/adapters/terraform/google/compute/ssl_test.go b/pkg/iac/adapters/terraform/google/compute/ssl_test.go similarity index 95% rename from internal/adapters/terraform/google/compute/ssl_test.go rename to pkg/iac/adapters/terraform/google/compute/ssl_test.go index 1b0224def50f..dbf04f6a6027 100644 --- a/internal/adapters/terraform/google/compute/ssl_test.go +++ b/pkg/iac/adapters/terraform/google/compute/ssl_test.go @@ -4,10 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/google/compute" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/google/dns/adapt.go b/pkg/iac/adapters/terraform/google/dns/adapt.go similarity index 100% rename from internal/adapters/terraform/google/dns/adapt.go rename to pkg/iac/adapters/terraform/google/dns/adapt.go diff --git a/internal/adapters/terraform/google/dns/adapt_test.go b/pkg/iac/adapters/terraform/google/dns/adapt_test.go similarity index 97% rename from internal/adapters/terraform/google/dns/adapt_test.go rename to pkg/iac/adapters/terraform/google/dns/adapt_test.go index eedae6f87e76..adc5de3d3405 100644 --- a/internal/adapters/terraform/google/dns/adapt_test.go +++ b/pkg/iac/adapters/terraform/google/dns/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" 
defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/google/dns" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/google/gke/adapt.go b/pkg/iac/adapters/terraform/google/gke/adapt.go similarity index 100% rename from internal/adapters/terraform/google/gke/adapt.go rename to pkg/iac/adapters/terraform/google/gke/adapt.go diff --git a/internal/adapters/terraform/google/gke/adapt_test.go b/pkg/iac/adapters/terraform/google/gke/adapt_test.go similarity index 99% rename from internal/adapters/terraform/google/gke/adapt_test.go rename to pkg/iac/adapters/terraform/google/gke/adapt_test.go index da491bd425da..e0ef4ea5decd 100644 --- a/internal/adapters/terraform/google/gke/adapt_test.go +++ b/pkg/iac/adapters/terraform/google/gke/adapt_test.go @@ -4,12 +4,12 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/google/gke" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/google/iam/adapt.go b/pkg/iac/adapters/terraform/google/iam/adapt.go similarity index 100% rename from internal/adapters/terraform/google/iam/adapt.go rename to pkg/iac/adapters/terraform/google/iam/adapt.go diff --git a/internal/adapters/terraform/google/iam/adapt_test.go b/pkg/iac/adapters/terraform/google/iam/adapt_test.go similarity index 99% rename from 
internal/adapters/terraform/google/iam/adapt_test.go rename to pkg/iac/adapters/terraform/google/iam/adapt_test.go index e49ba44a466c..bed182f1acf4 100644 --- a/internal/adapters/terraform/google/iam/adapt_test.go +++ b/pkg/iac/adapters/terraform/google/iam/adapt_test.go @@ -4,9 +4,9 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/google/iam" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/google/iam/convert.go b/pkg/iac/adapters/terraform/google/iam/convert.go similarity index 100% rename from internal/adapters/terraform/google/iam/convert.go rename to pkg/iac/adapters/terraform/google/iam/convert.go diff --git a/internal/adapters/terraform/google/iam/folder_iam.go b/pkg/iac/adapters/terraform/google/iam/folder_iam.go similarity index 100% rename from internal/adapters/terraform/google/iam/folder_iam.go rename to pkg/iac/adapters/terraform/google/iam/folder_iam.go diff --git a/internal/adapters/terraform/google/iam/folders.go b/pkg/iac/adapters/terraform/google/iam/folders.go similarity index 100% rename from internal/adapters/terraform/google/iam/folders.go rename to pkg/iac/adapters/terraform/google/iam/folders.go diff --git a/internal/adapters/terraform/google/iam/org_iam.go b/pkg/iac/adapters/terraform/google/iam/org_iam.go similarity index 100% rename from internal/adapters/terraform/google/iam/org_iam.go rename to pkg/iac/adapters/terraform/google/iam/org_iam.go diff --git a/internal/adapters/terraform/google/iam/project_iam.go b/pkg/iac/adapters/terraform/google/iam/project_iam.go similarity index 100% rename from internal/adapters/terraform/google/iam/project_iam.go rename 
to pkg/iac/adapters/terraform/google/iam/project_iam.go diff --git a/internal/adapters/terraform/google/iam/project_iam_test.go b/pkg/iac/adapters/terraform/google/iam/project_iam_test.go similarity index 95% rename from internal/adapters/terraform/google/iam/project_iam_test.go rename to pkg/iac/adapters/terraform/google/iam/project_iam_test.go index 3d3270ca4637..44aebb9bbed7 100644 --- a/internal/adapters/terraform/google/iam/project_iam_test.go +++ b/pkg/iac/adapters/terraform/google/iam/project_iam_test.go @@ -4,10 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/google/iam" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/google/iam/projects.go b/pkg/iac/adapters/terraform/google/iam/projects.go similarity index 100% rename from internal/adapters/terraform/google/iam/projects.go rename to pkg/iac/adapters/terraform/google/iam/projects.go diff --git a/internal/adapters/terraform/google/iam/workload_identity_pool_providers.go b/pkg/iac/adapters/terraform/google/iam/workload_identity_pool_providers.go similarity index 100% rename from internal/adapters/terraform/google/iam/workload_identity_pool_providers.go rename to pkg/iac/adapters/terraform/google/iam/workload_identity_pool_providers.go diff --git a/internal/adapters/terraform/google/kms/adapt.go b/pkg/iac/adapters/terraform/google/kms/adapt.go similarity index 100% rename from internal/adapters/terraform/google/kms/adapt.go rename to pkg/iac/adapters/terraform/google/kms/adapt.go diff --git a/internal/adapters/terraform/google/kms/adapt_test.go b/pkg/iac/adapters/terraform/google/kms/adapt_test.go similarity index 97% rename from internal/adapters/terraform/google/kms/adapt_test.go rename to 
pkg/iac/adapters/terraform/google/kms/adapt_test.go index 09043e00c049..28176bf96b7a 100644 --- a/internal/adapters/terraform/google/kms/adapt_test.go +++ b/pkg/iac/adapters/terraform/google/kms/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/google/kms" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/google/sql/adapt.go b/pkg/iac/adapters/terraform/google/sql/adapt.go similarity index 100% rename from internal/adapters/terraform/google/sql/adapt.go rename to pkg/iac/adapters/terraform/google/sql/adapt.go diff --git a/internal/adapters/terraform/google/sql/adapt_test.go b/pkg/iac/adapters/terraform/google/sql/adapt_test.go similarity index 99% rename from internal/adapters/terraform/google/sql/adapt_test.go rename to pkg/iac/adapters/terraform/google/sql/adapt_test.go index b6a7060f2830..8f4f11a9c0ab 100644 --- a/internal/adapters/terraform/google/sql/adapt_test.go +++ b/pkg/iac/adapters/terraform/google/sql/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/google/sql" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/google/storage/adapt.go b/pkg/iac/adapters/terraform/google/storage/adapt.go similarity index 100% rename from 
internal/adapters/terraform/google/storage/adapt.go rename to pkg/iac/adapters/terraform/google/storage/adapt.go diff --git a/internal/adapters/terraform/google/storage/adapt_test.go b/pkg/iac/adapters/terraform/google/storage/adapt_test.go similarity index 98% rename from internal/adapters/terraform/google/storage/adapt_test.go rename to pkg/iac/adapters/terraform/google/storage/adapt_test.go index 2dcef331d1c6..dd263258a6e3 100644 --- a/internal/adapters/terraform/google/storage/adapt_test.go +++ b/pkg/iac/adapters/terraform/google/storage/adapt_test.go @@ -4,13 +4,13 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/google/iam" "github.com/aquasecurity/defsec/pkg/providers/google/storage" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/google/storage/iam.go b/pkg/iac/adapters/terraform/google/storage/iam.go similarity index 89% rename from internal/adapters/terraform/google/storage/iam.go rename to pkg/iac/adapters/terraform/google/storage/iam.go index e8c13a4b82cc..55d1e928e6d4 100644 --- a/internal/adapters/terraform/google/storage/iam.go +++ b/pkg/iac/adapters/terraform/google/storage/iam.go @@ -2,7 +2,7 @@ package storage import ( iamTypes "github.com/aquasecurity/defsec/pkg/providers/google/iam" - "github.com/aquasecurity/trivy/internal/adapters/terraform/google/iam" + iam2 "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/google/iam" ) type parentedBinding struct { @@ -48,7 +48,7 @@ func (a *adapter) adaptBindings() { continue } - parented.bindings = iam.ParsePolicyBlock(policyBlock) + parented.bindings = 
iam2.ParsePolicyBlock(policyBlock) a.bindings = append(a.bindings, parented) } @@ -56,7 +56,7 @@ func (a *adapter) adaptBindings() { var parented parentedBinding parented.blockID = iamBlock.ID() - parented.bindings = []iamTypes.Binding{iam.AdaptBinding(iamBlock, a.modules)} + parented.bindings = []iamTypes.Binding{iam2.AdaptBinding(iamBlock, a.modules)} bucketAttr := iamBlock.GetAttribute("bucket") if bucketAttr.IsString() { @@ -79,7 +79,7 @@ func (a *adapter) adaptMembers() { var parented parentedMember parented.blockID = iamBlock.ID() - parented.member = iam.AdaptMember(iamBlock, a.modules) + parented.member = iam2.AdaptMember(iamBlock, a.modules) bucketAttr := iamBlock.GetAttribute("bucket") if bucketAttr.IsString() { diff --git a/internal/adapters/terraform/kubernetes/adapt.go b/pkg/iac/adapters/terraform/kubernetes/adapt.go similarity index 100% rename from internal/adapters/terraform/kubernetes/adapt.go rename to pkg/iac/adapters/terraform/kubernetes/adapt.go diff --git a/internal/adapters/terraform/kubernetes/adapt_test.go b/pkg/iac/adapters/terraform/kubernetes/adapt_test.go similarity index 100% rename from internal/adapters/terraform/kubernetes/adapt_test.go rename to pkg/iac/adapters/terraform/kubernetes/adapt_test.go diff --git a/internal/adapters/terraform/nifcloud/computing/adapt.go b/pkg/iac/adapters/terraform/nifcloud/computing/adapt.go similarity index 100% rename from internal/adapters/terraform/nifcloud/computing/adapt.go rename to pkg/iac/adapters/terraform/nifcloud/computing/adapt.go diff --git a/internal/adapters/terraform/nifcloud/computing/adapt_test.go b/pkg/iac/adapters/terraform/nifcloud/computing/adapt_test.go similarity index 96% rename from internal/adapters/terraform/nifcloud/computing/adapt_test.go rename to pkg/iac/adapters/terraform/nifcloud/computing/adapt_test.go index d92848402304..be6efb30493f 100644 --- a/internal/adapters/terraform/nifcloud/computing/adapt_test.go +++ 
b/pkg/iac/adapters/terraform/nifcloud/computing/adapt_test.go @@ -3,7 +3,7 @@ package computing import ( "testing" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/nifcloud/computing/instance.go b/pkg/iac/adapters/terraform/nifcloud/computing/instance.go similarity index 100% rename from internal/adapters/terraform/nifcloud/computing/instance.go rename to pkg/iac/adapters/terraform/nifcloud/computing/instance.go diff --git a/internal/adapters/terraform/nifcloud/computing/instance_test.go b/pkg/iac/adapters/terraform/nifcloud/computing/instance_test.go similarity index 95% rename from internal/adapters/terraform/nifcloud/computing/instance_test.go rename to pkg/iac/adapters/terraform/nifcloud/computing/instance_test.go index 45832714bee0..ff391f0078e9 100644 --- a/internal/adapters/terraform/nifcloud/computing/instance_test.go +++ b/pkg/iac/adapters/terraform/nifcloud/computing/instance_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/nifcloud/computing" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/nifcloud/computing/security_group.go b/pkg/iac/adapters/terraform/nifcloud/computing/security_group.go similarity index 100% rename from internal/adapters/terraform/nifcloud/computing/security_group.go rename to pkg/iac/adapters/terraform/nifcloud/computing/security_group.go diff --git a/internal/adapters/terraform/nifcloud/computing/security_group_test.go 
b/pkg/iac/adapters/terraform/nifcloud/computing/security_group_test.go similarity index 97% rename from internal/adapters/terraform/nifcloud/computing/security_group_test.go rename to pkg/iac/adapters/terraform/nifcloud/computing/security_group_test.go index 70800804b76d..2e226b3e3cf4 100644 --- a/internal/adapters/terraform/nifcloud/computing/security_group_test.go +++ b/pkg/iac/adapters/terraform/nifcloud/computing/security_group_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/nifcloud/computing" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/nifcloud/dns/adapt.go b/pkg/iac/adapters/terraform/nifcloud/dns/adapt.go similarity index 100% rename from internal/adapters/terraform/nifcloud/dns/adapt.go rename to pkg/iac/adapters/terraform/nifcloud/dns/adapt.go diff --git a/internal/adapters/terraform/nifcloud/dns/adapt_test.go b/pkg/iac/adapters/terraform/nifcloud/dns/adapt_test.go similarity index 90% rename from internal/adapters/terraform/nifcloud/dns/adapt_test.go rename to pkg/iac/adapters/terraform/nifcloud/dns/adapt_test.go index e5e60e9d9853..38ff119054bd 100644 --- a/internal/adapters/terraform/nifcloud/dns/adapt_test.go +++ b/pkg/iac/adapters/terraform/nifcloud/dns/adapt_test.go @@ -3,7 +3,7 @@ package dns import ( "testing" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/nifcloud/dns/record.go b/pkg/iac/adapters/terraform/nifcloud/dns/record.go similarity index 100% rename from 
internal/adapters/terraform/nifcloud/dns/record.go rename to pkg/iac/adapters/terraform/nifcloud/dns/record.go diff --git a/internal/adapters/terraform/nifcloud/dns/record_test.go b/pkg/iac/adapters/terraform/nifcloud/dns/record_test.go similarity index 94% rename from internal/adapters/terraform/nifcloud/dns/record_test.go rename to pkg/iac/adapters/terraform/nifcloud/dns/record_test.go index ccf7e4cc7bd1..4e16b11f4cb8 100644 --- a/internal/adapters/terraform/nifcloud/dns/record_test.go +++ b/pkg/iac/adapters/terraform/nifcloud/dns/record_test.go @@ -5,8 +5,7 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/nifcloud/dns" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/nifcloud/nas/adapt.go b/pkg/iac/adapters/terraform/nifcloud/nas/adapt.go similarity index 100% rename from internal/adapters/terraform/nifcloud/nas/adapt.go rename to pkg/iac/adapters/terraform/nifcloud/nas/adapt.go diff --git a/internal/adapters/terraform/nifcloud/nas/adapt_test.go b/pkg/iac/adapters/terraform/nifcloud/nas/adapt_test.go similarity index 93% rename from internal/adapters/terraform/nifcloud/nas/adapt_test.go rename to pkg/iac/adapters/terraform/nifcloud/nas/adapt_test.go index 0998303dbed6..b43b874974b8 100644 --- a/internal/adapters/terraform/nifcloud/nas/adapt_test.go +++ b/pkg/iac/adapters/terraform/nifcloud/nas/adapt_test.go @@ -3,7 +3,7 @@ package nas import ( "testing" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/nifcloud/nas/nas_instance.go 
b/pkg/iac/adapters/terraform/nifcloud/nas/nas_instance.go similarity index 100% rename from internal/adapters/terraform/nifcloud/nas/nas_instance.go rename to pkg/iac/adapters/terraform/nifcloud/nas/nas_instance.go diff --git a/internal/adapters/terraform/nifcloud/nas/nas_instance_test.go b/pkg/iac/adapters/terraform/nifcloud/nas/nas_instance_test.go similarity index 94% rename from internal/adapters/terraform/nifcloud/nas/nas_instance_test.go rename to pkg/iac/adapters/terraform/nifcloud/nas/nas_instance_test.go index 26b9e1408a2f..c1ba1f884a8c 100644 --- a/internal/adapters/terraform/nifcloud/nas/nas_instance_test.go +++ b/pkg/iac/adapters/terraform/nifcloud/nas/nas_instance_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/nifcloud/nas" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/nifcloud/nas/nas_security_group.go b/pkg/iac/adapters/terraform/nifcloud/nas/nas_security_group.go similarity index 100% rename from internal/adapters/terraform/nifcloud/nas/nas_security_group.go rename to pkg/iac/adapters/terraform/nifcloud/nas/nas_security_group.go diff --git a/internal/adapters/terraform/nifcloud/nas/nas_security_group_test.go b/pkg/iac/adapters/terraform/nifcloud/nas/nas_security_group_test.go similarity index 95% rename from internal/adapters/terraform/nifcloud/nas/nas_security_group_test.go rename to pkg/iac/adapters/terraform/nifcloud/nas/nas_security_group_test.go index 01e93aade40b..e0e3cc0a2173 100644 --- a/internal/adapters/terraform/nifcloud/nas/nas_security_group_test.go +++ b/pkg/iac/adapters/terraform/nifcloud/nas/nas_security_group_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes 
"github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/nifcloud/nas" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/nifcloud/network/adapt.go b/pkg/iac/adapters/terraform/nifcloud/network/adapt.go similarity index 100% rename from internal/adapters/terraform/nifcloud/network/adapt.go rename to pkg/iac/adapters/terraform/nifcloud/network/adapt.go diff --git a/internal/adapters/terraform/nifcloud/network/adapt_test.go b/pkg/iac/adapters/terraform/nifcloud/network/adapt_test.go similarity index 97% rename from internal/adapters/terraform/nifcloud/network/adapt_test.go rename to pkg/iac/adapters/terraform/nifcloud/network/adapt_test.go index 9255e7e16d3b..9a4277b28558 100644 --- a/internal/adapters/terraform/nifcloud/network/adapt_test.go +++ b/pkg/iac/adapters/terraform/nifcloud/network/adapt_test.go @@ -3,7 +3,7 @@ package network import ( "testing" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/nifcloud/network/elastic_load_balancer.go b/pkg/iac/adapters/terraform/nifcloud/network/elastic_load_balancer.go similarity index 100% rename from internal/adapters/terraform/nifcloud/network/elastic_load_balancer.go rename to pkg/iac/adapters/terraform/nifcloud/network/elastic_load_balancer.go diff --git a/internal/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go b/pkg/iac/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go similarity index 96% rename from internal/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go rename to 
pkg/iac/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go index 06bb3a96e78a..d7d90ec12753 100644 --- a/internal/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go +++ b/pkg/iac/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go @@ -5,8 +5,7 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/nifcloud/network/load_balancer.go b/pkg/iac/adapters/terraform/nifcloud/network/load_balancer.go similarity index 100% rename from internal/adapters/terraform/nifcloud/network/load_balancer.go rename to pkg/iac/adapters/terraform/nifcloud/network/load_balancer.go diff --git a/internal/adapters/terraform/nifcloud/network/load_balancer_test.go b/pkg/iac/adapters/terraform/nifcloud/network/load_balancer_test.go similarity index 96% rename from internal/adapters/terraform/nifcloud/network/load_balancer_test.go rename to pkg/iac/adapters/terraform/nifcloud/network/load_balancer_test.go index cbcebb11d774..81cc3c11cfe0 100644 --- a/internal/adapters/terraform/nifcloud/network/load_balancer_test.go +++ b/pkg/iac/adapters/terraform/nifcloud/network/load_balancer_test.go @@ -5,8 +5,7 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/nifcloud/network/router.go b/pkg/iac/adapters/terraform/nifcloud/network/router.go similarity index 100% rename 
from internal/adapters/terraform/nifcloud/network/router.go rename to pkg/iac/adapters/terraform/nifcloud/network/router.go diff --git a/internal/adapters/terraform/nifcloud/network/router_test.go b/pkg/iac/adapters/terraform/nifcloud/network/router_test.go similarity index 95% rename from internal/adapters/terraform/nifcloud/network/router_test.go rename to pkg/iac/adapters/terraform/nifcloud/network/router_test.go index 3c2fe55ab92b..e050f139e98a 100644 --- a/internal/adapters/terraform/nifcloud/network/router_test.go +++ b/pkg/iac/adapters/terraform/nifcloud/network/router_test.go @@ -5,8 +5,7 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/nifcloud/network/vpn_gateway.go b/pkg/iac/adapters/terraform/nifcloud/network/vpn_gateway.go similarity index 100% rename from internal/adapters/terraform/nifcloud/network/vpn_gateway.go rename to pkg/iac/adapters/terraform/nifcloud/network/vpn_gateway.go diff --git a/internal/adapters/terraform/nifcloud/network/vpn_gateway_test.go b/pkg/iac/adapters/terraform/nifcloud/network/vpn_gateway_test.go similarity index 94% rename from internal/adapters/terraform/nifcloud/network/vpn_gateway_test.go rename to pkg/iac/adapters/terraform/nifcloud/network/vpn_gateway_test.go index d589bd2fd7c6..2a81c2f50589 100644 --- a/internal/adapters/terraform/nifcloud/network/vpn_gateway_test.go +++ b/pkg/iac/adapters/terraform/nifcloud/network/vpn_gateway_test.go @@ -5,8 +5,7 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - 
"github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/nifcloud/nifcloud.go b/pkg/iac/adapters/terraform/nifcloud/nifcloud.go similarity index 50% rename from internal/adapters/terraform/nifcloud/nifcloud.go rename to pkg/iac/adapters/terraform/nifcloud/nifcloud.go index 8c9ae16a4dc4..e456188805f5 100644 --- a/internal/adapters/terraform/nifcloud/nifcloud.go +++ b/pkg/iac/adapters/terraform/nifcloud/nifcloud.go @@ -3,12 +3,12 @@ package nifcloud import ( "github.com/aquasecurity/defsec/pkg/providers/nifcloud" "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/computing" - "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/dns" - "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/nas" - "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/network" - "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/rdb" - "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/sslcertificate" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/nifcloud/computing" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/nifcloud/dns" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/nifcloud/nas" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/nifcloud/network" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/nifcloud/rdb" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/nifcloud/sslcertificate" ) func Adapt(modules terraform.Modules) nifcloud.Nifcloud { diff --git a/internal/adapters/terraform/nifcloud/rdb/adapt.go b/pkg/iac/adapters/terraform/nifcloud/rdb/adapt.go similarity 
index 100% rename from internal/adapters/terraform/nifcloud/rdb/adapt.go rename to pkg/iac/adapters/terraform/nifcloud/rdb/adapt.go diff --git a/internal/adapters/terraform/nifcloud/rdb/adapt_test.go b/pkg/iac/adapters/terraform/nifcloud/rdb/adapt_test.go similarity index 96% rename from internal/adapters/terraform/nifcloud/rdb/adapt_test.go rename to pkg/iac/adapters/terraform/nifcloud/rdb/adapt_test.go index ab15a2f10747..5360773bd505 100644 --- a/internal/adapters/terraform/nifcloud/rdb/adapt_test.go +++ b/pkg/iac/adapters/terraform/nifcloud/rdb/adapt_test.go @@ -3,7 +3,7 @@ package rdb import ( "testing" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/nifcloud/rdb/db_instance.go b/pkg/iac/adapters/terraform/nifcloud/rdb/db_instance.go similarity index 100% rename from internal/adapters/terraform/nifcloud/rdb/db_instance.go rename to pkg/iac/adapters/terraform/nifcloud/rdb/db_instance.go diff --git a/internal/adapters/terraform/nifcloud/rdb/db_instance_test.go b/pkg/iac/adapters/terraform/nifcloud/rdb/db_instance_test.go similarity index 96% rename from internal/adapters/terraform/nifcloud/rdb/db_instance_test.go rename to pkg/iac/adapters/terraform/nifcloud/rdb/db_instance_test.go index 5878dc3fdc14..ed5b69de0621 100644 --- a/internal/adapters/terraform/nifcloud/rdb/db_instance_test.go +++ b/pkg/iac/adapters/terraform/nifcloud/rdb/db_instance_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/nifcloud/rdb" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - 
"github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/nifcloud/rdb/db_security_group.go b/pkg/iac/adapters/terraform/nifcloud/rdb/db_security_group.go similarity index 100% rename from internal/adapters/terraform/nifcloud/rdb/db_security_group.go rename to pkg/iac/adapters/terraform/nifcloud/rdb/db_security_group.go diff --git a/internal/adapters/terraform/nifcloud/rdb/db_security_group_test.go b/pkg/iac/adapters/terraform/nifcloud/rdb/db_security_group_test.go similarity index 95% rename from internal/adapters/terraform/nifcloud/rdb/db_security_group_test.go rename to pkg/iac/adapters/terraform/nifcloud/rdb/db_security_group_test.go index 148fe2cc8ddb..611491caa0ed 100644 --- a/internal/adapters/terraform/nifcloud/rdb/db_security_group_test.go +++ b/pkg/iac/adapters/terraform/nifcloud/rdb/db_security_group_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/nifcloud/rdb" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/nifcloud/sslcertificate/adapt.go b/pkg/iac/adapters/terraform/nifcloud/sslcertificate/adapt.go similarity index 100% rename from internal/adapters/terraform/nifcloud/sslcertificate/adapt.go rename to pkg/iac/adapters/terraform/nifcloud/sslcertificate/adapt.go diff --git a/internal/adapters/terraform/nifcloud/sslcertificate/adapt_test.go b/pkg/iac/adapters/terraform/nifcloud/sslcertificate/adapt_test.go similarity index 89% rename from internal/adapters/terraform/nifcloud/sslcertificate/adapt_test.go rename to pkg/iac/adapters/terraform/nifcloud/sslcertificate/adapt_test.go index 9483467e47cc..0c044cb049a9 100644 --- a/internal/adapters/terraform/nifcloud/sslcertificate/adapt_test.go +++ 
b/pkg/iac/adapters/terraform/nifcloud/sslcertificate/adapt_test.go @@ -3,7 +3,7 @@ package sslcertificate import ( "testing" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate.go b/pkg/iac/adapters/terraform/nifcloud/sslcertificate/server_certificate.go similarity index 100% rename from internal/adapters/terraform/nifcloud/sslcertificate/server_certificate.go rename to pkg/iac/adapters/terraform/nifcloud/sslcertificate/server_certificate.go diff --git a/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go b/pkg/iac/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go similarity index 96% rename from internal/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go rename to pkg/iac/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go index 661e452a019d..84255d836d2a 100644 --- a/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go +++ b/pkg/iac/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go @@ -6,8 +6,7 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/nifcloud/sslcertificate" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/adapters/terraform/openstack/adapt.go b/pkg/iac/adapters/terraform/openstack/adapt.go similarity index 100% rename from internal/adapters/terraform/openstack/adapt.go rename to pkg/iac/adapters/terraform/openstack/adapt.go diff --git 
a/internal/adapters/terraform/openstack/adapt_test.go b/pkg/iac/adapters/terraform/openstack/adapt_test.go similarity index 98% rename from internal/adapters/terraform/openstack/adapt_test.go rename to pkg/iac/adapters/terraform/openstack/adapt_test.go index e175f4a7ec98..0449cd9b26ab 100644 --- a/internal/adapters/terraform/openstack/adapt_test.go +++ b/pkg/iac/adapters/terraform/openstack/adapt_test.go @@ -4,11 +4,10 @@ import ( "testing" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/aquasecurity/defsec/pkg/providers/openstack" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" - "github.com/aquasecurity/trivy/test/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/openstack/networking.go b/pkg/iac/adapters/terraform/openstack/networking.go similarity index 100% rename from internal/adapters/terraform/openstack/networking.go rename to pkg/iac/adapters/terraform/openstack/networking.go diff --git a/internal/adapters/terraform/openstack/networking_test.go b/pkg/iac/adapters/terraform/openstack/networking_test.go similarity index 97% rename from internal/adapters/terraform/openstack/networking_test.go rename to pkg/iac/adapters/terraform/openstack/networking_test.go index 0010d9a70b44..8790dff42cf7 100644 --- a/internal/adapters/terraform/openstack/networking_test.go +++ b/pkg/iac/adapters/terraform/openstack/networking_test.go @@ -3,7 +3,7 @@ package openstack import ( "testing" - "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/tftestutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/internal/adapters/terraform/oracle/adapt.go 
b/pkg/iac/adapters/terraform/oracle/adapt.go similarity index 100% rename from internal/adapters/terraform/oracle/adapt.go rename to pkg/iac/adapters/terraform/oracle/adapt.go diff --git a/internal/adapters/terraform/tftestutil/testutil.go b/pkg/iac/adapters/terraform/tftestutil/testutil.go similarity index 78% rename from internal/adapters/terraform/tftestutil/testutil.go rename to pkg/iac/adapters/terraform/tftestutil/testutil.go index 27038cd18c23..731ad1a00bec 100644 --- a/internal/adapters/terraform/tftestutil/testutil.go +++ b/pkg/iac/adapters/terraform/tftestutil/testutil.go @@ -5,7 +5,7 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" "github.com/aquasecurity/trivy/test/testutil" ) @@ -13,7 +13,7 @@ func CreateModulesFromSource(t *testing.T, source, ext string) terraform.Modules fs := testutil.CreateFS(t, map[string]string{ "source" + ext: source, }) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) if err := p.ParseFS(context.TODO(), "."); err != nil { t.Fatal(err) } diff --git a/pkg/detection/detect.go b/pkg/iac/detection/detect.go similarity index 98% rename from pkg/detection/detect.go rename to pkg/iac/detection/detect.go index db1cd77d235d..6c5f965c6b30 100644 --- a/pkg/detection/detect.go +++ b/pkg/iac/detection/detect.go @@ -7,10 +7,10 @@ import ( "path/filepath" "strings" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" "gopkg.in/yaml.v3" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/azure/arm/parser/armjson" ) type FileType string diff --git a/pkg/detection/detect_test.go b/pkg/iac/detection/detect_test.go similarity index 100% rename from pkg/detection/detect_test.go rename to 
pkg/iac/detection/detect_test.go diff --git a/pkg/detection/peek.go b/pkg/iac/detection/peek.go similarity index 100% rename from pkg/detection/peek.go rename to pkg/iac/detection/peek.go diff --git a/pkg/detection/testdata/big.file b/pkg/iac/detection/testdata/big.file similarity index 100% rename from pkg/detection/testdata/big.file rename to pkg/iac/detection/testdata/big.file diff --git a/pkg/detection/testdata/small.file b/pkg/iac/detection/testdata/small.file similarity index 100% rename from pkg/detection/testdata/small.file rename to pkg/iac/detection/testdata/small.file diff --git a/pkg/scanners/azure/arm/parser/armjson/bench_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/bench_test.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/bench_test.go rename to pkg/iac/scanners/azure/arm/parser/armjson/bench_test.go diff --git a/pkg/scanners/azure/arm/parser/armjson/decode.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/decode.go rename to pkg/iac/scanners/azure/arm/parser/armjson/decode.go diff --git a/pkg/scanners/azure/arm/parser/armjson/decode_array.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode_array.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/decode_array.go rename to pkg/iac/scanners/azure/arm/parser/armjson/decode_array.go diff --git a/pkg/scanners/azure/arm/parser/armjson/decode_boolean.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode_boolean.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/decode_boolean.go rename to pkg/iac/scanners/azure/arm/parser/armjson/decode_boolean.go diff --git a/pkg/scanners/azure/arm/parser/armjson/decode_meta_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode_meta_test.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/decode_meta_test.go rename to pkg/iac/scanners/azure/arm/parser/armjson/decode_meta_test.go diff 
--git a/pkg/scanners/azure/arm/parser/armjson/decode_null.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode_null.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/decode_null.go rename to pkg/iac/scanners/azure/arm/parser/armjson/decode_null.go diff --git a/pkg/scanners/azure/arm/parser/armjson/decode_number.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode_number.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/decode_number.go rename to pkg/iac/scanners/azure/arm/parser/armjson/decode_number.go diff --git a/pkg/scanners/azure/arm/parser/armjson/decode_object.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode_object.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/decode_object.go rename to pkg/iac/scanners/azure/arm/parser/armjson/decode_object.go diff --git a/pkg/scanners/azure/arm/parser/armjson/decode_string.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode_string.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/decode_string.go rename to pkg/iac/scanners/azure/arm/parser/armjson/decode_string.go diff --git a/pkg/scanners/azure/arm/parser/armjson/kind.go b/pkg/iac/scanners/azure/arm/parser/armjson/kind.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/kind.go rename to pkg/iac/scanners/azure/arm/parser/armjson/kind.go diff --git a/pkg/scanners/azure/arm/parser/armjson/node.go b/pkg/iac/scanners/azure/arm/parser/armjson/node.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/node.go rename to pkg/iac/scanners/azure/arm/parser/armjson/node.go diff --git a/pkg/scanners/azure/arm/parser/armjson/parse.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/parse.go rename to pkg/iac/scanners/azure/arm/parser/armjson/parse.go diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_array.go 
b/pkg/iac/scanners/azure/arm/parser/armjson/parse_array.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/parse_array.go rename to pkg/iac/scanners/azure/arm/parser/armjson/parse_array.go diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_array_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_array_test.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/parse_array_test.go rename to pkg/iac/scanners/azure/arm/parser/armjson/parse_array_test.go diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_boolean.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_boolean.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/parse_boolean.go rename to pkg/iac/scanners/azure/arm/parser/armjson/parse_boolean.go diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_boolean_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_boolean_test.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/parse_boolean_test.go rename to pkg/iac/scanners/azure/arm/parser/armjson/parse_boolean_test.go diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_comment.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_comment.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/parse_comment.go rename to pkg/iac/scanners/azure/arm/parser/armjson/parse_comment.go diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_complex_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_complex_test.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/parse_complex_test.go rename to pkg/iac/scanners/azure/arm/parser/armjson/parse_complex_test.go diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_null.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_null.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/parse_null.go rename to pkg/iac/scanners/azure/arm/parser/armjson/parse_null.go diff --git 
a/pkg/scanners/azure/arm/parser/armjson/parse_null_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_null_test.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/parse_null_test.go rename to pkg/iac/scanners/azure/arm/parser/armjson/parse_null_test.go diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_number.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_number.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/parse_number.go rename to pkg/iac/scanners/azure/arm/parser/armjson/parse_number.go diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_number_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_number_test.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/parse_number_test.go rename to pkg/iac/scanners/azure/arm/parser/armjson/parse_number_test.go diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_object.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_object.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/parse_object.go rename to pkg/iac/scanners/azure/arm/parser/armjson/parse_object.go diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_object_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_object_test.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/parse_object_test.go rename to pkg/iac/scanners/azure/arm/parser/armjson/parse_object_test.go diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_string.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_string.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/parse_string.go rename to pkg/iac/scanners/azure/arm/parser/armjson/parse_string.go diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_string_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_string_test.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/parse_string_test.go rename to 
pkg/iac/scanners/azure/arm/parser/armjson/parse_string_test.go diff --git a/pkg/scanners/azure/arm/parser/armjson/parse_whitespace.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_whitespace.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/parse_whitespace.go rename to pkg/iac/scanners/azure/arm/parser/armjson/parse_whitespace.go diff --git a/pkg/scanners/azure/arm/parser/armjson/reader.go b/pkg/iac/scanners/azure/arm/parser/armjson/reader.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/reader.go rename to pkg/iac/scanners/azure/arm/parser/armjson/reader.go diff --git a/pkg/scanners/azure/arm/parser/armjson/reader_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/reader_test.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/reader_test.go rename to pkg/iac/scanners/azure/arm/parser/armjson/reader_test.go diff --git a/pkg/scanners/azure/arm/parser/armjson/unmarshal.go b/pkg/iac/scanners/azure/arm/parser/armjson/unmarshal.go similarity index 100% rename from pkg/scanners/azure/arm/parser/armjson/unmarshal.go rename to pkg/iac/scanners/azure/arm/parser/armjson/unmarshal.go diff --git a/pkg/scanners/azure/arm/parser/parser.go b/pkg/iac/scanners/azure/arm/parser/parser.go similarity index 79% rename from pkg/scanners/azure/arm/parser/parser.go rename to pkg/iac/scanners/azure/arm/parser/parser.go index b1dfd79dbf02..ca8a86f89b81 100644 --- a/pkg/scanners/azure/arm/parser/parser.go +++ b/pkg/iac/scanners/azure/arm/parser/parser.go @@ -11,9 +11,9 @@ import ( "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/azure" - "github.com/aquasecurity/trivy/pkg/scanners/azure/arm/parser/armjson" - "github.com/aquasecurity/trivy/pkg/scanners/azure/resolver" + azure2 
"github.com/aquasecurity/trivy/pkg/iac/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/resolver" ) type Parser struct { @@ -40,9 +40,9 @@ func New(targetFS fs.FS, opts ...options.ParserOption) *Parser { return p } -func (p *Parser) ParseFS(ctx context.Context, dir string) ([]azure.Deployment, error) { +func (p *Parser) ParseFS(ctx context.Context, dir string) ([]azure2.Deployment, error) { - var deployments []azure.Deployment + var deployments []azure2.Deployment if err := fs.WalkDir(p.targetFS, dir, func(path string, entry fs.DirEntry, err error) error { if err != nil { @@ -98,14 +98,14 @@ func (p *Parser) Required(path string) bool { return false } - if template.Schema.Kind != azure.KindString { + if template.Schema.Kind != azure2.KindString { return false } return strings.HasPrefix(template.Schema.AsString(), "https://schema.management.azure.com") } -func (p *Parser) parseFile(r io.Reader, filename string) (*azure.Deployment, error) { +func (p *Parser) parseFile(r io.Reader, filename string) (*azure2.Deployment, error) { var template Template data, err := io.ReadAll(r) if err != nil { @@ -122,11 +122,11 @@ func (p *Parser) parseFile(r io.Reader, filename string) (*azure.Deployment, err return p.convertTemplate(template), nil } -func (p *Parser) convertTemplate(template Template) *azure.Deployment { +func (p *Parser) convertTemplate(template Template) *azure2.Deployment { - deployment := azure.Deployment{ + deployment := azure2.Deployment{ Metadata: template.Metadata, - TargetScope: azure.ScopeResourceGroup, // TODO: override from --resource-group? + TargetScope: azure2.ScopeResourceGroup, // TODO: override from --resource-group? 
Parameters: nil, Variables: nil, Resources: nil, @@ -139,8 +139,8 @@ func (p *Parser) convertTemplate(template Template) *azure.Deployment { // TODO: the references passed here should probably not be the name - maybe params.NAME.DefaultValue? for name, param := range template.Parameters { - deployment.Parameters = append(deployment.Parameters, azure.Parameter{ - Variable: azure.Variable{ + deployment.Parameters = append(deployment.Parameters, azure2.Parameter{ + Variable: azure2.Variable{ Name: name, Value: param.DefaultValue, }, @@ -150,14 +150,14 @@ func (p *Parser) convertTemplate(template Template) *azure.Deployment { } for name, variable := range template.Variables { - deployment.Variables = append(deployment.Variables, azure.Variable{ + deployment.Variables = append(deployment.Variables, azure2.Variable{ Name: name, Value: variable, }) } for name, output := range template.Outputs { - deployment.Outputs = append(deployment.Outputs, azure.Output{ + deployment.Outputs = append(deployment.Outputs, azure2.Output{ Name: name, Value: output, }) @@ -170,15 +170,15 @@ func (p *Parser) convertTemplate(template Template) *azure.Deployment { return &deployment } -func (p *Parser) convertResource(input Resource) azure.Resource { +func (p *Parser) convertResource(input Resource) azure2.Resource { - var children []azure.Resource + var children []azure2.Resource for _, child := range input.Resources { children = append(children, p.convertResource(child)) } - resource := azure.Resource{ + resource := azure2.Resource{ Metadata: input.Metadata, APIVersion: input.APIVersion, Type: input.Type, diff --git a/pkg/scanners/azure/arm/parser/parser_test.go b/pkg/iac/scanners/azure/arm/parser/parser_test.go similarity index 77% rename from pkg/scanners/azure/arm/parser/parser_test.go rename to pkg/iac/scanners/azure/arm/parser/parser_test.go index edcec5dc2a29..493a17ac37c0 100644 --- a/pkg/scanners/azure/arm/parser/parser_test.go +++ b/pkg/iac/scanners/azure/arm/parser/parser_test.go 
@@ -7,13 +7,13 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/types" + azure2 "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/resolver" "github.com/liamg/memoryfs" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy/pkg/scanners/azure" - "github.com/aquasecurity/trivy/pkg/scanners/azure/resolver" ) func createMetadata(targetFS fs.FS, filename string, start, end int, ref string, parent *types.Metadata) types.Metadata { @@ -33,7 +33,7 @@ func TestParser_Parse(t *testing.T) { tests := []struct { name string input string - want func() azure.Deployment + want func() azure2.Deployment wantDeployment bool }{ { @@ -56,23 +56,23 @@ func TestParser_Parse(t *testing.T) { }, "resources": [] }`, - want: func() azure.Deployment { + want: func() azure2.Deployment { root := createMetadata(targetFS, filename, 0, 0, "", nil).WithInternal(resolver.NewResolver()) metadata := createMetadata(targetFS, filename, 1, 13, "", &root) parametersMetadata := createMetadata(targetFS, filename, 4, 11, "parameters", &metadata) storageMetadata := createMetadata(targetFS, filename, 5, 10, "parameters.storagePrefix", ¶metersMetadata) - return azure.Deployment{ + return azure2.Deployment{ Metadata: metadata, - TargetScope: azure.ScopeResourceGroup, - Parameters: []azure.Parameter{ + TargetScope: azure2.ScopeResourceGroup, + Parameters: []azure2.Parameter{ { - Variable: azure.Variable{ + Variable: azure2.Variable{ Name: "storagePrefix", - Value: azure.NewValue("x", createMetadata(targetFS, filename, 7, 7, "parameters.storagePrefix.defaultValue", &storageMetadata)), + Value: azure2.NewValue("x", createMetadata(targetFS, filename, 7, 7, "parameters.storagePrefix.defaultValue", &storageMetadata)), }, - Default: azure.NewValue("x", 
createMetadata(targetFS, filename, 7, 7, "parameters.storagePrefix.defaultValue", &storageMetadata)), + Default: azure2.NewValue("x", createMetadata(targetFS, filename, 7, 7, "parameters.storagePrefix.defaultValue", &storageMetadata)), Decorators: nil, }, }, @@ -127,7 +127,7 @@ func TestParser_Parse(t *testing.T) { } ] }`, - want: func() azure.Deployment { + want: func() azure2.Deployment { rootMetadata := createMetadata(targetFS, filename, 0, 0, "", nil).WithInternal(resolver.NewResolver()) fileMetadata := createMetadata(targetFS, filename, 1, 45, "", &rootMetadata) @@ -143,52 +143,52 @@ func TestParser_Parse(t *testing.T) { networkACL0Metadata := createMetadata(targetFS, filename, 35, 37, "resources[0].properties.networkAcls[0]", &networkACLListMetadata) networkACL1Metadata := createMetadata(targetFS, filename, 38, 40, "resources[0].properties.networkAcls[1]", &networkACLListMetadata) - return azure.Deployment{ + return azure2.Deployment{ Metadata: fileMetadata, - TargetScope: azure.ScopeResourceGroup, - Resources: []azure.Resource{ + TargetScope: azure2.ScopeResourceGroup, + Resources: []azure2.Resource{ { Metadata: resourceMetadata, - APIVersion: azure.NewValue( + APIVersion: azure2.NewValue( "2022-05-01", createMetadata(targetFS, filename, 8, 8, "resources[0].apiVersion", &resourceMetadata), ), - Type: azure.NewValue( + Type: azure2.NewValue( "Microsoft.Storage/storageAccounts", createMetadata(targetFS, filename, 7, 7, "resources[0].type", &resourceMetadata), ), - Kind: azure.NewValue( + Kind: azure2.NewValue( "string", createMetadata(targetFS, filename, 18, 18, "resources[0].kind", &resourceMetadata), ), - Name: azure.NewValue( + Name: azure2.NewValue( "myResource", createMetadata(targetFS, filename, 9, 9, "resources[0].name", &resourceMetadata), ), - Location: azure.NewValue( + Location: azure2.NewValue( "string", createMetadata(targetFS, filename, 10, 10, "resources[0].location", &resourceMetadata), ), - Properties: azure.NewValue( - map[string]azure.Value{ 
- "allowSharedKeyAccess": azure.NewValue(false, createMetadata(targetFS, filename, 28, 28, "resources[0].properties.allowSharedKeyAccess", &propertiesMetadata)), - "customDomain": azure.NewValue( - map[string]azure.Value{ - "name": azure.NewValue("string", createMetadata(targetFS, filename, 30, 30, "resources[0].properties.customDomain.name", &customDomainMetadata)), - "useSubDomainName": azure.NewValue(false, createMetadata(targetFS, filename, 31, 31, "resources[0].properties.customDomain.useSubDomainName", &customDomainMetadata)), - "number": azure.NewValue(int64(123), createMetadata(targetFS, filename, 32, 32, "resources[0].properties.customDomain.number", &customDomainMetadata)), + Properties: azure2.NewValue( + map[string]azure2.Value{ + "allowSharedKeyAccess": azure2.NewValue(false, createMetadata(targetFS, filename, 28, 28, "resources[0].properties.allowSharedKeyAccess", &propertiesMetadata)), + "customDomain": azure2.NewValue( + map[string]azure2.Value{ + "name": azure2.NewValue("string", createMetadata(targetFS, filename, 30, 30, "resources[0].properties.customDomain.name", &customDomainMetadata)), + "useSubDomainName": azure2.NewValue(false, createMetadata(targetFS, filename, 31, 31, "resources[0].properties.customDomain.useSubDomainName", &customDomainMetadata)), + "number": azure2.NewValue(int64(123), createMetadata(targetFS, filename, 32, 32, "resources[0].properties.customDomain.number", &customDomainMetadata)), }, customDomainMetadata), - "networkAcls": azure.NewValue( - []azure.Value{ - azure.NewValue( - map[string]azure.Value{ - "bypass": azure.NewValue("AzureServices1", createMetadata(targetFS, filename, 36, 36, "resources[0].properties.networkAcls[0].bypass", &networkACL0Metadata)), + "networkAcls": azure2.NewValue( + []azure2.Value{ + azure2.NewValue( + map[string]azure2.Value{ + "bypass": azure2.NewValue("AzureServices1", createMetadata(targetFS, filename, 36, 36, "resources[0].properties.networkAcls[0].bypass", &networkACL0Metadata)), }, 
networkACL0Metadata, ), - azure.NewValue( - map[string]azure.Value{ - "bypass": azure.NewValue("AzureServices2", createMetadata(targetFS, filename, 39, 39, "resources[0].properties.networkAcls[1].bypass", &networkACL1Metadata)), + azure2.NewValue( + map[string]azure2.Value{ + "bypass": azure2.NewValue("AzureServices2", createMetadata(targetFS, filename, 39, 39, "resources[0].properties.networkAcls[1].bypass", &networkACL1Metadata)), }, networkACL1Metadata, ), diff --git a/pkg/scanners/azure/arm/parser/template.go b/pkg/iac/scanners/azure/arm/parser/template.go similarity index 93% rename from pkg/scanners/azure/arm/parser/template.go rename to pkg/iac/scanners/azure/arm/parser/template.go index 0f7ca8e75b28..04dd5a78cef7 100644 --- a/pkg/scanners/azure/arm/parser/template.go +++ b/pkg/iac/scanners/azure/arm/parser/template.go @@ -2,8 +2,8 @@ package parser import ( "github.com/aquasecurity/defsec/pkg/types" - types2 "github.com/aquasecurity/trivy/pkg/scanners/azure" - "github.com/aquasecurity/trivy/pkg/scanners/azure/arm/parser/armjson" + types2 "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" ) type Template struct { diff --git a/pkg/scanners/azure/arm/parser/template_test.go b/pkg/iac/scanners/azure/arm/parser/template_test.go similarity index 93% rename from pkg/scanners/azure/arm/parser/template_test.go rename to pkg/iac/scanners/azure/arm/parser/template_test.go index 130b513319cd..bc2083b9c2b5 100644 --- a/pkg/scanners/azure/arm/parser/template_test.go +++ b/pkg/iac/scanners/azure/arm/parser/template_test.go @@ -6,8 +6,8 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/types" - types2 "github.com/aquasecurity/trivy/pkg/scanners/azure" - "github.com/aquasecurity/trivy/pkg/scanners/azure/arm/parser/armjson" + types2 "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/pkg/scanners/azure/arm/parser/testdata/example.json b/pkg/iac/scanners/azure/arm/parser/testdata/example.json similarity index 100% rename from pkg/scanners/azure/arm/parser/testdata/example.json rename to pkg/iac/scanners/azure/arm/parser/testdata/example.json diff --git a/pkg/scanners/azure/arm/parser/testdata/postgres.json b/pkg/iac/scanners/azure/arm/parser/testdata/postgres.json similarity index 100% rename from pkg/scanners/azure/arm/parser/testdata/postgres.json rename to pkg/iac/scanners/azure/arm/parser/testdata/postgres.json diff --git a/pkg/scanners/azure/arm/scanner.go b/pkg/iac/scanners/azure/arm/scanner.go similarity index 95% rename from pkg/scanners/azure/arm/scanner.go rename to pkg/iac/scanners/azure/arm/scanner.go index 54045082c044..48f8df6a9cfc 100644 --- a/pkg/scanners/azure/arm/scanner.go +++ b/pkg/iac/scanners/azure/arm/scanner.go @@ -15,10 +15,10 @@ import ( "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/state" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/internal/adapters/arm" - "github.com/aquasecurity/trivy/pkg/scanners" - "github.com/aquasecurity/trivy/pkg/scanners/azure" - "github.com/aquasecurity/trivy/pkg/scanners/azure/arm/parser" + "github.com/aquasecurity/trivy/pkg/iac/adapters/arm" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser" ) var _ scanners.FSScanner = (*Scanner)(nil) diff --git a/pkg/scanners/azure/deployment.go b/pkg/iac/scanners/azure/deployment.go similarity index 100% rename from pkg/scanners/azure/deployment.go rename to 
pkg/iac/scanners/azure/deployment.go diff --git a/pkg/scanners/azure/expressions/lex.go b/pkg/iac/scanners/azure/expressions/lex.go similarity index 100% rename from pkg/scanners/azure/expressions/lex.go rename to pkg/iac/scanners/azure/expressions/lex.go diff --git a/pkg/scanners/azure/expressions/node.go b/pkg/iac/scanners/azure/expressions/node.go similarity index 75% rename from pkg/scanners/azure/expressions/node.go rename to pkg/iac/scanners/azure/expressions/node.go index 3257e127033d..a126a9e40c3f 100644 --- a/pkg/scanners/azure/expressions/node.go +++ b/pkg/iac/scanners/azure/expressions/node.go @@ -1,18 +1,18 @@ package expressions import ( - "github.com/aquasecurity/trivy/pkg/scanners/azure/functions" + functions2 "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/functions" ) type Node interface { - Evaluate(deploymentProvider functions.DeploymentData) interface{} + Evaluate(deploymentProvider functions2.DeploymentData) interface{} } type expressionValue struct { val interface{} } -func (e expressionValue) Evaluate(deploymentProvider functions.DeploymentData) interface{} { +func (e expressionValue) Evaluate(deploymentProvider functions2.DeploymentData) interface{} { if f, ok := e.val.(expression); ok { return f.Evaluate(deploymentProvider) } @@ -24,13 +24,13 @@ type expression struct { args []Node } -func (f expression) Evaluate(deploymentProvider functions.DeploymentData) interface{} { +func (f expression) Evaluate(deploymentProvider functions2.DeploymentData) interface{} { args := make([]interface{}, len(f.args)) for i, arg := range f.args { args[i] = arg.Evaluate(deploymentProvider) } - return functions.Evaluate(deploymentProvider, f.name, args...) + return functions2.Evaluate(deploymentProvider, f.name, args...) 
} func NewExpressionTree(code string) (Node, error) { diff --git a/pkg/scanners/azure/expressions/token_walker.go b/pkg/iac/scanners/azure/expressions/token_walker.go similarity index 100% rename from pkg/scanners/azure/expressions/token_walker.go rename to pkg/iac/scanners/azure/expressions/token_walker.go diff --git a/pkg/scanners/azure/functions/add.go b/pkg/iac/scanners/azure/functions/add.go similarity index 100% rename from pkg/scanners/azure/functions/add.go rename to pkg/iac/scanners/azure/functions/add.go diff --git a/pkg/scanners/azure/functions/add_test.go b/pkg/iac/scanners/azure/functions/add_test.go similarity index 100% rename from pkg/scanners/azure/functions/add_test.go rename to pkg/iac/scanners/azure/functions/add_test.go diff --git a/pkg/scanners/azure/functions/and.go b/pkg/iac/scanners/azure/functions/and.go similarity index 100% rename from pkg/scanners/azure/functions/and.go rename to pkg/iac/scanners/azure/functions/and.go diff --git a/pkg/scanners/azure/functions/and_test.go b/pkg/iac/scanners/azure/functions/and_test.go similarity index 100% rename from pkg/scanners/azure/functions/and_test.go rename to pkg/iac/scanners/azure/functions/and_test.go diff --git a/pkg/scanners/azure/functions/array.go b/pkg/iac/scanners/azure/functions/array.go similarity index 100% rename from pkg/scanners/azure/functions/array.go rename to pkg/iac/scanners/azure/functions/array.go diff --git a/pkg/scanners/azure/functions/array_test.go b/pkg/iac/scanners/azure/functions/array_test.go similarity index 100% rename from pkg/scanners/azure/functions/array_test.go rename to pkg/iac/scanners/azure/functions/array_test.go diff --git a/pkg/scanners/azure/functions/base64.go b/pkg/iac/scanners/azure/functions/base64.go similarity index 100% rename from pkg/scanners/azure/functions/base64.go rename to pkg/iac/scanners/azure/functions/base64.go diff --git a/pkg/scanners/azure/functions/base64_test.go b/pkg/iac/scanners/azure/functions/base64_test.go similarity index 
100% rename from pkg/scanners/azure/functions/base64_test.go rename to pkg/iac/scanners/azure/functions/base64_test.go diff --git a/pkg/scanners/azure/functions/bool.go b/pkg/iac/scanners/azure/functions/bool.go similarity index 100% rename from pkg/scanners/azure/functions/bool.go rename to pkg/iac/scanners/azure/functions/bool.go diff --git a/pkg/scanners/azure/functions/bool_test.go b/pkg/iac/scanners/azure/functions/bool_test.go similarity index 100% rename from pkg/scanners/azure/functions/bool_test.go rename to pkg/iac/scanners/azure/functions/bool_test.go diff --git a/pkg/scanners/azure/functions/casing.go b/pkg/iac/scanners/azure/functions/casing.go similarity index 100% rename from pkg/scanners/azure/functions/casing.go rename to pkg/iac/scanners/azure/functions/casing.go diff --git a/pkg/scanners/azure/functions/casing_test.go b/pkg/iac/scanners/azure/functions/casing_test.go similarity index 100% rename from pkg/scanners/azure/functions/casing_test.go rename to pkg/iac/scanners/azure/functions/casing_test.go diff --git a/pkg/scanners/azure/functions/coalesce.go b/pkg/iac/scanners/azure/functions/coalesce.go similarity index 100% rename from pkg/scanners/azure/functions/coalesce.go rename to pkg/iac/scanners/azure/functions/coalesce.go diff --git a/pkg/scanners/azure/functions/coalesce_test.go b/pkg/iac/scanners/azure/functions/coalesce_test.go similarity index 100% rename from pkg/scanners/azure/functions/coalesce_test.go rename to pkg/iac/scanners/azure/functions/coalesce_test.go diff --git a/pkg/scanners/azure/functions/concat.go b/pkg/iac/scanners/azure/functions/concat.go similarity index 100% rename from pkg/scanners/azure/functions/concat.go rename to pkg/iac/scanners/azure/functions/concat.go diff --git a/pkg/scanners/azure/functions/concat_test.go b/pkg/iac/scanners/azure/functions/concat_test.go similarity index 100% rename from pkg/scanners/azure/functions/concat_test.go rename to pkg/iac/scanners/azure/functions/concat_test.go diff --git 
a/pkg/scanners/azure/functions/contains.go b/pkg/iac/scanners/azure/functions/contains.go similarity index 100% rename from pkg/scanners/azure/functions/contains.go rename to pkg/iac/scanners/azure/functions/contains.go diff --git a/pkg/scanners/azure/functions/contains_test.go b/pkg/iac/scanners/azure/functions/contains_test.go similarity index 100% rename from pkg/scanners/azure/functions/contains_test.go rename to pkg/iac/scanners/azure/functions/contains_test.go diff --git a/pkg/scanners/azure/functions/copy_index.go b/pkg/iac/scanners/azure/functions/copy_index.go similarity index 100% rename from pkg/scanners/azure/functions/copy_index.go rename to pkg/iac/scanners/azure/functions/copy_index.go diff --git a/pkg/scanners/azure/functions/copy_index_test.go b/pkg/iac/scanners/azure/functions/copy_index_test.go similarity index 100% rename from pkg/scanners/azure/functions/copy_index_test.go rename to pkg/iac/scanners/azure/functions/copy_index_test.go diff --git a/pkg/scanners/azure/functions/create_array.go b/pkg/iac/scanners/azure/functions/create_array.go similarity index 100% rename from pkg/scanners/azure/functions/create_array.go rename to pkg/iac/scanners/azure/functions/create_array.go diff --git a/pkg/scanners/azure/functions/create_array_test.go b/pkg/iac/scanners/azure/functions/create_array_test.go similarity index 100% rename from pkg/scanners/azure/functions/create_array_test.go rename to pkg/iac/scanners/azure/functions/create_array_test.go diff --git a/pkg/scanners/azure/functions/create_object.go b/pkg/iac/scanners/azure/functions/create_object.go similarity index 100% rename from pkg/scanners/azure/functions/create_object.go rename to pkg/iac/scanners/azure/functions/create_object.go diff --git a/pkg/scanners/azure/functions/create_object_test.go b/pkg/iac/scanners/azure/functions/create_object_test.go similarity index 100% rename from pkg/scanners/azure/functions/create_object_test.go rename to 
pkg/iac/scanners/azure/functions/create_object_test.go diff --git a/pkg/scanners/azure/functions/data_uri.go b/pkg/iac/scanners/azure/functions/data_uri.go similarity index 100% rename from pkg/scanners/azure/functions/data_uri.go rename to pkg/iac/scanners/azure/functions/data_uri.go diff --git a/pkg/scanners/azure/functions/data_uri_test.go b/pkg/iac/scanners/azure/functions/data_uri_test.go similarity index 100% rename from pkg/scanners/azure/functions/data_uri_test.go rename to pkg/iac/scanners/azure/functions/data_uri_test.go diff --git a/pkg/scanners/azure/functions/date_time_add.go b/pkg/iac/scanners/azure/functions/date_time_add.go similarity index 100% rename from pkg/scanners/azure/functions/date_time_add.go rename to pkg/iac/scanners/azure/functions/date_time_add.go diff --git a/pkg/scanners/azure/functions/date_time_epoch.go b/pkg/iac/scanners/azure/functions/date_time_epoch.go similarity index 100% rename from pkg/scanners/azure/functions/date_time_epoch.go rename to pkg/iac/scanners/azure/functions/date_time_epoch.go diff --git a/pkg/scanners/azure/functions/date_time_epoch_test.go b/pkg/iac/scanners/azure/functions/date_time_epoch_test.go similarity index 100% rename from pkg/scanners/azure/functions/date_time_epoch_test.go rename to pkg/iac/scanners/azure/functions/date_time_epoch_test.go diff --git a/pkg/scanners/azure/functions/datetime_add_test.go b/pkg/iac/scanners/azure/functions/datetime_add_test.go similarity index 100% rename from pkg/scanners/azure/functions/datetime_add_test.go rename to pkg/iac/scanners/azure/functions/datetime_add_test.go diff --git a/pkg/scanners/azure/functions/deployment.go b/pkg/iac/scanners/azure/functions/deployment.go similarity index 100% rename from pkg/scanners/azure/functions/deployment.go rename to pkg/iac/scanners/azure/functions/deployment.go diff --git a/pkg/scanners/azure/functions/div.go b/pkg/iac/scanners/azure/functions/div.go similarity index 100% rename from pkg/scanners/azure/functions/div.go rename 
to pkg/iac/scanners/azure/functions/div.go diff --git a/pkg/scanners/azure/functions/div_test.go b/pkg/iac/scanners/azure/functions/div_test.go similarity index 100% rename from pkg/scanners/azure/functions/div_test.go rename to pkg/iac/scanners/azure/functions/div_test.go diff --git a/pkg/scanners/azure/functions/empty.go b/pkg/iac/scanners/azure/functions/empty.go similarity index 100% rename from pkg/scanners/azure/functions/empty.go rename to pkg/iac/scanners/azure/functions/empty.go diff --git a/pkg/scanners/azure/functions/empty_test.go b/pkg/iac/scanners/azure/functions/empty_test.go similarity index 100% rename from pkg/scanners/azure/functions/empty_test.go rename to pkg/iac/scanners/azure/functions/empty_test.go diff --git a/pkg/scanners/azure/functions/ends_with.go b/pkg/iac/scanners/azure/functions/ends_with.go similarity index 100% rename from pkg/scanners/azure/functions/ends_with.go rename to pkg/iac/scanners/azure/functions/ends_with.go diff --git a/pkg/scanners/azure/functions/ends_with_test.go b/pkg/iac/scanners/azure/functions/ends_with_test.go similarity index 100% rename from pkg/scanners/azure/functions/ends_with_test.go rename to pkg/iac/scanners/azure/functions/ends_with_test.go diff --git a/pkg/scanners/azure/functions/equals.go b/pkg/iac/scanners/azure/functions/equals.go similarity index 100% rename from pkg/scanners/azure/functions/equals.go rename to pkg/iac/scanners/azure/functions/equals.go diff --git a/pkg/scanners/azure/functions/equals_test.go b/pkg/iac/scanners/azure/functions/equals_test.go similarity index 100% rename from pkg/scanners/azure/functions/equals_test.go rename to pkg/iac/scanners/azure/functions/equals_test.go diff --git a/pkg/scanners/azure/functions/false.go b/pkg/iac/scanners/azure/functions/false.go similarity index 100% rename from pkg/scanners/azure/functions/false.go rename to pkg/iac/scanners/azure/functions/false.go diff --git a/pkg/scanners/azure/functions/first.go 
b/pkg/iac/scanners/azure/functions/first.go similarity index 100% rename from pkg/scanners/azure/functions/first.go rename to pkg/iac/scanners/azure/functions/first.go diff --git a/pkg/scanners/azure/functions/first_test.go b/pkg/iac/scanners/azure/functions/first_test.go similarity index 100% rename from pkg/scanners/azure/functions/first_test.go rename to pkg/iac/scanners/azure/functions/first_test.go diff --git a/pkg/scanners/azure/functions/float.go b/pkg/iac/scanners/azure/functions/float.go similarity index 100% rename from pkg/scanners/azure/functions/float.go rename to pkg/iac/scanners/azure/functions/float.go diff --git a/pkg/scanners/azure/functions/float_test.go b/pkg/iac/scanners/azure/functions/float_test.go similarity index 100% rename from pkg/scanners/azure/functions/float_test.go rename to pkg/iac/scanners/azure/functions/float_test.go diff --git a/pkg/scanners/azure/functions/format.go b/pkg/iac/scanners/azure/functions/format.go similarity index 100% rename from pkg/scanners/azure/functions/format.go rename to pkg/iac/scanners/azure/functions/format.go diff --git a/pkg/scanners/azure/functions/format_test.go b/pkg/iac/scanners/azure/functions/format_test.go similarity index 100% rename from pkg/scanners/azure/functions/format_test.go rename to pkg/iac/scanners/azure/functions/format_test.go diff --git a/pkg/scanners/azure/functions/functions.go b/pkg/iac/scanners/azure/functions/functions.go similarity index 100% rename from pkg/scanners/azure/functions/functions.go rename to pkg/iac/scanners/azure/functions/functions.go diff --git a/pkg/scanners/azure/functions/greater.go b/pkg/iac/scanners/azure/functions/greater.go similarity index 100% rename from pkg/scanners/azure/functions/greater.go rename to pkg/iac/scanners/azure/functions/greater.go diff --git a/pkg/scanners/azure/functions/greater_test.go b/pkg/iac/scanners/azure/functions/greater_test.go similarity index 100% rename from pkg/scanners/azure/functions/greater_test.go rename to 
pkg/iac/scanners/azure/functions/greater_test.go diff --git a/pkg/scanners/azure/functions/guid.go b/pkg/iac/scanners/azure/functions/guid.go similarity index 100% rename from pkg/scanners/azure/functions/guid.go rename to pkg/iac/scanners/azure/functions/guid.go diff --git a/pkg/scanners/azure/functions/guid_test.go b/pkg/iac/scanners/azure/functions/guid_test.go similarity index 100% rename from pkg/scanners/azure/functions/guid_test.go rename to pkg/iac/scanners/azure/functions/guid_test.go diff --git a/pkg/scanners/azure/functions/if.go b/pkg/iac/scanners/azure/functions/if.go similarity index 100% rename from pkg/scanners/azure/functions/if.go rename to pkg/iac/scanners/azure/functions/if.go diff --git a/pkg/scanners/azure/functions/if_test.go b/pkg/iac/scanners/azure/functions/if_test.go similarity index 100% rename from pkg/scanners/azure/functions/if_test.go rename to pkg/iac/scanners/azure/functions/if_test.go diff --git a/pkg/scanners/azure/functions/index_of.go b/pkg/iac/scanners/azure/functions/index_of.go similarity index 100% rename from pkg/scanners/azure/functions/index_of.go rename to pkg/iac/scanners/azure/functions/index_of.go diff --git a/pkg/scanners/azure/functions/index_of_test.go b/pkg/iac/scanners/azure/functions/index_of_test.go similarity index 100% rename from pkg/scanners/azure/functions/index_of_test.go rename to pkg/iac/scanners/azure/functions/index_of_test.go diff --git a/pkg/scanners/azure/functions/int.go b/pkg/iac/scanners/azure/functions/int.go similarity index 100% rename from pkg/scanners/azure/functions/int.go rename to pkg/iac/scanners/azure/functions/int.go diff --git a/pkg/scanners/azure/functions/int_test.go b/pkg/iac/scanners/azure/functions/int_test.go similarity index 100% rename from pkg/scanners/azure/functions/int_test.go rename to pkg/iac/scanners/azure/functions/int_test.go diff --git a/pkg/scanners/azure/functions/intersection.go b/pkg/iac/scanners/azure/functions/intersection.go similarity index 100% rename from 
pkg/scanners/azure/functions/intersection.go rename to pkg/iac/scanners/azure/functions/intersection.go diff --git a/pkg/scanners/azure/functions/intersection_test.go b/pkg/iac/scanners/azure/functions/intersection_test.go similarity index 100% rename from pkg/scanners/azure/functions/intersection_test.go rename to pkg/iac/scanners/azure/functions/intersection_test.go diff --git a/pkg/scanners/azure/functions/items.go b/pkg/iac/scanners/azure/functions/items.go similarity index 100% rename from pkg/scanners/azure/functions/items.go rename to pkg/iac/scanners/azure/functions/items.go diff --git a/pkg/scanners/azure/functions/join.go b/pkg/iac/scanners/azure/functions/join.go similarity index 100% rename from pkg/scanners/azure/functions/join.go rename to pkg/iac/scanners/azure/functions/join.go diff --git a/pkg/scanners/azure/functions/join_test.go b/pkg/iac/scanners/azure/functions/join_test.go similarity index 100% rename from pkg/scanners/azure/functions/join_test.go rename to pkg/iac/scanners/azure/functions/join_test.go diff --git a/pkg/scanners/azure/functions/json.go b/pkg/iac/scanners/azure/functions/json.go similarity index 100% rename from pkg/scanners/azure/functions/json.go rename to pkg/iac/scanners/azure/functions/json.go diff --git a/pkg/scanners/azure/functions/json_test.go b/pkg/iac/scanners/azure/functions/json_test.go similarity index 100% rename from pkg/scanners/azure/functions/json_test.go rename to pkg/iac/scanners/azure/functions/json_test.go diff --git a/pkg/scanners/azure/functions/last.go b/pkg/iac/scanners/azure/functions/last.go similarity index 100% rename from pkg/scanners/azure/functions/last.go rename to pkg/iac/scanners/azure/functions/last.go diff --git a/pkg/scanners/azure/functions/last_index_of.go b/pkg/iac/scanners/azure/functions/last_index_of.go similarity index 100% rename from pkg/scanners/azure/functions/last_index_of.go rename to pkg/iac/scanners/azure/functions/last_index_of.go diff --git 
a/pkg/scanners/azure/functions/last_index_of_test.go b/pkg/iac/scanners/azure/functions/last_index_of_test.go similarity index 100% rename from pkg/scanners/azure/functions/last_index_of_test.go rename to pkg/iac/scanners/azure/functions/last_index_of_test.go diff --git a/pkg/scanners/azure/functions/last_test.go b/pkg/iac/scanners/azure/functions/last_test.go similarity index 100% rename from pkg/scanners/azure/functions/last_test.go rename to pkg/iac/scanners/azure/functions/last_test.go diff --git a/pkg/scanners/azure/functions/length.go b/pkg/iac/scanners/azure/functions/length.go similarity index 100% rename from pkg/scanners/azure/functions/length.go rename to pkg/iac/scanners/azure/functions/length.go diff --git a/pkg/scanners/azure/functions/length_test.go b/pkg/iac/scanners/azure/functions/length_test.go similarity index 100% rename from pkg/scanners/azure/functions/length_test.go rename to pkg/iac/scanners/azure/functions/length_test.go diff --git a/pkg/scanners/azure/functions/less.go b/pkg/iac/scanners/azure/functions/less.go similarity index 100% rename from pkg/scanners/azure/functions/less.go rename to pkg/iac/scanners/azure/functions/less.go diff --git a/pkg/scanners/azure/functions/less_test.go b/pkg/iac/scanners/azure/functions/less_test.go similarity index 100% rename from pkg/scanners/azure/functions/less_test.go rename to pkg/iac/scanners/azure/functions/less_test.go diff --git a/pkg/scanners/azure/functions/max.go b/pkg/iac/scanners/azure/functions/max.go similarity index 100% rename from pkg/scanners/azure/functions/max.go rename to pkg/iac/scanners/azure/functions/max.go diff --git a/pkg/scanners/azure/functions/max_test.go b/pkg/iac/scanners/azure/functions/max_test.go similarity index 100% rename from pkg/scanners/azure/functions/max_test.go rename to pkg/iac/scanners/azure/functions/max_test.go diff --git a/pkg/scanners/azure/functions/min.go b/pkg/iac/scanners/azure/functions/min.go similarity index 100% rename from 
pkg/scanners/azure/functions/min.go rename to pkg/iac/scanners/azure/functions/min.go diff --git a/pkg/scanners/azure/functions/min_test.go b/pkg/iac/scanners/azure/functions/min_test.go similarity index 100% rename from pkg/scanners/azure/functions/min_test.go rename to pkg/iac/scanners/azure/functions/min_test.go diff --git a/pkg/scanners/azure/functions/mod.go b/pkg/iac/scanners/azure/functions/mod.go similarity index 100% rename from pkg/scanners/azure/functions/mod.go rename to pkg/iac/scanners/azure/functions/mod.go diff --git a/pkg/scanners/azure/functions/mod_test.go b/pkg/iac/scanners/azure/functions/mod_test.go similarity index 100% rename from pkg/scanners/azure/functions/mod_test.go rename to pkg/iac/scanners/azure/functions/mod_test.go diff --git a/pkg/scanners/azure/functions/mul.go b/pkg/iac/scanners/azure/functions/mul.go similarity index 100% rename from pkg/scanners/azure/functions/mul.go rename to pkg/iac/scanners/azure/functions/mul.go diff --git a/pkg/scanners/azure/functions/mul_test.go b/pkg/iac/scanners/azure/functions/mul_test.go similarity index 100% rename from pkg/scanners/azure/functions/mul_test.go rename to pkg/iac/scanners/azure/functions/mul_test.go diff --git a/pkg/scanners/azure/functions/not.go b/pkg/iac/scanners/azure/functions/not.go similarity index 100% rename from pkg/scanners/azure/functions/not.go rename to pkg/iac/scanners/azure/functions/not.go diff --git a/pkg/scanners/azure/functions/not_test.go b/pkg/iac/scanners/azure/functions/not_test.go similarity index 100% rename from pkg/scanners/azure/functions/not_test.go rename to pkg/iac/scanners/azure/functions/not_test.go diff --git a/pkg/scanners/azure/functions/null.go b/pkg/iac/scanners/azure/functions/null.go similarity index 100% rename from pkg/scanners/azure/functions/null.go rename to pkg/iac/scanners/azure/functions/null.go diff --git a/pkg/scanners/azure/functions/null_test.go b/pkg/iac/scanners/azure/functions/null_test.go similarity index 100% rename from 
pkg/scanners/azure/functions/null_test.go rename to pkg/iac/scanners/azure/functions/null_test.go diff --git a/pkg/scanners/azure/functions/or.go b/pkg/iac/scanners/azure/functions/or.go similarity index 100% rename from pkg/scanners/azure/functions/or.go rename to pkg/iac/scanners/azure/functions/or.go diff --git a/pkg/scanners/azure/functions/or_test.go b/pkg/iac/scanners/azure/functions/or_test.go similarity index 100% rename from pkg/scanners/azure/functions/or_test.go rename to pkg/iac/scanners/azure/functions/or_test.go diff --git a/pkg/scanners/azure/functions/pad.go b/pkg/iac/scanners/azure/functions/pad.go similarity index 100% rename from pkg/scanners/azure/functions/pad.go rename to pkg/iac/scanners/azure/functions/pad.go diff --git a/pkg/scanners/azure/functions/pad_test.go b/pkg/iac/scanners/azure/functions/pad_test.go similarity index 100% rename from pkg/scanners/azure/functions/pad_test.go rename to pkg/iac/scanners/azure/functions/pad_test.go diff --git a/pkg/scanners/azure/functions/parameters.go b/pkg/iac/scanners/azure/functions/parameters.go similarity index 100% rename from pkg/scanners/azure/functions/parameters.go rename to pkg/iac/scanners/azure/functions/parameters.go diff --git a/pkg/scanners/azure/functions/pick_zones.go b/pkg/iac/scanners/azure/functions/pick_zones.go similarity index 100% rename from pkg/scanners/azure/functions/pick_zones.go rename to pkg/iac/scanners/azure/functions/pick_zones.go diff --git a/pkg/scanners/azure/functions/pick_zones_test.go b/pkg/iac/scanners/azure/functions/pick_zones_test.go similarity index 100% rename from pkg/scanners/azure/functions/pick_zones_test.go rename to pkg/iac/scanners/azure/functions/pick_zones_test.go diff --git a/pkg/scanners/azure/functions/range.go b/pkg/iac/scanners/azure/functions/range.go similarity index 100% rename from pkg/scanners/azure/functions/range.go rename to pkg/iac/scanners/azure/functions/range.go diff --git a/pkg/scanners/azure/functions/range_test.go 
b/pkg/iac/scanners/azure/functions/range_test.go similarity index 100% rename from pkg/scanners/azure/functions/range_test.go rename to pkg/iac/scanners/azure/functions/range_test.go diff --git a/pkg/scanners/azure/functions/reference.go b/pkg/iac/scanners/azure/functions/reference.go similarity index 100% rename from pkg/scanners/azure/functions/reference.go rename to pkg/iac/scanners/azure/functions/reference.go diff --git a/pkg/scanners/azure/functions/reference_test.go b/pkg/iac/scanners/azure/functions/reference_test.go similarity index 100% rename from pkg/scanners/azure/functions/reference_test.go rename to pkg/iac/scanners/azure/functions/reference_test.go diff --git a/pkg/scanners/azure/functions/replace.go b/pkg/iac/scanners/azure/functions/replace.go similarity index 100% rename from pkg/scanners/azure/functions/replace.go rename to pkg/iac/scanners/azure/functions/replace.go diff --git a/pkg/scanners/azure/functions/replace_test.go b/pkg/iac/scanners/azure/functions/replace_test.go similarity index 100% rename from pkg/scanners/azure/functions/replace_test.go rename to pkg/iac/scanners/azure/functions/replace_test.go diff --git a/pkg/scanners/azure/functions/resource.go b/pkg/iac/scanners/azure/functions/resource.go similarity index 100% rename from pkg/scanners/azure/functions/resource.go rename to pkg/iac/scanners/azure/functions/resource.go diff --git a/pkg/scanners/azure/functions/resource_test.go b/pkg/iac/scanners/azure/functions/resource_test.go similarity index 100% rename from pkg/scanners/azure/functions/resource_test.go rename to pkg/iac/scanners/azure/functions/resource_test.go diff --git a/pkg/scanners/azure/functions/scope.go b/pkg/iac/scanners/azure/functions/scope.go similarity index 100% rename from pkg/scanners/azure/functions/scope.go rename to pkg/iac/scanners/azure/functions/scope.go diff --git a/pkg/scanners/azure/functions/scope_test.go b/pkg/iac/scanners/azure/functions/scope_test.go similarity index 100% rename from 
pkg/scanners/azure/functions/scope_test.go rename to pkg/iac/scanners/azure/functions/scope_test.go diff --git a/pkg/scanners/azure/functions/skip.go b/pkg/iac/scanners/azure/functions/skip.go similarity index 100% rename from pkg/scanners/azure/functions/skip.go rename to pkg/iac/scanners/azure/functions/skip.go diff --git a/pkg/scanners/azure/functions/skip_test.go b/pkg/iac/scanners/azure/functions/skip_test.go similarity index 100% rename from pkg/scanners/azure/functions/skip_test.go rename to pkg/iac/scanners/azure/functions/skip_test.go diff --git a/pkg/scanners/azure/functions/split.go b/pkg/iac/scanners/azure/functions/split.go similarity index 100% rename from pkg/scanners/azure/functions/split.go rename to pkg/iac/scanners/azure/functions/split.go diff --git a/pkg/scanners/azure/functions/split_test.go b/pkg/iac/scanners/azure/functions/split_test.go similarity index 100% rename from pkg/scanners/azure/functions/split_test.go rename to pkg/iac/scanners/azure/functions/split_test.go diff --git a/pkg/scanners/azure/functions/starts_with.go b/pkg/iac/scanners/azure/functions/starts_with.go similarity index 100% rename from pkg/scanners/azure/functions/starts_with.go rename to pkg/iac/scanners/azure/functions/starts_with.go diff --git a/pkg/scanners/azure/functions/starts_with_test.go b/pkg/iac/scanners/azure/functions/starts_with_test.go similarity index 100% rename from pkg/scanners/azure/functions/starts_with_test.go rename to pkg/iac/scanners/azure/functions/starts_with_test.go diff --git a/pkg/scanners/azure/functions/string.go b/pkg/iac/scanners/azure/functions/string.go similarity index 100% rename from pkg/scanners/azure/functions/string.go rename to pkg/iac/scanners/azure/functions/string.go diff --git a/pkg/scanners/azure/functions/string_test.go b/pkg/iac/scanners/azure/functions/string_test.go similarity index 100% rename from pkg/scanners/azure/functions/string_test.go rename to pkg/iac/scanners/azure/functions/string_test.go diff --git 
a/pkg/scanners/azure/functions/sub.go b/pkg/iac/scanners/azure/functions/sub.go similarity index 100% rename from pkg/scanners/azure/functions/sub.go rename to pkg/iac/scanners/azure/functions/sub.go diff --git a/pkg/scanners/azure/functions/sub_test.go b/pkg/iac/scanners/azure/functions/sub_test.go similarity index 100% rename from pkg/scanners/azure/functions/sub_test.go rename to pkg/iac/scanners/azure/functions/sub_test.go diff --git a/pkg/scanners/azure/functions/substring.go b/pkg/iac/scanners/azure/functions/substring.go similarity index 100% rename from pkg/scanners/azure/functions/substring.go rename to pkg/iac/scanners/azure/functions/substring.go diff --git a/pkg/scanners/azure/functions/substring_test.go b/pkg/iac/scanners/azure/functions/substring_test.go similarity index 100% rename from pkg/scanners/azure/functions/substring_test.go rename to pkg/iac/scanners/azure/functions/substring_test.go diff --git a/pkg/scanners/azure/functions/take.go b/pkg/iac/scanners/azure/functions/take.go similarity index 100% rename from pkg/scanners/azure/functions/take.go rename to pkg/iac/scanners/azure/functions/take.go diff --git a/pkg/scanners/azure/functions/take_test.go b/pkg/iac/scanners/azure/functions/take_test.go similarity index 100% rename from pkg/scanners/azure/functions/take_test.go rename to pkg/iac/scanners/azure/functions/take_test.go diff --git a/pkg/scanners/azure/functions/trim.go b/pkg/iac/scanners/azure/functions/trim.go similarity index 100% rename from pkg/scanners/azure/functions/trim.go rename to pkg/iac/scanners/azure/functions/trim.go diff --git a/pkg/scanners/azure/functions/trim_test.go b/pkg/iac/scanners/azure/functions/trim_test.go similarity index 100% rename from pkg/scanners/azure/functions/trim_test.go rename to pkg/iac/scanners/azure/functions/trim_test.go diff --git a/pkg/scanners/azure/functions/true.go b/pkg/iac/scanners/azure/functions/true.go similarity index 100% rename from pkg/scanners/azure/functions/true.go rename to 
pkg/iac/scanners/azure/functions/true.go diff --git a/pkg/scanners/azure/functions/union.go b/pkg/iac/scanners/azure/functions/union.go similarity index 100% rename from pkg/scanners/azure/functions/union.go rename to pkg/iac/scanners/azure/functions/union.go diff --git a/pkg/scanners/azure/functions/union_test.go b/pkg/iac/scanners/azure/functions/union_test.go similarity index 100% rename from pkg/scanners/azure/functions/union_test.go rename to pkg/iac/scanners/azure/functions/union_test.go diff --git a/pkg/scanners/azure/functions/unique_string.go b/pkg/iac/scanners/azure/functions/unique_string.go similarity index 100% rename from pkg/scanners/azure/functions/unique_string.go rename to pkg/iac/scanners/azure/functions/unique_string.go diff --git a/pkg/scanners/azure/functions/unique_string_test.go b/pkg/iac/scanners/azure/functions/unique_string_test.go similarity index 100% rename from pkg/scanners/azure/functions/unique_string_test.go rename to pkg/iac/scanners/azure/functions/unique_string_test.go diff --git a/pkg/scanners/azure/functions/uri.go b/pkg/iac/scanners/azure/functions/uri.go similarity index 100% rename from pkg/scanners/azure/functions/uri.go rename to pkg/iac/scanners/azure/functions/uri.go diff --git a/pkg/scanners/azure/functions/uri_test.go b/pkg/iac/scanners/azure/functions/uri_test.go similarity index 100% rename from pkg/scanners/azure/functions/uri_test.go rename to pkg/iac/scanners/azure/functions/uri_test.go diff --git a/pkg/scanners/azure/functions/utc_now.go b/pkg/iac/scanners/azure/functions/utc_now.go similarity index 100% rename from pkg/scanners/azure/functions/utc_now.go rename to pkg/iac/scanners/azure/functions/utc_now.go diff --git a/pkg/scanners/azure/functions/utc_now_test.go b/pkg/iac/scanners/azure/functions/utc_now_test.go similarity index 100% rename from pkg/scanners/azure/functions/utc_now_test.go rename to pkg/iac/scanners/azure/functions/utc_now_test.go diff --git a/pkg/scanners/azure/resolver/resolver.go 
b/pkg/iac/scanners/azure/resolver/resolver.go similarity index 52% rename from pkg/scanners/azure/resolver/resolver.go rename to pkg/iac/scanners/azure/resolver/resolver.go index 8a790815fd9c..06383c43b2f0 100644 --- a/pkg/scanners/azure/resolver/resolver.go +++ b/pkg/iac/scanners/azure/resolver/resolver.go @@ -2,13 +2,13 @@ package resolver import ( defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/azure" - "github.com/aquasecurity/trivy/pkg/scanners/azure/expressions" + azure2 "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/expressions" ) type Resolver interface { - ResolveExpression(expression azure.Value) azure.Value - SetDeployment(d *azure.Deployment) + ResolveExpression(expression azure2.Value) azure2.Value + SetDeployment(d *azure2.Deployment) } func NewResolver() Resolver { @@ -16,15 +16,15 @@ func NewResolver() Resolver { } type resolver struct { - deployment *azure.Deployment + deployment *azure2.Deployment } -func (r *resolver) SetDeployment(d *azure.Deployment) { +func (r *resolver) SetDeployment(d *azure2.Deployment) { r.deployment = d } -func (r *resolver) ResolveExpression(expression azure.Value) azure.Value { - if expression.Kind != azure.KindExpression { +func (r *resolver) ResolveExpression(expression azure2.Value) azure2.Value { + if expression.Kind != azure2.KindExpression { return expression } if r.deployment == nil { @@ -34,18 +34,18 @@ func (r *resolver) ResolveExpression(expression azure.Value) azure.Value { resolved, err := r.resolveExpressionString(code, expression.GetMetadata()) if err != nil { - expression.Kind = azure.KindUnresolvable + expression.Kind = azure2.KindUnresolvable return expression } return resolved } -func (r *resolver) resolveExpressionString(code string, metadata defsecTypes.Metadata) (azure.Value, error) { +func (r *resolver) resolveExpressionString(code string, 
metadata defsecTypes.Metadata) (azure2.Value, error) { et, err := expressions.NewExpressionTree(code) if err != nil { - return azure.NullValue, err + return azure2.NullValue, err } evaluatedValue := et.Evaluate(r.deployment) - return azure.NewValue(evaluatedValue, metadata), nil + return azure2.NewValue(evaluatedValue, metadata), nil } diff --git a/pkg/scanners/azure/resolver/resolver_test.go b/pkg/iac/scanners/azure/resolver/resolver_test.go similarity index 84% rename from pkg/scanners/azure/resolver/resolver_test.go rename to pkg/iac/scanners/azure/resolver/resolver_test.go index 2a5cec4ea9e2..ee72b4593863 100644 --- a/pkg/scanners/azure/resolver/resolver_test.go +++ b/pkg/iac/scanners/azure/resolver/resolver_test.go @@ -5,7 +5,7 @@ import ( "time" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/azure" + azure2 "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" "github.com/stretchr/testify/require" ) @@ -54,7 +54,7 @@ func Test_resolveFunc(t *testing.T) { resolvedValue, err := resolver.resolveExpressionString(tt.expr, types.NewTestMetadata()) require.NoError(t, err) - require.Equal(t, azure.KindString, resolvedValue.Kind) + require.Equal(t, azure2.KindString, resolvedValue.Kind) require.Equal(t, tt.expected, resolvedValue.AsString()) }) @@ -64,18 +64,18 @@ func Test_resolveFunc(t *testing.T) { func Test_resolveParameter(t *testing.T) { tests := []struct { name string - deployment *azure.Deployment + deployment *azure2.Deployment expr string expected string }{ { name: "format call with parameter", - deployment: &azure.Deployment{ - Parameters: []azure.Parameter{ + deployment: &azure2.Deployment{ + Parameters: []azure2.Parameter{ { - Variable: azure.Variable{ + Variable: azure2.Variable{ Name: "dbName", - Value: azure.NewValue("myPostgreSQLServer", types.NewTestMetadata()), + Value: azure2.NewValue("myPostgreSQLServer", types.NewTestMetadata()), }, }, }, @@ -92,7 +92,7 @@ func 
Test_resolveParameter(t *testing.T) { resolvedValue, err := resolver.resolveExpressionString(tt.expr, types.NewTestMetadata()) require.NoError(t, err) - require.Equal(t, azure.KindString, resolvedValue.Kind) + require.Equal(t, azure2.KindString, resolvedValue.Kind) require.Equal(t, tt.expected, resolvedValue.AsString()) }) diff --git a/pkg/scanners/azure/value.go b/pkg/iac/scanners/azure/value.go similarity index 91% rename from pkg/scanners/azure/value.go rename to pkg/iac/scanners/azure/value.go index c9dc3316ca10..58871118e263 100644 --- a/pkg/scanners/azure/value.go +++ b/pkg/iac/scanners/azure/value.go @@ -4,10 +4,10 @@ import ( "strings" "time" + armjson2 "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" "golang.org/x/exp/slices" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/azure/arm/parser/armjson" ) type EvalContext struct{} @@ -98,24 +98,24 @@ func (v *Value) GetMetadata() types.Metadata { return v.Metadata } -func (v *Value) UnmarshalJSONWithMetadata(node armjson.Node) error { +func (v *Value) UnmarshalJSONWithMetadata(node armjson2.Node) error { v.updateValueKind(node) v.Metadata = node.Metadata() switch node.Kind() { - case armjson.KindArray: + case armjson2.KindArray: err := v.unmarshallArray(node) if err != nil { return err } - case armjson.KindObject: + case armjson2.KindObject: err := v.unmarshalObject(node) if err != nil { return err } - case armjson.KindString: + case armjson2.KindString: err := v.unmarshalString(node) if err != nil { return err @@ -139,7 +139,7 @@ func (v *Value) UnmarshalJSONWithMetadata(node armjson.Node) error { return nil } -func (v *Value) unmarshalString(node armjson.Node) error { +func (v *Value) unmarshalString(node armjson2.Node) error { var str string if err := node.Decode(&str); err != nil { return err @@ -154,7 +154,7 @@ func (v *Value) unmarshalString(node armjson.Node) error { return nil } -func (v *Value) 
unmarshalObject(node armjson.Node) error { +func (v *Value) unmarshalObject(node armjson2.Node) error { obj := make(map[string]Value) for i := 0; i < len(node.Content()); i += 2 { var key string @@ -171,7 +171,7 @@ func (v *Value) unmarshalObject(node armjson.Node) error { return nil } -func (v *Value) unmarshallArray(node armjson.Node) error { +func (v *Value) unmarshallArray(node armjson2.Node) error { var arr []Value for _, child := range node.Content() { var val Value @@ -184,19 +184,19 @@ func (v *Value) unmarshallArray(node armjson.Node) error { return nil } -func (v *Value) updateValueKind(node armjson.Node) { +func (v *Value) updateValueKind(node armjson2.Node) { switch node.Kind() { - case armjson.KindString: + case armjson2.KindString: v.Kind = KindString - case armjson.KindNumber: + case armjson2.KindNumber: v.Kind = KindNumber - case armjson.KindBoolean: + case armjson2.KindBoolean: v.Kind = KindBoolean - case armjson.KindObject: + case armjson2.KindObject: v.Kind = KindObject - case armjson.KindNull: + case armjson2.KindNull: v.Kind = KindNull - case armjson.KindArray: + case armjson2.KindArray: v.Kind = KindArray default: panic(node.Kind()) diff --git a/pkg/scanners/azure/value_test.go b/pkg/iac/scanners/azure/value_test.go similarity index 100% rename from pkg/scanners/azure/value_test.go rename to pkg/iac/scanners/azure/value_test.go diff --git a/pkg/scanners/cloudformation/cftypes/types.go b/pkg/iac/scanners/cloudformation/cftypes/types.go similarity index 100% rename from pkg/scanners/cloudformation/cftypes/types.go rename to pkg/iac/scanners/cloudformation/cftypes/types.go diff --git a/pkg/scanners/cloudformation/parser/errors.go b/pkg/iac/scanners/cloudformation/parser/errors.go similarity index 100% rename from pkg/scanners/cloudformation/parser/errors.go rename to pkg/iac/scanners/cloudformation/parser/errors.go diff --git a/pkg/scanners/cloudformation/parser/file_context.go b/pkg/iac/scanners/cloudformation/parser/file_context.go similarity 
index 100% rename from pkg/scanners/cloudformation/parser/file_context.go rename to pkg/iac/scanners/cloudformation/parser/file_context.go diff --git a/pkg/scanners/cloudformation/parser/file_context_test.go b/pkg/iac/scanners/cloudformation/parser/file_context_test.go similarity index 100% rename from pkg/scanners/cloudformation/parser/file_context_test.go rename to pkg/iac/scanners/cloudformation/parser/file_context_test.go diff --git a/pkg/scanners/cloudformation/parser/fn_and.go b/pkg/iac/scanners/cloudformation/parser/fn_and.go similarity index 89% rename from pkg/scanners/cloudformation/parser/fn_and.go rename to pkg/iac/scanners/cloudformation/parser/fn_and.go index 82a9f7bdcb19..a155120e413a 100644 --- a/pkg/scanners/cloudformation/parser/fn_and.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_and.go @@ -1,6 +1,8 @@ package parser -import "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +import ( + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) func ResolveAnd(property *Property) (resolved *Property, success bool) { if !property.isFunction() { diff --git a/pkg/scanners/cloudformation/parser/fn_and_test.go b/pkg/iac/scanners/cloudformation/parser/fn_and_test.go similarity index 97% rename from pkg/scanners/cloudformation/parser/fn_and_test.go rename to pkg/iac/scanners/cloudformation/parser/fn_and_test.go index 6112d46fe012..a2a9989b2805 100644 --- a/pkg/scanners/cloudformation/parser/fn_and_test.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_and_test.go @@ -4,8 +4,7 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/pkg/scanners/cloudformation/parser/fn_base64.go 
b/pkg/iac/scanners/cloudformation/parser/fn_base64.go similarity index 82% rename from pkg/scanners/cloudformation/parser/fn_base64.go rename to pkg/iac/scanners/cloudformation/parser/fn_base64.go index e1b8bcbeca6e..ad94ed08d6e8 100644 --- a/pkg/scanners/cloudformation/parser/fn_base64.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_base64.go @@ -3,7 +3,7 @@ package parser import ( "encoding/base64" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) func ResolveBase64(property *Property) (*Property, bool) { diff --git a/pkg/scanners/cloudformation/parser/fn_base64_test.go b/pkg/iac/scanners/cloudformation/parser/fn_base64_test.go similarity index 90% rename from pkg/scanners/cloudformation/parser/fn_base64_test.go rename to pkg/iac/scanners/cloudformation/parser/fn_base64_test.go index 070648bf8f1f..d1f31600a8c1 100644 --- a/pkg/scanners/cloudformation/parser/fn_base64_test.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_base64_test.go @@ -2,7 +2,7 @@ package parser import ( "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/pkg/scanners/cloudformation/parser/fn_builtin.go b/pkg/iac/scanners/cloudformation/parser/fn_builtin.go similarity index 95% rename from pkg/scanners/cloudformation/parser/fn_builtin.go rename to pkg/iac/scanners/cloudformation/parser/fn_builtin.go index 577a3b25c216..4094ea7515f8 100644 --- a/pkg/scanners/cloudformation/parser/fn_builtin.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_builtin.go @@ -5,8 +5,7 @@ import ( "net" "github.com/apparentlymart/go-cidr/cidr" - - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) func GetAzs(property *Property) (*Property, bool) { diff --git a/pkg/scanners/cloudformation/parser/fn_builtin_test.go b/pkg/iac/scanners/cloudformation/parser/fn_builtin_test.go similarity index 100% rename from pkg/scanners/cloudformation/parser/fn_builtin_test.go rename to pkg/iac/scanners/cloudformation/parser/fn_builtin_test.go diff --git a/pkg/scanners/cloudformation/parser/fn_condition.go b/pkg/iac/scanners/cloudformation/parser/fn_condition.go similarity index 100% rename from pkg/scanners/cloudformation/parser/fn_condition.go rename to pkg/iac/scanners/cloudformation/parser/fn_condition.go diff --git a/pkg/scanners/cloudformation/parser/fn_condition_test.go b/pkg/iac/scanners/cloudformation/parser/fn_condition_test.go similarity index 96% rename from pkg/scanners/cloudformation/parser/fn_condition_test.go rename to pkg/iac/scanners/cloudformation/parser/fn_condition_test.go index bb8f78e751e5..0bea529c280e 100644 --- a/pkg/scanners/cloudformation/parser/fn_condition_test.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_condition_test.go @@ -3,7 +3,7 @@ package parser import ( "testing" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/pkg/scanners/cloudformation/parser/fn_equals.go b/pkg/iac/scanners/cloudformation/parser/fn_equals.go similarity index 87% rename from pkg/scanners/cloudformation/parser/fn_equals.go rename to pkg/iac/scanners/cloudformation/parser/fn_equals.go index b476342c9a8f..4043735849a2 100644 --- a/pkg/scanners/cloudformation/parser/fn_equals.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_equals.go @@ -1,7 +1,7 @@ package parser import ( - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) func ResolveEquals(property *Property) (resolved *Property, success bool) { diff --git a/pkg/scanners/cloudformation/parser/fn_equals_test.go b/pkg/iac/scanners/cloudformation/parser/fn_equals_test.go similarity index 98% rename from pkg/scanners/cloudformation/parser/fn_equals_test.go rename to pkg/iac/scanners/cloudformation/parser/fn_equals_test.go index ade7a9a0a1ab..b1b74724c636 100644 --- a/pkg/scanners/cloudformation/parser/fn_equals_test.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_equals_test.go @@ -4,8 +4,7 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/pkg/scanners/cloudformation/parser/fn_find_in_map.go b/pkg/iac/scanners/cloudformation/parser/fn_find_in_map.go similarity index 94% rename from pkg/scanners/cloudformation/parser/fn_find_in_map.go rename to pkg/iac/scanners/cloudformation/parser/fn_find_in_map.go index 3c9a0da29f7b..7767f0126456 100644 --- a/pkg/scanners/cloudformation/parser/fn_find_in_map.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_find_in_map.go @@ -1,7 +1,7 @@ package parser import ( - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) func ResolveFindInMap(property *Property) (resolved *Property, success bool) { diff --git a/pkg/scanners/cloudformation/parser/fn_find_in_map_test.go b/pkg/iac/scanners/cloudformation/parser/fn_find_in_map_test.go similarity index 100% rename from pkg/scanners/cloudformation/parser/fn_find_in_map_test.go rename to pkg/iac/scanners/cloudformation/parser/fn_find_in_map_test.go diff --git 
a/pkg/scanners/cloudformation/parser/fn_get_attr.go b/pkg/iac/scanners/cloudformation/parser/fn_get_attr.go similarity index 93% rename from pkg/scanners/cloudformation/parser/fn_get_attr.go rename to pkg/iac/scanners/cloudformation/parser/fn_get_attr.go index 53a7891e0252..f6754d16a9b3 100644 --- a/pkg/scanners/cloudformation/parser/fn_get_attr.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_get_attr.go @@ -3,7 +3,7 @@ package parser import ( "strings" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) func ResolveGetAtt(property *Property) (resolved *Property, success bool) { diff --git a/pkg/scanners/cloudformation/parser/fn_get_attr_test.go b/pkg/iac/scanners/cloudformation/parser/fn_get_attr_test.go similarity index 100% rename from pkg/scanners/cloudformation/parser/fn_get_attr_test.go rename to pkg/iac/scanners/cloudformation/parser/fn_get_attr_test.go diff --git a/pkg/scanners/cloudformation/parser/fn_if.go b/pkg/iac/scanners/cloudformation/parser/fn_if.go similarity index 100% rename from pkg/scanners/cloudformation/parser/fn_if.go rename to pkg/iac/scanners/cloudformation/parser/fn_if.go diff --git a/pkg/scanners/cloudformation/parser/fn_if_test.go b/pkg/iac/scanners/cloudformation/parser/fn_if_test.go similarity index 93% rename from pkg/scanners/cloudformation/parser/fn_if_test.go rename to pkg/iac/scanners/cloudformation/parser/fn_if_test.go index eba1e080ed20..a232b157e7a7 100644 --- a/pkg/scanners/cloudformation/parser/fn_if_test.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_if_test.go @@ -4,8 +4,7 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git 
a/pkg/scanners/cloudformation/parser/fn_join.go b/pkg/iac/scanners/cloudformation/parser/fn_join.go similarity index 90% rename from pkg/scanners/cloudformation/parser/fn_join.go rename to pkg/iac/scanners/cloudformation/parser/fn_join.go index 961248a997f2..e1d39dc702f7 100644 --- a/pkg/scanners/cloudformation/parser/fn_join.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_join.go @@ -3,7 +3,7 @@ package parser import ( "strings" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) func ResolveJoin(property *Property) (resolved *Property, success bool) { diff --git a/pkg/scanners/cloudformation/parser/fn_join_test.go b/pkg/iac/scanners/cloudformation/parser/fn_join_test.go similarity index 97% rename from pkg/scanners/cloudformation/parser/fn_join_test.go rename to pkg/iac/scanners/cloudformation/parser/fn_join_test.go index a44ab64039b0..6e8abf143f19 100644 --- a/pkg/scanners/cloudformation/parser/fn_join_test.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_join_test.go @@ -4,8 +4,7 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/pkg/scanners/cloudformation/parser/fn_length.go b/pkg/iac/scanners/cloudformation/parser/fn_length.go similarity index 84% rename from pkg/scanners/cloudformation/parser/fn_length.go rename to pkg/iac/scanners/cloudformation/parser/fn_length.go index 664bc933c158..2026dd4170e9 100644 --- a/pkg/scanners/cloudformation/parser/fn_length.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_length.go @@ -1,6 +1,8 @@ package parser -import "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +import ( + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) func ResolveLength(property *Property) (*Property, bool) { if !property.isFunction() { diff --git a/pkg/scanners/cloudformation/parser/fn_length_test.go b/pkg/iac/scanners/cloudformation/parser/fn_length_test.go similarity index 96% rename from pkg/scanners/cloudformation/parser/fn_length_test.go rename to pkg/iac/scanners/cloudformation/parser/fn_length_test.go index af9d842dd339..aa916ad0a972 100644 --- a/pkg/scanners/cloudformation/parser/fn_length_test.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_length_test.go @@ -3,7 +3,7 @@ package parser import ( "testing" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/stretchr/testify/require" ) diff --git a/pkg/scanners/cloudformation/parser/fn_not.go b/pkg/iac/scanners/cloudformation/parser/fn_not.go similarity index 85% rename from pkg/scanners/cloudformation/parser/fn_not.go rename to pkg/iac/scanners/cloudformation/parser/fn_not.go index a61390d26cf3..fa76db76319b 100644 --- a/pkg/scanners/cloudformation/parser/fn_not.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_not.go @@ -1,6 +1,8 @@ package parser -import "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +import ( + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) func ResolveNot(property *Property) (resolved *Property, success bool) { if !property.isFunction() { diff --git a/pkg/scanners/cloudformation/parser/fn_not_test.go b/pkg/iac/scanners/cloudformation/parser/fn_not_test.go similarity index 97% rename from pkg/scanners/cloudformation/parser/fn_not_test.go rename to pkg/iac/scanners/cloudformation/parser/fn_not_test.go index 4e19b3ad069f..55cdcede75eb 100644 --- a/pkg/scanners/cloudformation/parser/fn_not_test.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_not_test.go @@ 
-4,8 +4,7 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/pkg/scanners/cloudformation/parser/fn_or.go b/pkg/iac/scanners/cloudformation/parser/fn_or.go similarity index 89% rename from pkg/scanners/cloudformation/parser/fn_or.go rename to pkg/iac/scanners/cloudformation/parser/fn_or.go index 0da432b350bf..48fd802d1065 100644 --- a/pkg/scanners/cloudformation/parser/fn_or.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_or.go @@ -1,6 +1,8 @@ package parser -import "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +import ( + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) func ResolveOr(property *Property) (resolved *Property, success bool) { if !property.isFunction() { diff --git a/pkg/scanners/cloudformation/parser/fn_or_test.go b/pkg/iac/scanners/cloudformation/parser/fn_or_test.go similarity index 97% rename from pkg/scanners/cloudformation/parser/fn_or_test.go rename to pkg/iac/scanners/cloudformation/parser/fn_or_test.go index 0c4f3dcd1954..095af0b940a7 100644 --- a/pkg/scanners/cloudformation/parser/fn_or_test.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_or_test.go @@ -4,8 +4,7 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/pkg/scanners/cloudformation/parser/fn_ref.go b/pkg/iac/scanners/cloudformation/parser/fn_ref.go similarity index 93% rename from pkg/scanners/cloudformation/parser/fn_ref.go rename to 
pkg/iac/scanners/cloudformation/parser/fn_ref.go index e57e3bf21eb0..afc6ead7cf20 100644 --- a/pkg/scanners/cloudformation/parser/fn_ref.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_ref.go @@ -1,7 +1,7 @@ package parser import ( - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) func ResolveReference(property *Property) (resolved *Property, success bool) { diff --git a/pkg/scanners/cloudformation/parser/fn_ref_test.go b/pkg/iac/scanners/cloudformation/parser/fn_ref_test.go similarity index 96% rename from pkg/scanners/cloudformation/parser/fn_ref_test.go rename to pkg/iac/scanners/cloudformation/parser/fn_ref_test.go index 98149e4792ec..e017190b44db 100644 --- a/pkg/scanners/cloudformation/parser/fn_ref_test.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_ref_test.go @@ -4,8 +4,7 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/pkg/scanners/cloudformation/parser/fn_select.go b/pkg/iac/scanners/cloudformation/parser/fn_select.go similarity index 92% rename from pkg/scanners/cloudformation/parser/fn_select.go rename to pkg/iac/scanners/cloudformation/parser/fn_select.go index 3289004847c8..c528223a2325 100644 --- a/pkg/scanners/cloudformation/parser/fn_select.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_select.go @@ -1,7 +1,7 @@ package parser import ( - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) func ResolveSelect(property *Property) (resolved *Property, success bool) { diff --git a/pkg/scanners/cloudformation/parser/fn_select_test.go 
b/pkg/iac/scanners/cloudformation/parser/fn_select_test.go similarity index 100% rename from pkg/scanners/cloudformation/parser/fn_select_test.go rename to pkg/iac/scanners/cloudformation/parser/fn_select_test.go diff --git a/pkg/scanners/cloudformation/parser/fn_split.go b/pkg/iac/scanners/cloudformation/parser/fn_split.go similarity index 93% rename from pkg/scanners/cloudformation/parser/fn_split.go rename to pkg/iac/scanners/cloudformation/parser/fn_split.go index 453de5a5b191..cddda20ef190 100644 --- a/pkg/scanners/cloudformation/parser/fn_split.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_split.go @@ -3,7 +3,7 @@ package parser import ( "strings" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) func ResolveSplit(property *Property) (resolved *Property, success bool) { diff --git a/pkg/scanners/cloudformation/parser/fn_split_test.go b/pkg/iac/scanners/cloudformation/parser/fn_split_test.go similarity index 93% rename from pkg/scanners/cloudformation/parser/fn_split_test.go rename to pkg/iac/scanners/cloudformation/parser/fn_split_test.go index 1c69cb5459e5..f3e73f2c3d4a 100644 --- a/pkg/scanners/cloudformation/parser/fn_split_test.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_split_test.go @@ -2,7 +2,7 @@ package parser import ( "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/pkg/scanners/cloudformation/parser/fn_sub.go b/pkg/iac/scanners/cloudformation/parser/fn_sub.go similarity index 96% rename from pkg/scanners/cloudformation/parser/fn_sub.go rename to pkg/iac/scanners/cloudformation/parser/fn_sub.go index 0dc2012daa68..52db66cf9757 100644 --- 
a/pkg/scanners/cloudformation/parser/fn_sub.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_sub.go @@ -5,7 +5,7 @@ import ( "strconv" "strings" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) func ResolveSub(property *Property) (resolved *Property, success bool) { diff --git a/pkg/scanners/cloudformation/parser/fn_sub_test.go b/pkg/iac/scanners/cloudformation/parser/fn_sub_test.go similarity index 100% rename from pkg/scanners/cloudformation/parser/fn_sub_test.go rename to pkg/iac/scanners/cloudformation/parser/fn_sub_test.go diff --git a/pkg/scanners/cloudformation/parser/intrinsics.go b/pkg/iac/scanners/cloudformation/parser/intrinsics.go similarity index 100% rename from pkg/scanners/cloudformation/parser/intrinsics.go rename to pkg/iac/scanners/cloudformation/parser/intrinsics.go diff --git a/pkg/scanners/cloudformation/parser/intrinsics_test.go b/pkg/iac/scanners/cloudformation/parser/intrinsics_test.go similarity index 100% rename from pkg/scanners/cloudformation/parser/intrinsics_test.go rename to pkg/iac/scanners/cloudformation/parser/intrinsics_test.go diff --git a/pkg/scanners/cloudformation/parser/parameter.go b/pkg/iac/scanners/cloudformation/parser/parameter.go similarity index 97% rename from pkg/scanners/cloudformation/parser/parameter.go rename to pkg/iac/scanners/cloudformation/parser/parameter.go index 8c833c7f7367..b3683ed02e8c 100644 --- a/pkg/scanners/cloudformation/parser/parameter.go +++ b/pkg/iac/scanners/cloudformation/parser/parameter.go @@ -7,10 +7,9 @@ import ( "strconv" "strings" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/liamg/jfather" "gopkg.in/yaml.v3" - - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" ) type Parameter struct { diff --git a/pkg/scanners/cloudformation/parser/parameters_test.go 
b/pkg/iac/scanners/cloudformation/parser/parameters_test.go similarity index 100% rename from pkg/scanners/cloudformation/parser/parameters_test.go rename to pkg/iac/scanners/cloudformation/parser/parameters_test.go diff --git a/pkg/scanners/cloudformation/parser/parser.go b/pkg/iac/scanners/cloudformation/parser/parser.go similarity index 98% rename from pkg/scanners/cloudformation/parser/parser.go rename to pkg/iac/scanners/cloudformation/parser/parser.go index 6e6ca1e9e453..58486a9109c5 100644 --- a/pkg/scanners/cloudformation/parser/parser.go +++ b/pkg/iac/scanners/cloudformation/parser/parser.go @@ -11,12 +11,12 @@ import ( "path/filepath" "strings" + "github.com/aquasecurity/trivy/pkg/iac/detection" "github.com/liamg/jfather" "gopkg.in/yaml.v3" "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy/pkg/detection" ) var _ options.ConfigurableParser = (*Parser)(nil) diff --git a/pkg/scanners/cloudformation/parser/parser_test.go b/pkg/iac/scanners/cloudformation/parser/parser_test.go similarity index 100% rename from pkg/scanners/cloudformation/parser/parser_test.go rename to pkg/iac/scanners/cloudformation/parser/parser_test.go diff --git a/pkg/scanners/cloudformation/parser/property.go b/pkg/iac/scanners/cloudformation/parser/property.go similarity index 99% rename from pkg/scanners/cloudformation/parser/property.go rename to pkg/iac/scanners/cloudformation/parser/property.go index 7090df4e9886..0711bae8f826 100644 --- a/pkg/scanners/cloudformation/parser/property.go +++ b/pkg/iac/scanners/cloudformation/parser/property.go @@ -6,11 +6,11 @@ import ( "strconv" "strings" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/liamg/jfather" "gopkg.in/yaml.v3" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" ) type 
EqualityOptions = int diff --git a/pkg/scanners/cloudformation/parser/property_conversion.go b/pkg/iac/scanners/cloudformation/parser/property_conversion.go similarity index 97% rename from pkg/scanners/cloudformation/parser/property_conversion.go rename to pkg/iac/scanners/cloudformation/parser/property_conversion.go index 45ff7f3dc927..d286fa4dd797 100644 --- a/pkg/scanners/cloudformation/parser/property_conversion.go +++ b/pkg/iac/scanners/cloudformation/parser/property_conversion.go @@ -6,7 +6,7 @@ import ( "strconv" "strings" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) func (p *Property) IsConvertableTo(conversionType cftypes.CfType) bool { diff --git a/pkg/scanners/cloudformation/parser/property_helpers.go b/pkg/iac/scanners/cloudformation/parser/property_helpers.go similarity index 98% rename from pkg/scanners/cloudformation/parser/property_helpers.go rename to pkg/iac/scanners/cloudformation/parser/property_helpers.go index f36bc77cba1c..c7b9d9efac2d 100644 --- a/pkg/scanners/cloudformation/parser/property_helpers.go +++ b/pkg/iac/scanners/cloudformation/parser/property_helpers.go @@ -5,7 +5,7 @@ import ( "strings" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) func (p *Property) IsNil() bool { diff --git a/pkg/scanners/cloudformation/parser/property_helpers_test.go b/pkg/iac/scanners/cloudformation/parser/property_helpers_test.go similarity index 98% rename from pkg/scanners/cloudformation/parser/property_helpers_test.go rename to pkg/iac/scanners/cloudformation/parser/property_helpers_test.go index c421cf3b9357..1fa1885a408b 100644 --- a/pkg/scanners/cloudformation/parser/property_helpers_test.go +++ b/pkg/iac/scanners/cloudformation/parser/property_helpers_test.go 
@@ -4,7 +4,7 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/stretchr/testify/assert" ) diff --git a/pkg/scanners/cloudformation/parser/pseudo_parameters.go b/pkg/iac/scanners/cloudformation/parser/pseudo_parameters.go similarity index 92% rename from pkg/scanners/cloudformation/parser/pseudo_parameters.go rename to pkg/iac/scanners/cloudformation/parser/pseudo_parameters.go index 3027095c13b7..ab825f02b8fd 100644 --- a/pkg/scanners/cloudformation/parser/pseudo_parameters.go +++ b/pkg/iac/scanners/cloudformation/parser/pseudo_parameters.go @@ -1,6 +1,8 @@ package parser -import "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" +import ( + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) type pseudoParameter struct { t cftypes.CfType diff --git a/pkg/scanners/cloudformation/parser/pseudo_parameters_test.go b/pkg/iac/scanners/cloudformation/parser/pseudo_parameters_test.go similarity index 100% rename from pkg/scanners/cloudformation/parser/pseudo_parameters_test.go rename to pkg/iac/scanners/cloudformation/parser/pseudo_parameters_test.go diff --git a/pkg/scanners/cloudformation/parser/reference.go b/pkg/iac/scanners/cloudformation/parser/reference.go similarity index 100% rename from pkg/scanners/cloudformation/parser/reference.go rename to pkg/iac/scanners/cloudformation/parser/reference.go diff --git a/pkg/scanners/cloudformation/parser/resource.go b/pkg/iac/scanners/cloudformation/parser/resource.go similarity index 100% rename from pkg/scanners/cloudformation/parser/resource.go rename to pkg/iac/scanners/cloudformation/parser/resource.go diff --git a/pkg/scanners/cloudformation/parser/resource_test.go b/pkg/iac/scanners/cloudformation/parser/resource_test.go similarity index 95% rename from 
pkg/scanners/cloudformation/parser/resource_test.go rename to pkg/iac/scanners/cloudformation/parser/resource_test.go index eff28ae63931..89d2448954e6 100644 --- a/pkg/scanners/cloudformation/parser/resource_test.go +++ b/pkg/iac/scanners/cloudformation/parser/resource_test.go @@ -3,7 +3,7 @@ package parser import ( "testing" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/stretchr/testify/require" ) diff --git a/pkg/scanners/cloudformation/parser/util.go b/pkg/iac/scanners/cloudformation/parser/util.go similarity index 95% rename from pkg/scanners/cloudformation/parser/util.go rename to pkg/iac/scanners/cloudformation/parser/util.go index 0836a1fc2413..a5786103d18a 100644 --- a/pkg/scanners/cloudformation/parser/util.go +++ b/pkg/iac/scanners/cloudformation/parser/util.go @@ -3,11 +3,10 @@ package parser import ( "strconv" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/kubernetes/parser" "github.com/liamg/jfather" "gopkg.in/yaml.v3" - - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/cftypes" - "github.com/aquasecurity/trivy/pkg/scanners/kubernetes/parser" ) func setPropertyValueFromJson(node jfather.Node, propertyData *PropertyInner) error { diff --git a/pkg/scanners/cloudformation/scanner.go b/pkg/iac/scanners/cloudformation/scanner.go similarity index 92% rename from pkg/scanners/cloudformation/scanner.go rename to pkg/iac/scanners/cloudformation/scanner.go index 615a37624d41..c93ca7c841e8 100644 --- a/pkg/scanners/cloudformation/scanner.go +++ b/pkg/iac/scanners/cloudformation/scanner.go @@ -15,15 +15,15 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/types" - adapter 
"github.com/aquasecurity/trivy/internal/adapters/cloudformation" - "github.com/aquasecurity/trivy/pkg/scanners" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" + adapter "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func WithParameters(params map[string]any) options.ScannerOption { return func(cs options.ConfigurableScanner) { if s, ok := cs.(*Scanner); ok { - s.addParserOptions(parser.WithParameters(params)) + s.addParserOptions(parser2.WithParameters(params)) } } } @@ -31,7 +31,7 @@ func WithParameters(params map[string]any) options.ScannerOption { func WithParameterFiles(files ...string) options.ScannerOption { return func(cs options.ConfigurableScanner) { if s, ok := cs.(*Scanner); ok { - s.addParserOptions(parser.WithParameterFiles(files...)) + s.addParserOptions(parser2.WithParameterFiles(files...)) } } } @@ -39,7 +39,7 @@ func WithParameterFiles(files ...string) options.ScannerOption { func WithConfigsFS(fsys fs.FS) options.ScannerOption { return func(cs options.ConfigurableScanner) { if s, ok := cs.(*Scanner); ok { - s.addParserOptions(parser.WithConfigsFS(fsys)) + s.addParserOptions(parser2.WithConfigsFS(fsys)) } } } @@ -51,7 +51,7 @@ type Scanner struct { // nolint: gocritic debug debug.Logger policyDirs []string policyReaders []io.Reader - parser *parser.Parser + parser *parser2.Parser regoScanner *rego.Scanner skipRequired bool regoOnly bool @@ -131,7 +131,7 @@ func New(opts ...options.ScannerOption) *Scanner { opt(s) } s.addParserOptions(options.ParserWithSkipRequiredCheck(s.skipRequired)) - s.parser = parser.New(s.parserOptions...) + s.parser = parser2.New(s.parserOptions...) 
return s } @@ -206,7 +206,7 @@ func (s *Scanner) ScanFile(ctx context.Context, fsys fs.FS, path string) (scan.R return results, nil } -func (s *Scanner) scanFileContext(ctx context.Context, regoScanner *rego.Scanner, cfCtx *parser.FileContext, fsys fs.FS) (results scan.Results, err error) { +func (s *Scanner) scanFileContext(ctx context.Context, regoScanner *rego.Scanner, cfCtx *parser2.FileContext, fsys fs.FS) (results scan.Results, err error) { state := adapter.Adapt(*cfCtx) if state == nil { return nil, nil diff --git a/pkg/scanners/cloudformation/scanner_test.go b/pkg/iac/scanners/cloudformation/scanner_test.go similarity index 100% rename from pkg/scanners/cloudformation/scanner_test.go rename to pkg/iac/scanners/cloudformation/scanner_test.go diff --git a/pkg/scanners/cloudformation/test/cf_scanning_test.go b/pkg/iac/scanners/cloudformation/test/cf_scanning_test.go similarity index 95% rename from pkg/scanners/cloudformation/test/cf_scanning_test.go rename to pkg/iac/scanners/cloudformation/test/cf_scanning_test.go index 04a8d5f1c4ec..8f52c7a197c0 100644 --- a/pkg/scanners/cloudformation/test/cf_scanning_test.go +++ b/pkg/iac/scanners/cloudformation/test/cf_scanning_test.go @@ -6,11 +6,11 @@ import ( "os" "testing" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation" ) func Test_basic_cloudformation_scanning(t *testing.T) { diff --git a/pkg/scanners/cloudformation/test/examples/bucket/bucket.yaml b/pkg/iac/scanners/cloudformation/test/examples/bucket/bucket.yaml similarity index 100% rename from pkg/scanners/cloudformation/test/examples/bucket/bucket.yaml rename to pkg/iac/scanners/cloudformation/test/examples/bucket/bucket.yaml diff --git a/pkg/scanners/cloudformation/test/examples/ignores/bucket_with_ignores.yaml 
b/pkg/iac/scanners/cloudformation/test/examples/ignores/bucket_with_ignores.yaml similarity index 100% rename from pkg/scanners/cloudformation/test/examples/ignores/bucket_with_ignores.yaml rename to pkg/iac/scanners/cloudformation/test/examples/ignores/bucket_with_ignores.yaml diff --git a/pkg/scanners/cloudformation/test/examples/roles/roles.yml b/pkg/iac/scanners/cloudformation/test/examples/roles/roles.yml similarity index 100% rename from pkg/scanners/cloudformation/test/examples/roles/roles.yml rename to pkg/iac/scanners/cloudformation/test/examples/roles/roles.yml diff --git a/pkg/scanners/dockerfile/parser/parser.go b/pkg/iac/scanners/dockerfile/parser/parser.go similarity index 98% rename from pkg/scanners/dockerfile/parser/parser.go rename to pkg/iac/scanners/dockerfile/parser/parser.go index 18ed130dccde..6cd8b2e3f3ec 100644 --- a/pkg/scanners/dockerfile/parser/parser.go +++ b/pkg/iac/scanners/dockerfile/parser/parser.go @@ -8,13 +8,13 @@ import ( "path/filepath" "strings" + "github.com/aquasecurity/trivy/pkg/iac/detection" "github.com/moby/buildkit/frontend/dockerfile/instructions" "github.com/moby/buildkit/frontend/dockerfile/parser" "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/providers/dockerfile" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy/pkg/detection" ) var _ options.ConfigurableParser = (*Parser)(nil) diff --git a/pkg/scanners/dockerfile/parser/parser_test.go b/pkg/iac/scanners/dockerfile/parser/parser_test.go similarity index 100% rename from pkg/scanners/dockerfile/parser/parser_test.go rename to pkg/iac/scanners/dockerfile/parser/parser_test.go diff --git a/pkg/scanners/dockerfile/scanner.go b/pkg/iac/scanners/dockerfile/scanner.go similarity index 97% rename from pkg/scanners/dockerfile/scanner.go rename to pkg/iac/scanners/dockerfile/scanner.go index 10583fac0c53..5d7bca114562 100644 --- 
a/pkg/scanners/dockerfile/scanner.go +++ b/pkg/iac/scanners/dockerfile/scanner.go @@ -12,8 +12,8 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners" - "github.com/aquasecurity/trivy/pkg/scanners/dockerfile/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + "github.com/aquasecurity/trivy/pkg/iac/scanners/dockerfile/parser" ) var _ scanners.FSScanner = (*Scanner)(nil) diff --git a/pkg/scanners/dockerfile/scanner_test.go b/pkg/iac/scanners/dockerfile/scanner_test.go similarity index 100% rename from pkg/scanners/dockerfile/scanner_test.go rename to pkg/iac/scanners/dockerfile/scanner_test.go diff --git a/pkg/scanners/helm/options.go b/pkg/iac/scanners/helm/options.go similarity index 95% rename from pkg/scanners/helm/options.go rename to pkg/iac/scanners/helm/options.go index 7754f2978df8..b2ec6ddf987d 100644 --- a/pkg/scanners/helm/options.go +++ b/pkg/iac/scanners/helm/options.go @@ -2,7 +2,7 @@ package helm import ( "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy/pkg/scanners/helm/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/helm/parser" ) type ConfigurableHelmScanner interface { diff --git a/pkg/scanners/helm/parser/option.go b/pkg/iac/scanners/helm/parser/option.go similarity index 100% rename from pkg/scanners/helm/parser/option.go rename to pkg/iac/scanners/helm/parser/option.go diff --git a/pkg/scanners/helm/parser/parser.go b/pkg/iac/scanners/helm/parser/parser.go similarity index 97% rename from pkg/scanners/helm/parser/parser.go rename to pkg/iac/scanners/helm/parser/parser.go index 6ec20577f18b..9c94e911283e 100644 --- a/pkg/scanners/helm/parser/parser.go +++ b/pkg/iac/scanners/helm/parser/parser.go @@ -13,6 +13,7 @@ import ( "sort" "strings" + detection2 
"github.com/aquasecurity/trivy/pkg/iac/detection" "github.com/google/uuid" "gopkg.in/yaml.v3" "helm.sh/helm/v3/pkg/action" @@ -23,7 +24,6 @@ import ( "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy/pkg/detection" ) var manifestNameRegex = regexp.MustCompile("# Source: [^/]+/(.+)") @@ -119,7 +119,7 @@ func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) error { return nil } - if detection.IsArchive(path) { + if detection2.IsArchive(path) { tarFS, err := p.addTarToFS(path) if errors.Is(err, errSkipFS) { // an unpacked Chart already exists @@ -317,5 +317,5 @@ func (p *Parser) required(path string, workingFS fs.FS) bool { return false } - return detection.IsType(path, bytes.NewReader(content), detection.FileTypeHelm) + return detection2.IsType(path, bytes.NewReader(content), detection2.FileTypeHelm) } diff --git a/pkg/scanners/helm/parser/parser_tar.go b/pkg/iac/scanners/helm/parser/parser_tar.go similarity index 98% rename from pkg/scanners/helm/parser/parser_tar.go rename to pkg/iac/scanners/helm/parser/parser_tar.go index 4a9502c0e599..ad3abdad82ca 100644 --- a/pkg/scanners/helm/parser/parser_tar.go +++ b/pkg/iac/scanners/helm/parser/parser_tar.go @@ -11,9 +11,8 @@ import ( "os" "path/filepath" + "github.com/aquasecurity/trivy/pkg/iac/detection" "github.com/liamg/memoryfs" - - "github.com/aquasecurity/trivy/pkg/detection" ) var errSkipFS = errors.New("skip parse FS") diff --git a/pkg/scanners/helm/parser/parser_test.go b/pkg/iac/scanners/helm/parser/parser_test.go similarity index 100% rename from pkg/scanners/helm/parser/parser_test.go rename to pkg/iac/scanners/helm/parser/parser_test.go diff --git a/pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart-0.1.0.tgz b/pkg/iac/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart-0.1.0.tgz similarity index 100% rename from 
pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart-0.1.0.tgz rename to pkg/iac/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart-0.1.0.tgz diff --git a/pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/Chart.yaml b/pkg/iac/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/Chart.yaml similarity index 100% rename from pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/Chart.yaml rename to pkg/iac/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/Chart.yaml diff --git a/pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/templates/pod.yaml b/pkg/iac/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/templates/pod.yaml similarity index 100% rename from pkg/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/templates/pod.yaml rename to pkg/iac/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/templates/pod.yaml diff --git a/pkg/scanners/helm/parser/vals.go b/pkg/iac/scanners/helm/parser/vals.go similarity index 100% rename from pkg/scanners/helm/parser/vals.go rename to pkg/iac/scanners/helm/parser/vals.go diff --git a/pkg/scanners/helm/scanner.go b/pkg/iac/scanners/helm/scanner.go similarity index 95% rename from pkg/scanners/helm/scanner.go rename to pkg/iac/scanners/helm/scanner.go index e9f8c9f68741..829c69b97fd1 100644 --- a/pkg/scanners/helm/scanner.go +++ b/pkg/iac/scanners/helm/scanner.go @@ -8,6 +8,10 @@ import ( "path/filepath" "strings" + "github.com/aquasecurity/trivy/pkg/iac/detection" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + "github.com/aquasecurity/trivy/pkg/iac/scanners/helm/parser" + kparser "github.com/aquasecurity/trivy/pkg/iac/scanners/kubernetes/parser" "github.com/liamg/memoryfs" "github.com/aquasecurity/defsec/pkg/debug" @@ -16,10 +20,6 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" 
"github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/detection" - "github.com/aquasecurity/trivy/pkg/scanners" - "github.com/aquasecurity/trivy/pkg/scanners/helm/parser" - kparser "github.com/aquasecurity/trivy/pkg/scanners/kubernetes/parser" ) var _ scanners.FSScanner = (*Scanner)(nil) diff --git a/pkg/scanners/helm/test/mysql/.helmignore b/pkg/iac/scanners/helm/test/mysql/.helmignore similarity index 100% rename from pkg/scanners/helm/test/mysql/.helmignore rename to pkg/iac/scanners/helm/test/mysql/.helmignore diff --git a/pkg/scanners/helm/test/mysql/Chart.lock b/pkg/iac/scanners/helm/test/mysql/Chart.lock similarity index 100% rename from pkg/scanners/helm/test/mysql/Chart.lock rename to pkg/iac/scanners/helm/test/mysql/Chart.lock diff --git a/pkg/scanners/helm/test/mysql/Chart.yaml b/pkg/iac/scanners/helm/test/mysql/Chart.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/Chart.yaml rename to pkg/iac/scanners/helm/test/mysql/Chart.yaml diff --git a/pkg/scanners/helm/test/mysql/README.md b/pkg/iac/scanners/helm/test/mysql/README.md similarity index 100% rename from pkg/scanners/helm/test/mysql/README.md rename to pkg/iac/scanners/helm/test/mysql/README.md diff --git a/pkg/scanners/helm/test/mysql/charts/common/.helmignore b/pkg/iac/scanners/helm/test/mysql/charts/common/.helmignore similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/.helmignore rename to pkg/iac/scanners/helm/test/mysql/charts/common/.helmignore diff --git a/pkg/scanners/helm/test/mysql/charts/common/Chart.yaml b/pkg/iac/scanners/helm/test/mysql/charts/common/Chart.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/Chart.yaml rename to pkg/iac/scanners/helm/test/mysql/charts/common/Chart.yaml diff --git a/pkg/scanners/helm/test/mysql/charts/common/README.md 
b/pkg/iac/scanners/helm/test/mysql/charts/common/README.md similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/README.md rename to pkg/iac/scanners/helm/test/mysql/charts/common/README.md diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_affinities.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_affinities.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/_affinities.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/_affinities.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_capabilities.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_capabilities.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/_capabilities.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/_capabilities.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_errors.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_errors.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/_errors.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/_errors.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_images.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_images.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/_images.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/_images.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_ingress.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_ingress.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/_ingress.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/_ingress.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_labels.tpl 
b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_labels.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/_labels.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/_labels.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_names.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_names.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/_names.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/_names.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_secrets.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_secrets.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/_secrets.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/_secrets.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_storage.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_storage.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/_storage.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/_storage.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_tplvalues.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_tplvalues.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/_tplvalues.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/_tplvalues.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_utils.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_utils.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/_utils.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/_utils.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/_warnings.tpl 
b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_warnings.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/_warnings.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/_warnings.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_cassandra.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_cassandra.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/validations/_cassandra.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_cassandra.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_mariadb.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_mariadb.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/validations/_mariadb.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_mariadb.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_mongodb.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_mongodb.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/validations/_mongodb.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_mongodb.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_postgresql.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_postgresql.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/validations/_postgresql.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_postgresql.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_redis.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_redis.tpl similarity index 100% rename from 
pkg/scanners/helm/test/mysql/charts/common/templates/validations/_redis.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_redis.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/templates/validations/_validations.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_validations.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/templates/validations/_validations.tpl rename to pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_validations.tpl diff --git a/pkg/scanners/helm/test/mysql/charts/common/values.yaml b/pkg/iac/scanners/helm/test/mysql/charts/common/values.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/charts/common/values.yaml rename to pkg/iac/scanners/helm/test/mysql/charts/common/values.yaml diff --git a/pkg/scanners/helm/test/mysql/ci/values-production-with-rbac.yaml b/pkg/iac/scanners/helm/test/mysql/ci/values-production-with-rbac.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/ci/values-production-with-rbac.yaml rename to pkg/iac/scanners/helm/test/mysql/ci/values-production-with-rbac.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/NOTES.txt b/pkg/iac/scanners/helm/test/mysql/templates/NOTES.txt similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/NOTES.txt rename to pkg/iac/scanners/helm/test/mysql/templates/NOTES.txt diff --git a/pkg/scanners/helm/test/mysql/templates/_helpers.tpl b/pkg/iac/scanners/helm/test/mysql/templates/_helpers.tpl similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/_helpers.tpl rename to pkg/iac/scanners/helm/test/mysql/templates/_helpers.tpl diff --git a/pkg/scanners/helm/test/mysql/templates/extra-list.yaml b/pkg/iac/scanners/helm/test/mysql/templates/extra-list.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/extra-list.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/extra-list.yaml 
diff --git a/pkg/scanners/helm/test/mysql/templates/metrics-svc.yaml b/pkg/iac/scanners/helm/test/mysql/templates/metrics-svc.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/metrics-svc.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/metrics-svc.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/networkpolicy.yaml b/pkg/iac/scanners/helm/test/mysql/templates/networkpolicy.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/networkpolicy.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/networkpolicy.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/primary/configmap.yaml b/pkg/iac/scanners/helm/test/mysql/templates/primary/configmap.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/primary/configmap.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/primary/configmap.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/primary/initialization-configmap.yaml b/pkg/iac/scanners/helm/test/mysql/templates/primary/initialization-configmap.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/primary/initialization-configmap.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/primary/initialization-configmap.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/primary/pdb.yaml b/pkg/iac/scanners/helm/test/mysql/templates/primary/pdb.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/primary/pdb.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/primary/pdb.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/primary/statefulset.yaml b/pkg/iac/scanners/helm/test/mysql/templates/primary/statefulset.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/primary/statefulset.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/primary/statefulset.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/primary/svc-headless.yaml 
b/pkg/iac/scanners/helm/test/mysql/templates/primary/svc-headless.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/primary/svc-headless.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/primary/svc-headless.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/primary/svc.yaml b/pkg/iac/scanners/helm/test/mysql/templates/primary/svc.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/primary/svc.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/primary/svc.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/role.yaml b/pkg/iac/scanners/helm/test/mysql/templates/role.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/role.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/role.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/rolebinding.yaml b/pkg/iac/scanners/helm/test/mysql/templates/rolebinding.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/rolebinding.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/rolebinding.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/secondary/configmap.yaml b/pkg/iac/scanners/helm/test/mysql/templates/secondary/configmap.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/secondary/configmap.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/secondary/configmap.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/secondary/pdb.yaml b/pkg/iac/scanners/helm/test/mysql/templates/secondary/pdb.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/secondary/pdb.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/secondary/pdb.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/secondary/statefulset.yaml b/pkg/iac/scanners/helm/test/mysql/templates/secondary/statefulset.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/secondary/statefulset.yaml rename to 
pkg/iac/scanners/helm/test/mysql/templates/secondary/statefulset.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/secondary/svc-headless.yaml b/pkg/iac/scanners/helm/test/mysql/templates/secondary/svc-headless.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/secondary/svc-headless.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/secondary/svc-headless.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/secondary/svc.yaml b/pkg/iac/scanners/helm/test/mysql/templates/secondary/svc.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/secondary/svc.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/secondary/svc.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/secrets.yaml b/pkg/iac/scanners/helm/test/mysql/templates/secrets.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/secrets.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/secrets.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/serviceaccount.yaml b/pkg/iac/scanners/helm/test/mysql/templates/serviceaccount.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/serviceaccount.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/serviceaccount.yaml diff --git a/pkg/scanners/helm/test/mysql/templates/servicemonitor.yaml b/pkg/iac/scanners/helm/test/mysql/templates/servicemonitor.yaml similarity index 100% rename from pkg/scanners/helm/test/mysql/templates/servicemonitor.yaml rename to pkg/iac/scanners/helm/test/mysql/templates/servicemonitor.yaml diff --git a/pkg/scanners/helm/test/mysql/values.schema.json b/pkg/iac/scanners/helm/test/mysql/values.schema.json similarity index 100% rename from pkg/scanners/helm/test/mysql/values.schema.json rename to pkg/iac/scanners/helm/test/mysql/values.schema.json diff --git a/pkg/scanners/helm/test/mysql/values.yaml b/pkg/iac/scanners/helm/test/mysql/values.yaml similarity index 100% rename from 
pkg/scanners/helm/test/mysql/values.yaml rename to pkg/iac/scanners/helm/test/mysql/values.yaml diff --git a/pkg/scanners/helm/test/option_test.go b/pkg/iac/scanners/helm/test/option_test.go similarity index 88% rename from pkg/scanners/helm/test/option_test.go rename to pkg/iac/scanners/helm/test/option_test.go index bdce55341fa3..66f961a6b53d 100644 --- a/pkg/scanners/helm/test/option_test.go +++ b/pkg/iac/scanners/helm/test/option_test.go @@ -7,11 +7,11 @@ import ( "strings" "testing" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/helm/parser" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy/pkg/scanners/helm/parser" ) func Test_helm_parser_with_options_with_values_file(t *testing.T) { @@ -37,10 +37,10 @@ func Test_helm_parser_with_options_with_values_file(t *testing.T) { var opts []options.ParserOption if test.valuesFile != "" { - opts = append(opts, parser.OptionWithValuesFile(test.valuesFile)) + opts = append(opts, parser2.OptionWithValuesFile(test.valuesFile)) } - helmParser := parser.New(chartName, opts...) + helmParser := parser2.New(chartName, opts...) err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") require.NoError(t, err) manifests, err := helmParser.RenderedChartFiles() @@ -87,14 +87,14 @@ func Test_helm_parser_with_options_with_set_value(t *testing.T) { var opts []options.ParserOption if test.valuesFile != "" { - opts = append(opts, parser.OptionWithValuesFile(test.valuesFile)) + opts = append(opts, parser2.OptionWithValuesFile(test.valuesFile)) } if test.values != "" { - opts = append(opts, parser.OptionWithValues(test.values)) + opts = append(opts, parser2.OptionWithValues(test.values)) } - helmParser := parser.New(chartName, opts...) + helmParser := parser2.New(chartName, opts...) 
err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") require.NoError(t, err) manifests, err := helmParser.RenderedChartFiles() @@ -140,10 +140,10 @@ func Test_helm_parser_with_options_with_api_versions(t *testing.T) { var opts []options.ParserOption if len(test.apiVersions) > 0 { - opts = append(opts, parser.OptionWithAPIVersions(test.apiVersions...)) + opts = append(opts, parser2.OptionWithAPIVersions(test.apiVersions...)) } - helmParser := parser.New(chartName, opts...) + helmParser := parser2.New(chartName, opts...) err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") require.NoError(t, err) manifests, err := helmParser.RenderedChartFiles() diff --git a/pkg/scanners/helm/test/parser_test.go b/pkg/iac/scanners/helm/test/parser_test.go similarity index 97% rename from pkg/scanners/helm/test/parser_test.go rename to pkg/iac/scanners/helm/test/parser_test.go index 24130d00a104..6d9f5ad0cff3 100644 --- a/pkg/scanners/helm/test/parser_test.go +++ b/pkg/iac/scanners/helm/test/parser_test.go @@ -7,8 +7,8 @@ import ( "strings" "testing" - "github.com/aquasecurity/trivy/pkg/detection" - "github.com/aquasecurity/trivy/pkg/scanners/helm/parser" + "github.com/aquasecurity/trivy/pkg/iac/detection" + "github.com/aquasecurity/trivy/pkg/iac/scanners/helm/parser" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/pkg/scanners/helm/test/scanner_test.go b/pkg/iac/scanners/helm/test/scanner_test.go similarity index 99% rename from pkg/scanners/helm/test/scanner_test.go rename to pkg/iac/scanners/helm/test/scanner_test.go index 90cf3e65a243..9819589b2644 100644 --- a/pkg/scanners/helm/test/scanner_test.go +++ b/pkg/iac/scanners/helm/test/scanner_test.go @@ -11,7 +11,7 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy/pkg/scanners/helm" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/helm" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/pkg/scanners/helm/test/testdata/aws-cluster-autoscaler-bad.tar.gz b/pkg/iac/scanners/helm/test/testdata/aws-cluster-autoscaler-bad.tar.gz similarity index 100% rename from pkg/scanners/helm/test/testdata/aws-cluster-autoscaler-bad.tar.gz rename to pkg/iac/scanners/helm/test/testdata/aws-cluster-autoscaler-bad.tar.gz diff --git a/pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/configmap.yaml b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/configmap.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/configmap.yaml rename to pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/configmap.yaml diff --git a/pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/statefulset.yaml b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/statefulset.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/statefulset.yaml rename to pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/statefulset.yaml diff --git a/pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/svc-headless.yaml b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/svc-headless.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/svc-headless.yaml rename to pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/svc-headless.yaml diff --git a/pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/svc.yaml b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/svc.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/expected/mysql/templates/primary/svc.yaml rename to 
pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/svc.yaml diff --git a/pkg/scanners/helm/test/testdata/expected/mysql/templates/secrets.yaml b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/secrets.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/expected/mysql/templates/secrets.yaml rename to pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/secrets.yaml diff --git a/pkg/scanners/helm/test/testdata/expected/mysql/templates/serviceaccount.yaml b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/serviceaccount.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/expected/mysql/templates/serviceaccount.yaml rename to pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/serviceaccount.yaml diff --git a/pkg/scanners/helm/test/testdata/expected/options/testchart/templates/deployment.yaml b/pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/deployment.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/expected/options/testchart/templates/deployment.yaml rename to pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/deployment.yaml diff --git a/pkg/scanners/helm/test/testdata/expected/options/testchart/templates/service.yaml b/pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/service.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/expected/options/testchart/templates/service.yaml rename to pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/service.yaml diff --git a/pkg/scanners/helm/test/testdata/expected/options/testchart/templates/serviceaccount.yaml b/pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/serviceaccount.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/expected/options/testchart/templates/serviceaccount.yaml rename to 
pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/serviceaccount.yaml diff --git a/pkg/scanners/helm/test/testdata/expected/options/with-api-version/templates/pdb.yaml b/pkg/iac/scanners/helm/test/testdata/expected/options/with-api-version/templates/pdb.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/expected/options/with-api-version/templates/pdb.yaml rename to pkg/iac/scanners/helm/test/testdata/expected/options/with-api-version/templates/pdb.yaml diff --git a/pkg/scanners/helm/test/testdata/expected/testchart/templates/deployment.yaml b/pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/deployment.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/expected/testchart/templates/deployment.yaml rename to pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/deployment.yaml diff --git a/pkg/scanners/helm/test/testdata/expected/testchart/templates/service.yaml b/pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/service.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/expected/testchart/templates/service.yaml rename to pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/service.yaml diff --git a/pkg/scanners/helm/test/testdata/expected/testchart/templates/serviceaccount.yaml b/pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/serviceaccount.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/expected/testchart/templates/serviceaccount.yaml rename to pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/serviceaccount.yaml diff --git a/pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/deployment.yaml b/pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/deployment.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/deployment.yaml rename to 
pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/deployment.yaml diff --git a/pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/ingress.yaml b/pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/ingress.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/ingress.yaml rename to pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/ingress.yaml diff --git a/pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/service.yaml b/pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/service.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/expected/with-tarred-dep/templates/service.yaml rename to pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/service.yaml diff --git a/pkg/scanners/helm/test/testdata/mysql-8.8.26.tar b/pkg/iac/scanners/helm/test/testdata/mysql-8.8.26.tar similarity index 100% rename from pkg/scanners/helm/test/testdata/mysql-8.8.26.tar rename to pkg/iac/scanners/helm/test/testdata/mysql-8.8.26.tar diff --git a/pkg/scanners/helm/test/testdata/mysql-8.8.26.tar.gz b/pkg/iac/scanners/helm/test/testdata/mysql-8.8.26.tar.gz similarity index 100% rename from pkg/scanners/helm/test/testdata/mysql-8.8.26.tar.gz rename to pkg/iac/scanners/helm/test/testdata/mysql-8.8.26.tar.gz diff --git a/pkg/scanners/helm/test/testdata/mysql-8.8.26.tgz b/pkg/iac/scanners/helm/test/testdata/mysql-8.8.26.tgz similarity index 100% rename from pkg/scanners/helm/test/testdata/mysql-8.8.26.tgz rename to pkg/iac/scanners/helm/test/testdata/mysql-8.8.26.tgz diff --git a/pkg/scanners/helm/test/testdata/nope.tgz b/pkg/iac/scanners/helm/test/testdata/nope.tgz similarity index 100% rename from pkg/scanners/helm/test/testdata/nope.tgz rename to pkg/iac/scanners/helm/test/testdata/nope.tgz diff --git a/pkg/scanners/helm/test/testdata/numberName/Chart.yaml 
b/pkg/iac/scanners/helm/test/testdata/numberName/Chart.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/numberName/Chart.yaml rename to pkg/iac/scanners/helm/test/testdata/numberName/Chart.yaml diff --git a/pkg/scanners/helm/test/testdata/simmilar-templates/Chart.yaml b/pkg/iac/scanners/helm/test/testdata/simmilar-templates/Chart.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/simmilar-templates/Chart.yaml rename to pkg/iac/scanners/helm/test/testdata/simmilar-templates/Chart.yaml diff --git a/pkg/scanners/helm/test/testdata/simmilar-templates/templates/deployment.yaml b/pkg/iac/scanners/helm/test/testdata/simmilar-templates/templates/deployment.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/simmilar-templates/templates/deployment.yaml rename to pkg/iac/scanners/helm/test/testdata/simmilar-templates/templates/deployment.yaml diff --git a/pkg/scanners/helm/test/testdata/simmilar-templates/templates/manifest.yaml b/pkg/iac/scanners/helm/test/testdata/simmilar-templates/templates/manifest.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/simmilar-templates/templates/manifest.yaml rename to pkg/iac/scanners/helm/test/testdata/simmilar-templates/templates/manifest.yaml diff --git a/pkg/scanners/helm/test/testdata/templated-name/Chart.yaml b/pkg/iac/scanners/helm/test/testdata/templated-name/Chart.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/templated-name/Chart.yaml rename to pkg/iac/scanners/helm/test/testdata/templated-name/Chart.yaml diff --git a/pkg/scanners/helm/test/testdata/testchart/.helmignore b/pkg/iac/scanners/helm/test/testdata/testchart/.helmignore similarity index 100% rename from pkg/scanners/helm/test/testdata/testchart/.helmignore rename to pkg/iac/scanners/helm/test/testdata/testchart/.helmignore diff --git a/pkg/scanners/helm/test/testdata/testchart/Chart.yaml b/pkg/iac/scanners/helm/test/testdata/testchart/Chart.yaml similarity index 
100% rename from pkg/scanners/helm/test/testdata/testchart/Chart.yaml rename to pkg/iac/scanners/helm/test/testdata/testchart/Chart.yaml diff --git a/pkg/scanners/helm/test/testdata/testchart/templates/NOTES.txt b/pkg/iac/scanners/helm/test/testdata/testchart/templates/NOTES.txt similarity index 100% rename from pkg/scanners/helm/test/testdata/testchart/templates/NOTES.txt rename to pkg/iac/scanners/helm/test/testdata/testchart/templates/NOTES.txt diff --git a/pkg/scanners/helm/test/testdata/testchart/templates/_helpers.tpl b/pkg/iac/scanners/helm/test/testdata/testchart/templates/_helpers.tpl similarity index 100% rename from pkg/scanners/helm/test/testdata/testchart/templates/_helpers.tpl rename to pkg/iac/scanners/helm/test/testdata/testchart/templates/_helpers.tpl diff --git a/pkg/scanners/helm/test/testdata/testchart/templates/deployment.yaml b/pkg/iac/scanners/helm/test/testdata/testchart/templates/deployment.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/testchart/templates/deployment.yaml rename to pkg/iac/scanners/helm/test/testdata/testchart/templates/deployment.yaml diff --git a/pkg/scanners/helm/test/testdata/testchart/templates/hpa.yaml b/pkg/iac/scanners/helm/test/testdata/testchart/templates/hpa.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/testchart/templates/hpa.yaml rename to pkg/iac/scanners/helm/test/testdata/testchart/templates/hpa.yaml diff --git a/pkg/scanners/helm/test/testdata/testchart/templates/ingress.yaml b/pkg/iac/scanners/helm/test/testdata/testchart/templates/ingress.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/testchart/templates/ingress.yaml rename to pkg/iac/scanners/helm/test/testdata/testchart/templates/ingress.yaml diff --git a/pkg/scanners/helm/test/testdata/testchart/templates/service.yaml b/pkg/iac/scanners/helm/test/testdata/testchart/templates/service.yaml similarity index 100% rename from 
pkg/scanners/helm/test/testdata/testchart/templates/service.yaml rename to pkg/iac/scanners/helm/test/testdata/testchart/templates/service.yaml diff --git a/pkg/scanners/helm/test/testdata/testchart/templates/serviceaccount.yaml b/pkg/iac/scanners/helm/test/testdata/testchart/templates/serviceaccount.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/testchart/templates/serviceaccount.yaml rename to pkg/iac/scanners/helm/test/testdata/testchart/templates/serviceaccount.yaml diff --git a/pkg/scanners/helm/test/testdata/testchart/templates/tests/test-connection.yaml b/pkg/iac/scanners/helm/test/testdata/testchart/templates/tests/test-connection.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/testchart/templates/tests/test-connection.yaml rename to pkg/iac/scanners/helm/test/testdata/testchart/templates/tests/test-connection.yaml diff --git a/pkg/scanners/helm/test/testdata/testchart/values.yaml b/pkg/iac/scanners/helm/test/testdata/testchart/values.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/testchart/values.yaml rename to pkg/iac/scanners/helm/test/testdata/testchart/values.yaml diff --git a/pkg/scanners/helm/test/testdata/with-api-version/.helmignore b/pkg/iac/scanners/helm/test/testdata/with-api-version/.helmignore similarity index 100% rename from pkg/scanners/helm/test/testdata/with-api-version/.helmignore rename to pkg/iac/scanners/helm/test/testdata/with-api-version/.helmignore diff --git a/pkg/scanners/helm/test/testdata/with-api-version/Chart.yaml b/pkg/iac/scanners/helm/test/testdata/with-api-version/Chart.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/with-api-version/Chart.yaml rename to pkg/iac/scanners/helm/test/testdata/with-api-version/Chart.yaml diff --git a/pkg/scanners/helm/test/testdata/with-api-version/templates/_helpers.tpl b/pkg/iac/scanners/helm/test/testdata/with-api-version/templates/_helpers.tpl similarity index 100% rename from 
pkg/scanners/helm/test/testdata/with-api-version/templates/_helpers.tpl rename to pkg/iac/scanners/helm/test/testdata/with-api-version/templates/_helpers.tpl diff --git a/pkg/scanners/helm/test/testdata/with-api-version/templates/pdb.yaml b/pkg/iac/scanners/helm/test/testdata/with-api-version/templates/pdb.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/with-api-version/templates/pdb.yaml rename to pkg/iac/scanners/helm/test/testdata/with-api-version/templates/pdb.yaml diff --git a/pkg/scanners/helm/test/testdata/with-api-version/values.yaml b/pkg/iac/scanners/helm/test/testdata/with-api-version/values.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/with-api-version/values.yaml rename to pkg/iac/scanners/helm/test/testdata/with-api-version/values.yaml diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/.helmignore b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/.helmignore similarity index 100% rename from pkg/scanners/helm/test/testdata/with-tarred-dep/.helmignore rename to pkg/iac/scanners/helm/test/testdata/with-tarred-dep/.helmignore diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/Chart.yaml b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/Chart.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/with-tarred-dep/Chart.yaml rename to pkg/iac/scanners/helm/test/testdata/with-tarred-dep/Chart.yaml diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/LICENSE b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/LICENSE similarity index 100% rename from pkg/scanners/helm/test/testdata/with-tarred-dep/LICENSE rename to pkg/iac/scanners/helm/test/testdata/with-tarred-dep/LICENSE diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/charts/common-1.16.1.tgz b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/charts/common-1.16.1.tgz similarity index 100% rename from pkg/scanners/helm/test/testdata/with-tarred-dep/charts/common-1.16.1.tgz rename to 
pkg/iac/scanners/helm/test/testdata/with-tarred-dep/charts/common-1.16.1.tgz diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/renovate.json b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/renovate.json similarity index 100% rename from pkg/scanners/helm/test/testdata/with-tarred-dep/renovate.json rename to pkg/iac/scanners/helm/test/testdata/with-tarred-dep/renovate.json diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/.gitkeep b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/.gitkeep similarity index 100% rename from pkg/scanners/helm/test/testdata/with-tarred-dep/templates/.gitkeep rename to pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/.gitkeep diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/deployment.yaml b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/deployment.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/with-tarred-dep/templates/deployment.yaml rename to pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/deployment.yaml diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/ingress.yaml b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/ingress.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/with-tarred-dep/templates/ingress.yaml rename to pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/ingress.yaml diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-crdb-ca.yaml b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-crdb-ca.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-crdb-ca.yaml rename to pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-crdb-ca.yaml diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-dbconn.yaml b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-dbconn.yaml 
similarity index 100% rename from pkg/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-dbconn.yaml rename to pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-dbconn.yaml diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/templates/service.yaml b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/service.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/with-tarred-dep/templates/service.yaml rename to pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/service.yaml diff --git a/pkg/scanners/helm/test/testdata/with-tarred-dep/values.yaml b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/values.yaml similarity index 100% rename from pkg/scanners/helm/test/testdata/with-tarred-dep/values.yaml rename to pkg/iac/scanners/helm/test/testdata/with-tarred-dep/values.yaml diff --git a/pkg/scanners/helm/test/values/values.yaml b/pkg/iac/scanners/helm/test/values/values.yaml similarity index 100% rename from pkg/scanners/helm/test/values/values.yaml rename to pkg/iac/scanners/helm/test/values/values.yaml diff --git a/pkg/scanners/json/parser/parser.go b/pkg/iac/scanners/json/parser/parser.go similarity index 97% rename from pkg/scanners/json/parser/parser.go rename to pkg/iac/scanners/json/parser/parser.go index 1ff75eae3a30..ff3417b8f0b9 100644 --- a/pkg/scanners/json/parser/parser.go +++ b/pkg/iac/scanners/json/parser/parser.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy/pkg/detection" + "github.com/aquasecurity/trivy/pkg/iac/detection" ) var _ options.ConfigurableParser = (*Parser)(nil) diff --git a/pkg/scanners/json/parser/parser_test.go b/pkg/iac/scanners/json/parser/parser_test.go similarity index 100% rename from pkg/scanners/json/parser/parser_test.go rename to pkg/iac/scanners/json/parser/parser_test.go diff --git 
a/pkg/scanners/json/scanner.go b/pkg/iac/scanners/json/scanner.go similarity index 97% rename from pkg/scanners/json/scanner.go rename to pkg/iac/scanners/json/scanner.go index 8276bcfeb55a..eaa8fc43aa5a 100644 --- a/pkg/scanners/json/scanner.go +++ b/pkg/iac/scanners/json/scanner.go @@ -12,8 +12,8 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners" - "github.com/aquasecurity/trivy/pkg/scanners/json/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + "github.com/aquasecurity/trivy/pkg/iac/scanners/json/parser" ) var _ scanners.FSScanner = (*Scanner)(nil) diff --git a/pkg/scanners/json/scanner_test.go b/pkg/iac/scanners/json/scanner_test.go similarity index 100% rename from pkg/scanners/json/scanner_test.go rename to pkg/iac/scanners/json/scanner_test.go diff --git a/pkg/scanners/kubernetes/parser/manifest.go b/pkg/iac/scanners/kubernetes/parser/manifest.go similarity index 100% rename from pkg/scanners/kubernetes/parser/manifest.go rename to pkg/iac/scanners/kubernetes/parser/manifest.go diff --git a/pkg/scanners/kubernetes/parser/manifest_node.go b/pkg/iac/scanners/kubernetes/parser/manifest_node.go similarity index 97% rename from pkg/scanners/kubernetes/parser/manifest_node.go rename to pkg/iac/scanners/kubernetes/parser/manifest_node.go index 1f82ca1e3680..f83c352f01ca 100644 --- a/pkg/scanners/kubernetes/parser/manifest_node.go +++ b/pkg/iac/scanners/kubernetes/parser/manifest_node.go @@ -23,9 +23,9 @@ type ManifestNode struct { StartLine int EndLine int Offset int - Value interface{} - Type TagType - Path string + Value interface{} + Type TagType + Path string } func (r *ManifestNode) ToRego() interface{} { diff --git a/pkg/scanners/kubernetes/parser/parser.go b/pkg/iac/scanners/kubernetes/parser/parser.go similarity index 98% rename from 
pkg/scanners/kubernetes/parser/parser.go rename to pkg/iac/scanners/kubernetes/parser/parser.go index a1847686cef6..618990e5bfee 100644 --- a/pkg/scanners/kubernetes/parser/parser.go +++ b/pkg/iac/scanners/kubernetes/parser/parser.go @@ -11,11 +11,11 @@ import ( "regexp" "strings" + "github.com/aquasecurity/trivy/pkg/iac/detection" "gopkg.in/yaml.v3" "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy/pkg/detection" ) var _ options.ConfigurableParser = (*Parser)(nil) diff --git a/pkg/scanners/kubernetes/scanner.go b/pkg/iac/scanners/kubernetes/scanner.go similarity index 97% rename from pkg/scanners/kubernetes/scanner.go rename to pkg/iac/scanners/kubernetes/scanner.go index 6c5c3b7ff2ab..2f34e8fef9f5 100644 --- a/pkg/scanners/kubernetes/scanner.go +++ b/pkg/iac/scanners/kubernetes/scanner.go @@ -8,6 +8,8 @@ import ( "sort" "sync" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + "github.com/aquasecurity/trivy/pkg/iac/scanners/kubernetes/parser" "github.com/liamg/memoryfs" "github.com/aquasecurity/defsec/pkg/debug" @@ -16,8 +18,6 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners" - "github.com/aquasecurity/trivy/pkg/scanners/kubernetes/parser" ) var _ scanners.FSScanner = (*Scanner)(nil) diff --git a/pkg/scanners/kubernetes/scanner_test.go b/pkg/iac/scanners/kubernetes/scanner_test.go similarity index 100% rename from pkg/scanners/kubernetes/scanner_test.go rename to pkg/iac/scanners/kubernetes/scanner_test.go diff --git a/pkg/scanners/scanner.go b/pkg/iac/scanners/scanner.go similarity index 100% rename from pkg/scanners/scanner.go rename to pkg/iac/scanners/scanner.go diff --git a/pkg/scanners/terraform/executor/executor.go 
b/pkg/iac/scanners/terraform/executor/executor.go similarity index 99% rename from pkg/scanners/terraform/executor/executor.go rename to pkg/iac/scanners/terraform/executor/executor.go index 943283652a0e..ece352daf0bd 100644 --- a/pkg/scanners/terraform/executor/executor.go +++ b/pkg/iac/scanners/terraform/executor/executor.go @@ -14,7 +14,7 @@ import ( "github.com/aquasecurity/defsec/pkg/severity" "github.com/aquasecurity/defsec/pkg/state" "github.com/aquasecurity/defsec/pkg/terraform" - adapter "github.com/aquasecurity/trivy/internal/adapters/terraform" + adapter "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform" ) // Executor scans HCL blocks by running all registered rules against them diff --git a/pkg/scanners/terraform/executor/executor_test.go b/pkg/iac/scanners/terraform/executor/executor_test.go similarity index 89% rename from pkg/scanners/terraform/executor/executor_test.go rename to pkg/iac/scanners/terraform/executor/executor_test.go index 26de5bd2bc0f..fbebdd8ccefd 100644 --- a/pkg/scanners/terraform/executor/executor_test.go +++ b/pkg/iac/scanners/terraform/executor/executor_test.go @@ -9,10 +9,10 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/severity" "github.com/aquasecurity/defsec/pkg/terraform" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" "github.com/aquasecurity/trivy/test/testutil" ) @@ -48,7 +48,7 @@ resource "problem" "this" { `, }) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) @@ -70,7 +70,7 @@ resource "problem" "this" { `, }) - 
p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) @@ -92,7 +92,7 @@ resource "problem" "this" { `, }) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) @@ -114,7 +114,7 @@ resource "problem" "this" { `, }) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) diff --git a/pkg/scanners/terraform/executor/option.go b/pkg/iac/scanners/terraform/executor/option.go similarity index 100% rename from pkg/scanners/terraform/executor/option.go rename to pkg/iac/scanners/terraform/executor/option.go diff --git a/pkg/scanners/terraform/executor/pool.go b/pkg/iac/scanners/terraform/executor/pool.go similarity index 100% rename from pkg/scanners/terraform/executor/pool.go rename to pkg/iac/scanners/terraform/executor/pool.go diff --git a/pkg/scanners/terraform/executor/statistics.go b/pkg/iac/scanners/terraform/executor/statistics.go similarity index 100% rename from pkg/scanners/terraform/executor/statistics.go rename to pkg/iac/scanners/terraform/executor/statistics.go diff --git a/pkg/scanners/terraform/options.go b/pkg/iac/scanners/terraform/options.go similarity index 97% rename from pkg/scanners/terraform/options.go rename to pkg/iac/scanners/terraform/options.go index 9c57af3400f6..3fd3ba63579f 100644 --- a/pkg/scanners/terraform/options.go +++ b/pkg/iac/scanners/terraform/options.go @@ -8,8 +8,8 @@ import ( "github.com/aquasecurity/defsec/pkg/scanners/options" 
"github.com/aquasecurity/defsec/pkg/severity" "github.com/aquasecurity/defsec/pkg/state" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" ) type ConfigurableTerraformScanner interface { diff --git a/pkg/scanners/terraform/parser/evaluator.go b/pkg/iac/scanners/terraform/parser/evaluator.go similarity index 100% rename from pkg/scanners/terraform/parser/evaluator.go rename to pkg/iac/scanners/terraform/parser/evaluator.go diff --git a/pkg/scanners/terraform/parser/evaluator_test.go b/pkg/iac/scanners/terraform/parser/evaluator_test.go similarity index 100% rename from pkg/scanners/terraform/parser/evaluator_test.go rename to pkg/iac/scanners/terraform/parser/evaluator_test.go diff --git a/pkg/scanners/terraform/parser/funcs/cidr.go b/pkg/iac/scanners/terraform/parser/funcs/cidr.go similarity index 100% rename from pkg/scanners/terraform/parser/funcs/cidr.go rename to pkg/iac/scanners/terraform/parser/funcs/cidr.go diff --git a/pkg/scanners/terraform/parser/funcs/collection.go b/pkg/iac/scanners/terraform/parser/funcs/collection.go similarity index 100% rename from pkg/scanners/terraform/parser/funcs/collection.go rename to pkg/iac/scanners/terraform/parser/funcs/collection.go diff --git a/pkg/scanners/terraform/parser/funcs/conversion.go b/pkg/iac/scanners/terraform/parser/funcs/conversion.go similarity index 100% rename from pkg/scanners/terraform/parser/funcs/conversion.go rename to pkg/iac/scanners/terraform/parser/funcs/conversion.go diff --git a/pkg/scanners/terraform/parser/funcs/crypto.go b/pkg/iac/scanners/terraform/parser/funcs/crypto.go similarity index 100% rename from pkg/scanners/terraform/parser/funcs/crypto.go rename to pkg/iac/scanners/terraform/parser/funcs/crypto.go diff --git 
a/pkg/scanners/terraform/parser/funcs/datetime.go b/pkg/iac/scanners/terraform/parser/funcs/datetime.go similarity index 100% rename from pkg/scanners/terraform/parser/funcs/datetime.go rename to pkg/iac/scanners/terraform/parser/funcs/datetime.go diff --git a/pkg/scanners/terraform/parser/funcs/defaults.go b/pkg/iac/scanners/terraform/parser/funcs/defaults.go similarity index 100% rename from pkg/scanners/terraform/parser/funcs/defaults.go rename to pkg/iac/scanners/terraform/parser/funcs/defaults.go diff --git a/pkg/scanners/terraform/parser/funcs/encoding.go b/pkg/iac/scanners/terraform/parser/funcs/encoding.go similarity index 100% rename from pkg/scanners/terraform/parser/funcs/encoding.go rename to pkg/iac/scanners/terraform/parser/funcs/encoding.go diff --git a/pkg/scanners/terraform/parser/funcs/filesystem.go b/pkg/iac/scanners/terraform/parser/funcs/filesystem.go similarity index 100% rename from pkg/scanners/terraform/parser/funcs/filesystem.go rename to pkg/iac/scanners/terraform/parser/funcs/filesystem.go diff --git a/pkg/scanners/terraform/parser/funcs/marks.go b/pkg/iac/scanners/terraform/parser/funcs/marks.go similarity index 100% rename from pkg/scanners/terraform/parser/funcs/marks.go rename to pkg/iac/scanners/terraform/parser/funcs/marks.go diff --git a/pkg/scanners/terraform/parser/funcs/number.go b/pkg/iac/scanners/terraform/parser/funcs/number.go similarity index 100% rename from pkg/scanners/terraform/parser/funcs/number.go rename to pkg/iac/scanners/terraform/parser/funcs/number.go diff --git a/pkg/scanners/terraform/parser/funcs/sensitive.go b/pkg/iac/scanners/terraform/parser/funcs/sensitive.go similarity index 100% rename from pkg/scanners/terraform/parser/funcs/sensitive.go rename to pkg/iac/scanners/terraform/parser/funcs/sensitive.go diff --git a/pkg/scanners/terraform/parser/funcs/string.go b/pkg/iac/scanners/terraform/parser/funcs/string.go similarity index 100% rename from pkg/scanners/terraform/parser/funcs/string.go rename to 
pkg/iac/scanners/terraform/parser/funcs/string.go diff --git a/pkg/scanners/terraform/parser/functions.go b/pkg/iac/scanners/terraform/parser/functions.go similarity index 52% rename from pkg/scanners/terraform/parser/functions.go rename to pkg/iac/scanners/terraform/parser/functions.go index f6e9cd62664f..0c517b9e7dca 100644 --- a/pkg/scanners/terraform/parser/functions.go +++ b/pkg/iac/scanners/terraform/parser/functions.go @@ -3,13 +3,12 @@ package parser import ( "io/fs" + funcs2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/funcs" "github.com/hashicorp/hcl/v2/ext/tryfunc" ctyyaml "github.com/zclconf/go-cty-yaml" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/function" "github.com/zclconf/go-cty/cty/function/stdlib" - - "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser/funcs" ) // Functions returns the set of functions that should be used to when evaluating @@ -17,104 +16,104 @@ import ( func Functions(target fs.FS, baseDir string) map[string]function.Function { return map[string]function.Function{ "abs": stdlib.AbsoluteFunc, - "abspath": funcs.AbsPathFunc, - "basename": funcs.BasenameFunc, - "base64decode": funcs.Base64DecodeFunc, - "base64encode": funcs.Base64EncodeFunc, - "base64gzip": funcs.Base64GzipFunc, - "base64sha256": funcs.Base64Sha256Func, - "base64sha512": funcs.Base64Sha512Func, - "bcrypt": funcs.BcryptFunc, + "abspath": funcs2.AbsPathFunc, + "basename": funcs2.BasenameFunc, + "base64decode": funcs2.Base64DecodeFunc, + "base64encode": funcs2.Base64EncodeFunc, + "base64gzip": funcs2.Base64GzipFunc, + "base64sha256": funcs2.Base64Sha256Func, + "base64sha512": funcs2.Base64Sha512Func, + "bcrypt": funcs2.BcryptFunc, "can": tryfunc.CanFunc, "ceil": stdlib.CeilFunc, "chomp": stdlib.ChompFunc, - "cidrhost": funcs.CidrHostFunc, - "cidrnetmask": funcs.CidrNetmaskFunc, - "cidrsubnet": funcs.CidrSubnetFunc, - "cidrsubnets": funcs.CidrSubnetsFunc, - 
"coalesce": funcs.CoalesceFunc, + "cidrhost": funcs2.CidrHostFunc, + "cidrnetmask": funcs2.CidrNetmaskFunc, + "cidrsubnet": funcs2.CidrSubnetFunc, + "cidrsubnets": funcs2.CidrSubnetsFunc, + "coalesce": funcs2.CoalesceFunc, "coalescelist": stdlib.CoalesceListFunc, "compact": stdlib.CompactFunc, "concat": stdlib.ConcatFunc, "contains": stdlib.ContainsFunc, "csvdecode": stdlib.CSVDecodeFunc, - "dirname": funcs.DirnameFunc, + "dirname": funcs2.DirnameFunc, "distinct": stdlib.DistinctFunc, "element": stdlib.ElementFunc, "chunklist": stdlib.ChunklistFunc, - "file": funcs.MakeFileFunc(target, baseDir, false), - "fileexists": funcs.MakeFileExistsFunc(baseDir), - "fileset": funcs.MakeFileSetFunc(baseDir), - "filebase64": funcs.MakeFileFunc(target, baseDir, true), - "filebase64sha256": funcs.MakeFileBase64Sha256Func(target, baseDir), - "filebase64sha512": funcs.MakeFileBase64Sha512Func(target, baseDir), - "filemd5": funcs.MakeFileMd5Func(target, baseDir), - "filesha1": funcs.MakeFileSha1Func(target, baseDir), - "filesha256": funcs.MakeFileSha256Func(target, baseDir), - "filesha512": funcs.MakeFileSha512Func(target, baseDir), + "file": funcs2.MakeFileFunc(target, baseDir, false), + "fileexists": funcs2.MakeFileExistsFunc(baseDir), + "fileset": funcs2.MakeFileSetFunc(baseDir), + "filebase64": funcs2.MakeFileFunc(target, baseDir, true), + "filebase64sha256": funcs2.MakeFileBase64Sha256Func(target, baseDir), + "filebase64sha512": funcs2.MakeFileBase64Sha512Func(target, baseDir), + "filemd5": funcs2.MakeFileMd5Func(target, baseDir), + "filesha1": funcs2.MakeFileSha1Func(target, baseDir), + "filesha256": funcs2.MakeFileSha256Func(target, baseDir), + "filesha512": funcs2.MakeFileSha512Func(target, baseDir), "flatten": stdlib.FlattenFunc, "floor": stdlib.FloorFunc, "format": stdlib.FormatFunc, "formatdate": stdlib.FormatDateFunc, "formatlist": stdlib.FormatListFunc, "indent": stdlib.IndentFunc, - "index": funcs.IndexFunc, // stdlib.IndexFunc is not compatible + "index": 
funcs2.IndexFunc, // stdlib.IndexFunc is not compatible "join": stdlib.JoinFunc, "jsondecode": stdlib.JSONDecodeFunc, "jsonencode": stdlib.JSONEncodeFunc, "keys": stdlib.KeysFunc, - "length": funcs.LengthFunc, - "list": funcs.ListFunc, + "length": funcs2.LengthFunc, + "list": funcs2.ListFunc, "log": stdlib.LogFunc, - "lookup": funcs.LookupFunc, + "lookup": funcs2.LookupFunc, "lower": stdlib.LowerFunc, - "map": funcs.MapFunc, - "matchkeys": funcs.MatchkeysFunc, + "map": funcs2.MapFunc, + "matchkeys": funcs2.MatchkeysFunc, "max": stdlib.MaxFunc, - "md5": funcs.Md5Func, + "md5": funcs2.Md5Func, "merge": stdlib.MergeFunc, "min": stdlib.MinFunc, "parseint": stdlib.ParseIntFunc, - "pathexpand": funcs.PathExpandFunc, + "pathexpand": funcs2.PathExpandFunc, "pow": stdlib.PowFunc, "range": stdlib.RangeFunc, "regex": stdlib.RegexFunc, "regexall": stdlib.RegexAllFunc, - "replace": funcs.ReplaceFunc, + "replace": funcs2.ReplaceFunc, "reverse": stdlib.ReverseListFunc, - "rsadecrypt": funcs.RsaDecryptFunc, + "rsadecrypt": funcs2.RsaDecryptFunc, "setintersection": stdlib.SetIntersectionFunc, "setproduct": stdlib.SetProductFunc, "setsubtract": stdlib.SetSubtractFunc, "setunion": stdlib.SetUnionFunc, - "sha1": funcs.Sha1Func, - "sha256": funcs.Sha256Func, - "sha512": funcs.Sha512Func, + "sha1": funcs2.Sha1Func, + "sha256": funcs2.Sha256Func, + "sha512": funcs2.Sha512Func, "signum": stdlib.SignumFunc, "slice": stdlib.SliceFunc, "sort": stdlib.SortFunc, "split": stdlib.SplitFunc, "strrev": stdlib.ReverseFunc, "substr": stdlib.SubstrFunc, - "timestamp": funcs.TimestampFunc, + "timestamp": funcs2.TimestampFunc, "timeadd": stdlib.TimeAddFunc, "title": stdlib.TitleFunc, - "tostring": funcs.MakeToFunc(cty.String), - "tonumber": funcs.MakeToFunc(cty.Number), - "tobool": funcs.MakeToFunc(cty.Bool), - "toset": funcs.MakeToFunc(cty.Set(cty.DynamicPseudoType)), - "tolist": funcs.MakeToFunc(cty.List(cty.DynamicPseudoType)), - "tomap": funcs.MakeToFunc(cty.Map(cty.DynamicPseudoType)), - 
"transpose": funcs.TransposeFunc, + "tostring": funcs2.MakeToFunc(cty.String), + "tonumber": funcs2.MakeToFunc(cty.Number), + "tobool": funcs2.MakeToFunc(cty.Bool), + "toset": funcs2.MakeToFunc(cty.Set(cty.DynamicPseudoType)), + "tolist": funcs2.MakeToFunc(cty.List(cty.DynamicPseudoType)), + "tomap": funcs2.MakeToFunc(cty.Map(cty.DynamicPseudoType)), + "transpose": funcs2.TransposeFunc, "trim": stdlib.TrimFunc, "trimprefix": stdlib.TrimPrefixFunc, "trimspace": stdlib.TrimSpaceFunc, "trimsuffix": stdlib.TrimSuffixFunc, "try": tryfunc.TryFunc, "upper": stdlib.UpperFunc, - "urlencode": funcs.URLEncodeFunc, - "uuid": funcs.UUIDFunc, - "uuidv5": funcs.UUIDV5Func, + "urlencode": funcs2.URLEncodeFunc, + "uuid": funcs2.UUIDFunc, + "uuidv5": funcs2.UUIDV5Func, "values": stdlib.ValuesFunc, "yamldecode": ctyyaml.YAMLDecodeFunc, "yamlencode": ctyyaml.YAMLEncodeFunc, diff --git a/pkg/scanners/terraform/parser/load_blocks.go b/pkg/iac/scanners/terraform/parser/load_blocks.go similarity index 100% rename from pkg/scanners/terraform/parser/load_blocks.go rename to pkg/iac/scanners/terraform/parser/load_blocks.go diff --git a/pkg/scanners/terraform/parser/load_blocks_test.go b/pkg/iac/scanners/terraform/parser/load_blocks_test.go similarity index 100% rename from pkg/scanners/terraform/parser/load_blocks_test.go rename to pkg/iac/scanners/terraform/parser/load_blocks_test.go diff --git a/pkg/scanners/terraform/parser/load_module.go b/pkg/iac/scanners/terraform/parser/load_module.go similarity index 98% rename from pkg/scanners/terraform/parser/load_module.go rename to pkg/iac/scanners/terraform/parser/load_module.go index 7abb8ba18455..df8cf606688f 100644 --- a/pkg/scanners/terraform/parser/load_module.go +++ b/pkg/iac/scanners/terraform/parser/load_module.go @@ -8,10 +8,10 @@ import ( "path/filepath" "strings" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/resolvers" "github.com/zclconf/go-cty/cty" 
"github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser/resolvers" ) type moduleLoadError struct { diff --git a/pkg/scanners/terraform/parser/load_module_metadata.go b/pkg/iac/scanners/terraform/parser/load_module_metadata.go similarity index 100% rename from pkg/scanners/terraform/parser/load_module_metadata.go rename to pkg/iac/scanners/terraform/parser/load_module_metadata.go diff --git a/pkg/scanners/terraform/parser/load_vars.go b/pkg/iac/scanners/terraform/parser/load_vars.go similarity index 100% rename from pkg/scanners/terraform/parser/load_vars.go rename to pkg/iac/scanners/terraform/parser/load_vars.go diff --git a/pkg/scanners/terraform/parser/load_vars_test.go b/pkg/iac/scanners/terraform/parser/load_vars_test.go similarity index 100% rename from pkg/scanners/terraform/parser/load_vars_test.go rename to pkg/iac/scanners/terraform/parser/load_vars_test.go diff --git a/pkg/scanners/terraform/parser/module_retrieval.go b/pkg/iac/scanners/terraform/parser/module_retrieval.go similarity index 63% rename from pkg/scanners/terraform/parser/module_retrieval.go rename to pkg/iac/scanners/terraform/parser/module_retrieval.go index 57248613f651..cae84359498d 100644 --- a/pkg/scanners/terraform/parser/module_retrieval.go +++ b/pkg/iac/scanners/terraform/parser/module_retrieval.go @@ -5,21 +5,21 @@ import ( "fmt" "io/fs" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser/resolvers" + resolvers2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/resolvers" ) type ModuleResolver interface { - Resolve(context.Context, fs.FS, resolvers.Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) + Resolve(context.Context, fs.FS, resolvers2.Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) } var defaultResolvers = []ModuleResolver{ - resolvers.Cache, - resolvers.Local, - resolvers.Remote, - 
resolvers.Registry, + resolvers2.Cache, + resolvers2.Local, + resolvers2.Remote, + resolvers2.Registry, } -func resolveModule(ctx context.Context, current fs.FS, opt resolvers.Options) (filesystem fs.FS, sourcePrefix, downloadPath string, err error) { +func resolveModule(ctx context.Context, current fs.FS, opt resolvers2.Options) (filesystem fs.FS, sourcePrefix, downloadPath string, err error) { opt.Debug("Resolving module '%s' with source: '%s'...", opt.Name, opt.Source) for _, resolver := range defaultResolvers { if filesystem, prefix, path, applies, err := resolver.Resolve(ctx, current, opt); err != nil { diff --git a/pkg/scanners/terraform/parser/option.go b/pkg/iac/scanners/terraform/parser/option.go similarity index 100% rename from pkg/scanners/terraform/parser/option.go rename to pkg/iac/scanners/terraform/parser/option.go diff --git a/pkg/scanners/terraform/parser/parser.go b/pkg/iac/scanners/terraform/parser/parser.go similarity index 97% rename from pkg/scanners/terraform/parser/parser.go rename to pkg/iac/scanners/terraform/parser/parser.go index 7f3fa52b2307..c670f32e3d9a 100644 --- a/pkg/scanners/terraform/parser/parser.go +++ b/pkg/iac/scanners/terraform/parser/parser.go @@ -48,15 +48,15 @@ type Parser struct { modulePath string moduleSource string moduleFS fs.FS - moduleBlock *terraform.Block - files []sourceFile - tfvarsPaths []string + moduleBlock *terraform.Block + files []sourceFile + tfvarsPaths []string stopOnHCLError bool workspaceName string - underlying *hclparse.Parser - children []*Parser - metrics Metrics - options []options.ParserOption + underlying *hclparse.Parser + children []*Parser + metrics Metrics + options []options.ParserOption debug debug.Logger allowDownloads bool skipCachedModules bool diff --git a/pkg/scanners/terraform/parser/parser_integration_test.go b/pkg/iac/scanners/terraform/parser/parser_integration_test.go similarity index 100% rename from pkg/scanners/terraform/parser/parser_integration_test.go rename to 
pkg/iac/scanners/terraform/parser/parser_integration_test.go diff --git a/pkg/scanners/terraform/parser/parser_test.go b/pkg/iac/scanners/terraform/parser/parser_test.go similarity index 100% rename from pkg/scanners/terraform/parser/parser_test.go rename to pkg/iac/scanners/terraform/parser/parser_test.go diff --git a/pkg/scanners/terraform/parser/resolvers/cache.go b/pkg/iac/scanners/terraform/parser/resolvers/cache.go similarity index 100% rename from pkg/scanners/terraform/parser/resolvers/cache.go rename to pkg/iac/scanners/terraform/parser/resolvers/cache.go diff --git a/pkg/scanners/terraform/parser/resolvers/local.go b/pkg/iac/scanners/terraform/parser/resolvers/local.go similarity index 100% rename from pkg/scanners/terraform/parser/resolvers/local.go rename to pkg/iac/scanners/terraform/parser/resolvers/local.go diff --git a/pkg/scanners/terraform/parser/resolvers/options.go b/pkg/iac/scanners/terraform/parser/resolvers/options.go similarity index 100% rename from pkg/scanners/terraform/parser/resolvers/options.go rename to pkg/iac/scanners/terraform/parser/resolvers/options.go diff --git a/pkg/scanners/terraform/parser/resolvers/registry.go b/pkg/iac/scanners/terraform/parser/resolvers/registry.go similarity index 100% rename from pkg/scanners/terraform/parser/resolvers/registry.go rename to pkg/iac/scanners/terraform/parser/resolvers/registry.go diff --git a/pkg/scanners/terraform/parser/resolvers/remote.go b/pkg/iac/scanners/terraform/parser/resolvers/remote.go similarity index 100% rename from pkg/scanners/terraform/parser/resolvers/remote.go rename to pkg/iac/scanners/terraform/parser/resolvers/remote.go diff --git a/pkg/scanners/terraform/parser/resolvers/writable.go b/pkg/iac/scanners/terraform/parser/resolvers/writable.go similarity index 100% rename from pkg/scanners/terraform/parser/resolvers/writable.go rename to pkg/iac/scanners/terraform/parser/resolvers/writable.go diff --git a/pkg/scanners/terraform/parser/resolvers/writable_windows.go 
b/pkg/iac/scanners/terraform/parser/resolvers/writable_windows.go similarity index 100% rename from pkg/scanners/terraform/parser/resolvers/writable_windows.go rename to pkg/iac/scanners/terraform/parser/resolvers/writable_windows.go diff --git a/pkg/scanners/terraform/parser/sort.go b/pkg/iac/scanners/terraform/parser/sort.go similarity index 100% rename from pkg/scanners/terraform/parser/sort.go rename to pkg/iac/scanners/terraform/parser/sort.go diff --git a/pkg/scanners/terraform/parser/testdata/tfvars/terraform.tfvars b/pkg/iac/scanners/terraform/parser/testdata/tfvars/terraform.tfvars similarity index 100% rename from pkg/scanners/terraform/parser/testdata/tfvars/terraform.tfvars rename to pkg/iac/scanners/terraform/parser/testdata/tfvars/terraform.tfvars diff --git a/pkg/scanners/terraform/parser/testdata/tfvars/terraform.tfvars.json b/pkg/iac/scanners/terraform/parser/testdata/tfvars/terraform.tfvars.json similarity index 100% rename from pkg/scanners/terraform/parser/testdata/tfvars/terraform.tfvars.json rename to pkg/iac/scanners/terraform/parser/testdata/tfvars/terraform.tfvars.json diff --git a/pkg/scanners/terraform/scanner.go b/pkg/iac/scanners/terraform/scanner.go similarity index 92% rename from pkg/scanners/terraform/scanner.go rename to pkg/iac/scanners/terraform/scanner.go index b6636460723f..f3e84262f705 100644 --- a/pkg/scanners/terraform/scanner.go +++ b/pkg/iac/scanners/terraform/scanner.go @@ -10,6 +10,10 @@ import ( "sync" "time" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + executor2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/resolvers" "golang.org/x/exp/slices" "github.com/aquasecurity/defsec/pkg/debug" @@ -20,10 +24,6 @@ import ( "github.com/aquasecurity/defsec/pkg/terraform" "github.com/aquasecurity/defsec/pkg/types" 
"github.com/aquasecurity/trivy/pkg/extrafs" - "github.com/aquasecurity/trivy/pkg/scanners" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser/resolvers" ) var _ scanners.FSScanner = (*Scanner)(nil) @@ -34,7 +34,7 @@ type Scanner struct { // nolint: gocritic sync.Mutex options []options.ScannerOption parserOpt []options.ParserOption - executorOpt []executor.Option + executorOpt []executor2.Option dirs map[string]struct{} forceAllDirs bool policyDirs []string @@ -53,7 +53,7 @@ func (s *Scanner) SetSpec(spec string) { } func (s *Scanner) SetRegoOnly(regoOnly bool) { - s.executorOpt = append(s.executorOpt, executor.OptionWithRegoOnly(regoOnly)) + s.executorOpt = append(s.executorOpt, executor2.OptionWithRegoOnly(regoOnly)) } func (s *Scanner) SetFrameworks(frameworks []framework.Framework) { @@ -80,7 +80,7 @@ func (s *Scanner) AddParserOptions(opts ...options.ParserOption) { s.parserOpt = append(s.parserOpt, opts...) } -func (s *Scanner) AddExecutorOptions(opts ...executor.Option) { +func (s *Scanner) AddExecutorOptions(opts ...executor2.Option) { s.executorOpt = append(s.executorOpt, opts...) 
} @@ -94,7 +94,7 @@ func (s *Scanner) SetSkipRequiredCheck(skip bool) { func (s *Scanner) SetDebugWriter(writer io.Writer) { s.parserOpt = append(s.parserOpt, options.ParserWithDebug(writer)) - s.executorOpt = append(s.executorOpt, executor.OptionWithDebugWriter(writer)) + s.executorOpt = append(s.executorOpt, executor2.OptionWithDebugWriter(writer)) s.debug = debug.New(writer, "terraform", "scanner") } @@ -122,7 +122,7 @@ func (s *Scanner) SetRegoErrorLimit(_ int) {} type Metrics struct { Parser parser.Metrics - Executor executor.Metrics + Executor executor2.Metrics Timings struct { Total time.Duration } @@ -206,7 +206,7 @@ func (s *Scanner) ScanFSWithMetrics(ctx context.Context, target fs.FS, dir strin } s.execLock.Lock() - s.executorOpt = append(s.executorOpt, executor.OptionWithRegoScanner(regoScanner), executor.OptionWithFrameworks(s.frameworks...)) + s.executorOpt = append(s.executorOpt, executor2.OptionWithRegoScanner(regoScanner), executor2.OptionWithFrameworks(s.frameworks...)) s.execLock.Unlock() var allResults scan.Results @@ -246,7 +246,7 @@ func (s *Scanner) ScanFSWithMetrics(ctx context.Context, target fs.FS, dir strin for _, module := range rootModules { s.execLock.RLock() - e := executor.New(s.executorOpt...) + e := executor2.New(s.executorOpt...) 
s.execLock.RUnlock() results, execMetrics, err := e.Execute(module.childs) if err != nil { diff --git a/pkg/scanners/terraform/scanner_integration_test.go b/pkg/iac/scanners/terraform/scanner_integration_test.go similarity index 100% rename from pkg/scanners/terraform/scanner_integration_test.go rename to pkg/iac/scanners/terraform/scanner_integration_test.go diff --git a/pkg/scanners/terraform/scanner_test.go b/pkg/iac/scanners/terraform/scanner_test.go similarity index 100% rename from pkg/scanners/terraform/scanner_test.go rename to pkg/iac/scanners/terraform/scanner_test.go diff --git a/pkg/scanners/terraformplan/parser/option.go b/pkg/iac/scanners/terraformplan/parser/option.go similarity index 100% rename from pkg/scanners/terraformplan/parser/option.go rename to pkg/iac/scanners/terraformplan/parser/option.go diff --git a/pkg/scanners/terraformplan/parser/parser.go b/pkg/iac/scanners/terraformplan/parser/parser.go similarity index 100% rename from pkg/scanners/terraformplan/parser/parser.go rename to pkg/iac/scanners/terraformplan/parser/parser.go diff --git a/pkg/scanners/terraformplan/parser/plan_file.go b/pkg/iac/scanners/terraformplan/parser/plan_file.go similarity index 100% rename from pkg/scanners/terraformplan/parser/plan_file.go rename to pkg/iac/scanners/terraformplan/parser/plan_file.go diff --git a/pkg/scanners/terraformplan/scanner.go b/pkg/iac/scanners/terraformplan/scanner.go similarity index 94% rename from pkg/scanners/terraformplan/scanner.go rename to pkg/iac/scanners/terraformplan/scanner.go index aee33c256369..c678674f2b72 100644 --- a/pkg/scanners/terraformplan/scanner.go +++ b/pkg/iac/scanners/terraformplan/scanner.go @@ -6,15 +6,15 @@ import ( "io" "io/fs" + terraformScanner "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraformplan/parser" 
"github.com/bmatcuk/doublestar/v4" "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" - terraformScanner "github.com/aquasecurity/trivy/pkg/scanners/terraform" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" - "github.com/aquasecurity/trivy/pkg/scanners/terraformplan/parser" ) var tfPlanExts = []string{ diff --git a/pkg/scanners/terraformplan/scanner_test.go b/pkg/iac/scanners/terraformplan/scanner_test.go similarity index 100% rename from pkg/scanners/terraformplan/scanner_test.go rename to pkg/iac/scanners/terraformplan/scanner_test.go diff --git a/pkg/scanners/terraformplan/test/parser_test.go b/pkg/iac/scanners/terraformplan/test/parser_test.go similarity index 83% rename from pkg/scanners/terraformplan/test/parser_test.go rename to pkg/iac/scanners/terraformplan/test/parser_test.go index feb60f1611a9..bcc04fd0affb 100644 --- a/pkg/scanners/terraformplan/test/parser_test.go +++ b/pkg/iac/scanners/terraformplan/test/parser_test.go @@ -3,7 +3,7 @@ package terraformplan import ( "testing" - "github.com/aquasecurity/trivy/pkg/scanners/terraformplan/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraformplan/parser" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/pkg/scanners/terraformplan/test/scanner_test.go b/pkg/iac/scanners/terraformplan/test/scanner_test.go similarity index 93% rename from pkg/scanners/terraformplan/test/scanner_test.go rename to pkg/iac/scanners/terraformplan/test/scanner_test.go index 9dc4253fe379..5762e4e9bc3b 100644 --- a/pkg/scanners/terraformplan/test/scanner_test.go +++ b/pkg/iac/scanners/terraformplan/test/scanner_test.go @@ -6,11 +6,11 @@ import ( "testing/fstest" "github.com/aquasecurity/defsec/pkg/scan" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/terraformplan" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy/pkg/scanners/terraformplan" ) func Test_Scanning_Plan(t *testing.T) { diff --git a/pkg/scanners/terraformplan/test/testdata/plan.json b/pkg/iac/scanners/terraformplan/test/testdata/plan.json similarity index 100% rename from pkg/scanners/terraformplan/test/testdata/plan.json rename to pkg/iac/scanners/terraformplan/test/testdata/plan.json diff --git a/pkg/scanners/toml/parser/parser.go b/pkg/iac/scanners/toml/parser/parser.go similarity index 97% rename from pkg/scanners/toml/parser/parser.go rename to pkg/iac/scanners/toml/parser/parser.go index 3af651b9d2ab..620da384060d 100644 --- a/pkg/scanners/toml/parser/parser.go +++ b/pkg/iac/scanners/toml/parser/parser.go @@ -7,10 +7,10 @@ import ( "path/filepath" "github.com/BurntSushi/toml" + "github.com/aquasecurity/trivy/pkg/iac/detection" "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy/pkg/detection" ) var _ options.ConfigurableParser = (*Parser)(nil) diff --git a/pkg/scanners/toml/parser/parser_test.go b/pkg/iac/scanners/toml/parser/parser_test.go similarity index 100% rename from pkg/scanners/toml/parser/parser_test.go rename to pkg/iac/scanners/toml/parser/parser_test.go diff --git a/pkg/scanners/toml/scanner.go b/pkg/iac/scanners/toml/scanner.go similarity index 98% rename from pkg/scanners/toml/scanner.go rename to pkg/iac/scanners/toml/scanner.go index 9af56eb73ac3..290a3d139df1 100644 --- a/pkg/scanners/toml/scanner.go +++ b/pkg/iac/scanners/toml/scanner.go @@ -12,7 +12,7 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" 
"github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/toml/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/toml/parser" ) var _ options.ConfigurableScanner = (*Scanner)(nil) diff --git a/pkg/scanners/toml/scanner_test.go b/pkg/iac/scanners/toml/scanner_test.go similarity index 100% rename from pkg/scanners/toml/scanner_test.go rename to pkg/iac/scanners/toml/scanner_test.go diff --git a/pkg/scanners/universal/scanner.go b/pkg/iac/scanners/universal/scanner.go similarity index 65% rename from pkg/scanners/universal/scanner.go rename to pkg/iac/scanners/universal/scanner.go index 7040bd43054b..d289f1961f6c 100644 --- a/pkg/scanners/universal/scanner.go +++ b/pkg/iac/scanners/universal/scanner.go @@ -6,16 +6,16 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy/pkg/scanners" - "github.com/aquasecurity/trivy/pkg/scanners/azure/arm" - "github.com/aquasecurity/trivy/pkg/scanners/cloudformation" - "github.com/aquasecurity/trivy/pkg/scanners/dockerfile" - "github.com/aquasecurity/trivy/pkg/scanners/helm" - "github.com/aquasecurity/trivy/pkg/scanners/json" - "github.com/aquasecurity/trivy/pkg/scanners/kubernetes" - "github.com/aquasecurity/trivy/pkg/scanners/terraform" - "github.com/aquasecurity/trivy/pkg/scanners/toml" - "github.com/aquasecurity/trivy/pkg/scanners/yaml" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation" + "github.com/aquasecurity/trivy/pkg/iac/scanners/dockerfile" + "github.com/aquasecurity/trivy/pkg/iac/scanners/helm" + "github.com/aquasecurity/trivy/pkg/iac/scanners/json" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/kubernetes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform" + "github.com/aquasecurity/trivy/pkg/iac/scanners/toml" + "github.com/aquasecurity/trivy/pkg/iac/scanners/yaml" ) type nestableFSScanners interface { diff --git a/pkg/scanners/yaml/parser/parser.go b/pkg/iac/scanners/yaml/parser/parser.go similarity index 97% rename from pkg/scanners/yaml/parser/parser.go rename to pkg/iac/scanners/yaml/parser/parser.go index 783d6e693b30..5b6220e021f5 100644 --- a/pkg/scanners/yaml/parser/parser.go +++ b/pkg/iac/scanners/yaml/parser/parser.go @@ -8,11 +8,11 @@ import ( "path/filepath" "strings" + "github.com/aquasecurity/trivy/pkg/iac/detection" "gopkg.in/yaml.v3" "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy/pkg/detection" ) var _ options.ConfigurableParser = (*Parser)(nil) diff --git a/pkg/scanners/yaml/parser/parser_test.go b/pkg/iac/scanners/yaml/parser/parser_test.go similarity index 100% rename from pkg/scanners/yaml/parser/parser_test.go rename to pkg/iac/scanners/yaml/parser/parser_test.go diff --git a/pkg/scanners/yaml/scanner.go b/pkg/iac/scanners/yaml/scanner.go similarity index 98% rename from pkg/scanners/yaml/scanner.go rename to pkg/iac/scanners/yaml/scanner.go index 607462061fa6..4c4a03e85d5d 100644 --- a/pkg/scanners/yaml/scanner.go +++ b/pkg/iac/scanners/yaml/scanner.go @@ -12,7 +12,7 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/scanners/yaml/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/yaml/parser" ) var _ options.ConfigurableScanner = (*Scanner)(nil) diff --git a/pkg/scanners/yaml/scanner_test.go b/pkg/iac/scanners/yaml/scanner_test.go similarity index 
100% rename from pkg/scanners/yaml/scanner_test.go rename to pkg/iac/scanners/yaml/scanner_test.go diff --git a/pkg/misconf/scanner.go b/pkg/misconf/scanner.go index 3dc1c436d26b..6120df0b66e4 100644 --- a/pkg/misconf/scanner.go +++ b/pkg/misconf/scanner.go @@ -11,26 +11,26 @@ import ( "sort" "strings" + "github.com/aquasecurity/trivy/pkg/iac/detection" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm" + cfscanner "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation" + cfparser "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + dfscanner "github.com/aquasecurity/trivy/pkg/iac/scanners/dockerfile" + helm2 "github.com/aquasecurity/trivy/pkg/iac/scanners/helm" + k8sscanner "github.com/aquasecurity/trivy/pkg/iac/scanners/kubernetes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform" + tfpscanner "github.com/aquasecurity/trivy/pkg/iac/scanners/terraformplan" "github.com/samber/lo" "golang.org/x/xerrors" + _ "embed" + "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy/pkg/detection" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/mapfs" - "github.com/aquasecurity/trivy/pkg/scanners" - "github.com/aquasecurity/trivy/pkg/scanners/azure/arm" - cfscanner "github.com/aquasecurity/trivy/pkg/scanners/cloudformation" - cfparser "github.com/aquasecurity/trivy/pkg/scanners/cloudformation/parser" - dfscanner "github.com/aquasecurity/trivy/pkg/scanners/dockerfile" - "github.com/aquasecurity/trivy/pkg/scanners/helm" - k8sscanner "github.com/aquasecurity/trivy/pkg/scanners/kubernetes" - tfscanner 
"github.com/aquasecurity/trivy/pkg/scanners/terraform" - tfpscanner "github.com/aquasecurity/trivy/pkg/scanners/terraformplan" - - _ "embed" ) var enabledDefsecTypes = map[detection.FileType]types.ConfigType{ @@ -118,11 +118,11 @@ func newScanner(t detection.FileType, filePatterns []string, opt ScannerOption) case detection.FileTypeDockerfile: scanner = dfscanner.NewScanner(opts...) case detection.FileTypeHelm: - scanner = helm.New(opts...) + scanner = helm2.New(opts...) case detection.FileTypeKubernetes: scanner = k8sscanner.NewScanner(opts...) case detection.FileTypeTerraform: - scanner = tfscanner.New(opts...) + scanner = terraform.New(opts...) case detection.FileTypeTerraformPlan: scanner = tfpscanner.New(opts...) } @@ -279,14 +279,14 @@ func addTFOpts(opts []options.ScannerOption, scannerOption ScannerOption) ([]opt } opts = append( opts, - tfscanner.ScannerWithTFVarsPaths(scannerOption.TerraformTFVars...), - tfscanner.ScannerWithConfigsFileSystem(configFS), + terraform.ScannerWithTFVarsPaths(scannerOption.TerraformTFVars...), + terraform.ScannerWithConfigsFileSystem(configFS), ) } opts = append(opts, - tfscanner.ScannerWithAllDirectories(true), - tfscanner.ScannerWithSkipDownloaded(scannerOption.TfExcludeDownloaded), + terraform.ScannerWithAllDirectories(true), + terraform.ScannerWithSkipDownloaded(scannerOption.TfExcludeDownloaded), ) return opts, nil @@ -309,19 +309,19 @@ func addCFOpts(opts []options.ScannerOption, scannerOption ScannerOption) ([]opt func addHelmOpts(opts []options.ScannerOption, scannerOption ScannerOption) []options.ScannerOption { if len(scannerOption.HelmValueFiles) > 0 { - opts = append(opts, helm.ScannerWithValuesFile(scannerOption.HelmValueFiles...)) + opts = append(opts, helm2.ScannerWithValuesFile(scannerOption.HelmValueFiles...)) } if len(scannerOption.HelmValues) > 0 { - opts = append(opts, helm.ScannerWithValues(scannerOption.HelmValues...)) + opts = append(opts, 
helm2.ScannerWithValues(scannerOption.HelmValues...)) } if len(scannerOption.HelmFileValues) > 0 { - opts = append(opts, helm.ScannerWithFileValues(scannerOption.HelmFileValues...)) + opts = append(opts, helm2.ScannerWithFileValues(scannerOption.HelmFileValues...)) } if len(scannerOption.HelmStringValues) > 0 { - opts = append(opts, helm.ScannerWithStringValues(scannerOption.HelmStringValues...)) + opts = append(opts, helm2.ScannerWithStringValues(scannerOption.HelmStringValues...)) } return opts diff --git a/test/deterministic_test.go b/test/deterministic_test.go index ae715ffee6cb..63a4777913fe 100644 --- a/test/deterministic_test.go +++ b/test/deterministic_test.go @@ -5,10 +5,10 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/rules" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" "github.com/aquasecurity/trivy/test/testutil" ) @@ -40,7 +40,7 @@ locals { }) for i := 0; i < 100; i++ { - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), ".") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) diff --git a/test/docker_test.go b/test/docker_test.go index 2c7f3a93d761..1dd84e4bef2a 100644 --- a/test/docker_test.go +++ b/test/docker_test.go @@ -8,10 +8,9 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/scanners/dockerfile" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - - "github.com/aquasecurity/trivy/pkg/scanners/dockerfile" ) // func 
addFilesToMemFS(memfs *memoryfs.FS, typePolicy bool, folderName string) error { diff --git a/test/fs_test.go b/test/fs_test.go index 2ce5b517027b..938977188344 100644 --- a/test/fs_test.go +++ b/test/fs_test.go @@ -6,10 +6,9 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - - "github.com/aquasecurity/trivy/pkg/scanners/terraform" ) func Test_OS_FS(t *testing.T) { diff --git a/test/kubernetes_test.go b/test/kubernetes_test.go index 0987f0866d14..f3fb25f3e5fc 100644 --- a/test/kubernetes_test.go +++ b/test/kubernetes_test.go @@ -9,10 +9,9 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/scanners/kubernetes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - - "github.com/aquasecurity/trivy/pkg/scanners/kubernetes" ) func Test_Kubernetes_RegoPoliciesFromDisk(t *testing.T) { diff --git a/test/module_test.go b/test/module_test.go index c60f2d3772f7..658dee77313d 100644 --- a/test/module_test.go +++ b/test/module_test.go @@ -13,11 +13,11 @@ import ( "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/severity" "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" "github.com/stretchr/testify/require" "github.com/aquasecurity/trivy-policies/checks/cloud/aws/iam" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" "github.com/aquasecurity/trivy/test/testutil" ) @@ -88,7 
+88,7 @@ resource "problem" "uhoh" { debug := bytes.NewBuffer([]byte{}) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true), options.ParserWithDebug(debug)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true), options.ParserWithDebug(debug)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) @@ -119,7 +119,7 @@ resource "problem" "uhoh" { `}, ) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) @@ -148,7 +148,7 @@ resource "problem" "uhoh" { `}, ) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) @@ -175,7 +175,7 @@ resource "problem" "uhoh" { } `}) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) @@ -202,7 +202,7 @@ resource "problem" "uhoh" { } `}) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) @@ -238,7 +238,7 @@ resource "problem" "uhoh" { } `}) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) @@ -276,7 +276,7 @@ resource "problem" "uhoh" { `, }) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true), 
options.ParserWithDebug(os.Stderr)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true), options.ParserWithDebug(os.Stderr)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) @@ -331,7 +331,7 @@ resource "problem" "uhoh" { `, }) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) @@ -380,7 +380,7 @@ resource "problem" "uhoh" { `, }) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) @@ -418,7 +418,7 @@ resource "problem" "uhoh" { `, }) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) @@ -473,7 +473,7 @@ resource "problem" "uhoh" { `, }) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) @@ -523,7 +523,7 @@ resource "bad" "thing" { reg := rules.Register(r1) defer rules.Deregister(reg) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) @@ -572,7 +572,7 @@ resource "bad" "thing" { reg := rules.Register(r1) defer rules.Deregister(reg) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err 
:= p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) @@ -621,7 +621,7 @@ data "aws_iam_policy_document" "policy" { } `}) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) diff --git a/test/performance_test.go b/test/performance_test.go index 81fa5cfe2b66..66842f5c96b1 100644 --- a/test/performance_test.go +++ b/test/performance_test.go @@ -7,9 +7,9 @@ import ( "testing" "github.com/aquasecurity/defsec/pkg/rules" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/executor" - "github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" "github.com/aquasecurity/trivy/test/testutil" ) @@ -22,7 +22,7 @@ func BenchmarkCalculate(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - p := parser.New(f, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(f, "", parser2.OptionStopOnHCLError(true)) if err := p.ParseFS(context.TODO(), "project"); err != nil { b.Fatal(err) } diff --git a/test/setup_test.go b/test/setup_test.go index 226bf3400ed8..ead5628b8fd3 100644 --- a/test/setup_test.go +++ b/test/setup_test.go @@ -7,10 +7,10 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/terraform" + terraform2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" "github.com/stretchr/testify/require" - tfScanner "github.com/aquasecurity/trivy/pkg/scanners/terraform" - 
"github.com/aquasecurity/trivy/pkg/scanners/terraform/parser" "github.com/aquasecurity/trivy/test/testutil" ) @@ -19,7 +19,7 @@ func createModulesFromSource(t *testing.T, source string, ext string) terraform. "source" + ext: source, }) - p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) if err := p.ParseFS(context.TODO(), "."); err != nil { t.Fatal(err) } @@ -31,7 +31,7 @@ func createModulesFromSource(t *testing.T, source string, ext string) terraform. } func scanHCLWithWorkspace(t *testing.T, source string, workspace string) scan.Results { - return scanHCL(t, source, tfScanner.ScannerWithWorkspaceName(workspace)) + return scanHCL(t, source, terraform2.ScannerWithWorkspaceName(workspace)) } func scanHCL(t *testing.T, source string, opts ...options.ScannerOption) scan.Results { @@ -40,7 +40,7 @@ func scanHCL(t *testing.T, source string, opts ...options.ScannerOption) scan.Re "main.tf": source, }) - localScanner := tfScanner.New(append(opts, options.ScannerWithEmbeddedPolicies(false))...) + localScanner := terraform2.New(append(opts, options.ScannerWithEmbeddedPolicies(false))...) 
results, err := localScanner.ScanFS(context.TODO(), fs, ".") require.NoError(t, err) return results @@ -52,7 +52,7 @@ func scanJSON(t *testing.T, source string) scan.Results { "main.tf.json": source, }) - s := tfScanner.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + s := terraform2.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) results, _, err := s.ScanFSWithMetrics(context.TODO(), fs, ".") require.NoError(t, err) return results From e01ae362696205b9fb8073e85e304e420e79de64 Mon Sep 17 00:00:00 2001 From: Simar Date: Mon, 29 Jan 2024 20:01:40 -0700 Subject: [PATCH 10/13] fix lint --- .../cloudformation/aws/ecr/repository.go | 2 +- .../adapters/cloudformation/aws/iam/policy.go | 2 +- .../cloudformation/aws/sam/function.go | 2 +- .../cloudformation/aws/sam/state_machines.go | 2 +- .../adapters/cloudformation/aws/sqs/queue.go | 2 +- pkg/iac/adapters/terraform/aws/ecr/adapt.go | 2 +- pkg/iac/adapters/terraform/aws/rds/adapt.go | 30 +++++++++---------- pkg/iac/adapters/terraform/aws/sqs/adapt.go | 2 +- pkg/iac/detection/detect.go | 2 +- pkg/iac/scanners/azure/value.go | 2 +- .../cloudformation/parser/fn_builtin.go | 1 + .../cloudformation/parser/parameter.go | 3 +- .../scanners/cloudformation/parser/parser.go | 2 +- .../cloudformation/parser/property.go | 2 +- .../scanners/cloudformation/parser/util.go | 5 ++-- pkg/iac/scanners/dockerfile/parser/parser.go | 2 +- pkg/iac/scanners/helm/parser/parser.go | 2 +- pkg/iac/scanners/helm/parser/parser_tar.go | 3 +- pkg/iac/scanners/helm/scanner.go | 8 ++--- .../kubernetes/parser/manifest_node.go | 6 ++-- pkg/iac/scanners/kubernetes/parser/parser.go | 2 +- pkg/iac/scanners/kubernetes/scanner.go | 4 +-- .../scanners/terraform/parser/functions.go | 3 +- .../scanners/terraform/parser/load_module.go | 2 +- pkg/iac/scanners/terraform/parser/parser.go | 14 ++++----- pkg/iac/scanners/terraform/scanner.go | 8 ++--- 
pkg/iac/scanners/terraformplan/scanner.go | 6 ++-- pkg/iac/scanners/toml/parser/parser.go | 2 +- pkg/iac/scanners/yaml/parser/parser.go | 2 +- pkg/misconf/scanner.go | 16 +++++----- 30 files changed, 73 insertions(+), 68 deletions(-) diff --git a/pkg/iac/adapters/cloudformation/aws/ecr/repository.go b/pkg/iac/adapters/cloudformation/aws/ecr/repository.go index 7e4a3710c70d..298d18ec0500 100644 --- a/pkg/iac/adapters/cloudformation/aws/ecr/repository.go +++ b/pkg/iac/adapters/cloudformation/aws/ecr/repository.go @@ -3,12 +3,12 @@ package ecr import ( "fmt" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getRepositories(ctx parser2.FileContext) (repositories []ecr.Repository) { diff --git a/pkg/iac/adapters/cloudformation/aws/iam/policy.go b/pkg/iac/adapters/cloudformation/aws/iam/policy.go index 20fd1b9c7e38..0f4569d291bf 100644 --- a/pkg/iac/adapters/cloudformation/aws/iam/policy.go +++ b/pkg/iac/adapters/cloudformation/aws/iam/policy.go @@ -1,11 +1,11 @@ package iam import ( - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getPolicies(ctx parser2.FileContext) (policies []iam.Policy) { diff --git a/pkg/iac/adapters/cloudformation/aws/sam/function.go b/pkg/iac/adapters/cloudformation/aws/sam/function.go index 84db2ecc6c8f..71ef168c696d 100644 --- a/pkg/iac/adapters/cloudformation/aws/sam/function.go +++ 
b/pkg/iac/adapters/cloudformation/aws/sam/function.go @@ -1,12 +1,12 @@ package sam import ( - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/sam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getFunctions(cfFile parser2.FileContext) (functions []sam.Function) { diff --git a/pkg/iac/adapters/cloudformation/aws/sam/state_machines.go b/pkg/iac/adapters/cloudformation/aws/sam/state_machines.go index 344df4006c3e..e61c960814db 100644 --- a/pkg/iac/adapters/cloudformation/aws/sam/state_machines.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/state_machines.go @@ -1,12 +1,12 @@ package sam import ( - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/sam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getStateMachines(cfFile parser2.FileContext) (stateMachines []sam.StateMachine) { diff --git a/pkg/iac/adapters/cloudformation/aws/sqs/queue.go b/pkg/iac/adapters/cloudformation/aws/sqs/queue.go index 72af950b5fa8..bbfeb02a08b2 100644 --- a/pkg/iac/adapters/cloudformation/aws/sqs/queue.go +++ b/pkg/iac/adapters/cloudformation/aws/sqs/queue.go @@ -3,12 +3,12 @@ package sqs import ( "fmt" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" defsecTypes 
"github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getQueues(ctx parser.FileContext) (queues []sqs.Queue) { diff --git a/pkg/iac/adapters/terraform/aws/ecr/adapt.go b/pkg/iac/adapters/terraform/aws/ecr/adapt.go index 14f82f96046c..64ef4346c584 100644 --- a/pkg/iac/adapters/terraform/aws/ecr/adapt.go +++ b/pkg/iac/adapters/terraform/aws/ecr/adapt.go @@ -1,13 +1,13 @@ package ecr import ( - "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/iam" "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" iamp "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/iam" ) func Adapt(modules terraform.Modules) ecr.ECR { diff --git a/pkg/iac/adapters/terraform/aws/rds/adapt.go b/pkg/iac/adapters/terraform/aws/rds/adapt.go index 517c7635c77d..a03b3d124058 100644 --- a/pkg/iac/adapters/terraform/aws/rds/adapt.go +++ b/pkg/iac/adapters/terraform/aws/rds/adapt.go @@ -146,21 +146,21 @@ func adaptInstance(resource *terraform.Block, modules terraform.Modules) rds.Ins } } return rds.Instance{ - Metadata: resource.GetMetadata(), - BackupRetentionPeriodDays: resource.GetAttribute("backup_retention_period").AsIntValueOrDefault(0, resource), - ReplicationSourceARN: defsecTypes.StringExplicit(replicaSourceValue, resource.GetMetadata()), - PerformanceInsights: adaptPerformanceInsights(resource), - Encryption: adaptEncryption(resource), - PublicAccess: resource.GetAttribute("publicly_accessible").AsBoolValueOrDefault(false, resource), - Engine: resource.GetAttribute("engine").AsStringValueOrDefault(rds.EngineAurora, resource), - IAMAuthEnabled: resource.GetAttribute("iam_database_authentication_enabled").AsBoolValueOrDefault(false, resource), - 
DeletionProtection: resource.GetAttribute("deletion_protection").AsBoolValueOrDefault(false, resource), - DBInstanceArn: resource.GetAttribute("arn").AsStringValueOrDefault("", resource), - StorageEncrypted: resource.GetAttribute("storage_encrypted").AsBoolValueOrDefault(true, resource), - DBInstanceIdentifier: resource.GetAttribute("identifier").AsStringValueOrDefault("", resource), - EngineVersion: resource.GetAttribute("engine_version").AsStringValueOrDefault("", resource), - AutoMinorVersionUpgrade: resource.GetAttribute("auto_minor_version_upgrade").AsBoolValueOrDefault(false, resource), - MultiAZ: resource.GetAttribute("multi_az").AsBoolValueOrDefault(false, resource), + Metadata: resource.GetMetadata(), + BackupRetentionPeriodDays: resource.GetAttribute("backup_retention_period").AsIntValueOrDefault(0, resource), + ReplicationSourceARN: defsecTypes.StringExplicit(replicaSourceValue, resource.GetMetadata()), + PerformanceInsights: adaptPerformanceInsights(resource), + Encryption: adaptEncryption(resource), + PublicAccess: resource.GetAttribute("publicly_accessible").AsBoolValueOrDefault(false, resource), + Engine: resource.GetAttribute("engine").AsStringValueOrDefault(rds.EngineAurora, resource), + IAMAuthEnabled: resource.GetAttribute("iam_database_authentication_enabled").AsBoolValueOrDefault(false, resource), + DeletionProtection: resource.GetAttribute("deletion_protection").AsBoolValueOrDefault(false, resource), + DBInstanceArn: resource.GetAttribute("arn").AsStringValueOrDefault("", resource), + StorageEncrypted: resource.GetAttribute("storage_encrypted").AsBoolValueOrDefault(true, resource), + DBInstanceIdentifier: resource.GetAttribute("identifier").AsStringValueOrDefault("", resource), + EngineVersion: resource.GetAttribute("engine_version").AsStringValueOrDefault("", resource), + AutoMinorVersionUpgrade: resource.GetAttribute("auto_minor_version_upgrade").AsBoolValueOrDefault(false, resource), + MultiAZ: 
resource.GetAttribute("multi_az").AsBoolValueOrDefault(false, resource), PubliclyAccessible: resource.GetAttribute("publicly_accessible").AsBoolValueOrDefault(false, resource), LatestRestorableTime: defsecTypes.TimeUnresolvable(resource.GetMetadata()), ReadReplicaDBInstanceIdentifiers: ReadReplicaDBInstanceIdentifiers, diff --git a/pkg/iac/adapters/terraform/aws/sqs/adapt.go b/pkg/iac/adapters/terraform/aws/sqs/adapt.go index c90517ed8263..bb5cd5047ce3 100644 --- a/pkg/iac/adapters/terraform/aws/sqs/adapt.go +++ b/pkg/iac/adapters/terraform/aws/sqs/adapt.go @@ -1,7 +1,6 @@ package sqs import ( - "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/iam" "github.com/google/uuid" "github.com/liamg/iamgo" @@ -9,6 +8,7 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform/aws/iam" ) func Adapt(modules terraform.Modules) sqs.SQS { diff --git a/pkg/iac/detection/detect.go b/pkg/iac/detection/detect.go index 6c5f965c6b30..9050a735172a 100644 --- a/pkg/iac/detection/detect.go +++ b/pkg/iac/detection/detect.go @@ -7,10 +7,10 @@ import ( "path/filepath" "strings" - "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" "gopkg.in/yaml.v3" "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" ) type FileType string diff --git a/pkg/iac/scanners/azure/value.go b/pkg/iac/scanners/azure/value.go index 58871118e263..eb57927ebbbe 100644 --- a/pkg/iac/scanners/azure/value.go +++ b/pkg/iac/scanners/azure/value.go @@ -4,10 +4,10 @@ import ( "strings" "time" - armjson2 "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" "golang.org/x/exp/slices" "github.com/aquasecurity/defsec/pkg/types" + armjson2 
"github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" ) type EvalContext struct{} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_builtin.go b/pkg/iac/scanners/cloudformation/parser/fn_builtin.go index 4094ea7515f8..3fb21dca82de 100644 --- a/pkg/iac/scanners/cloudformation/parser/fn_builtin.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_builtin.go @@ -5,6 +5,7 @@ import ( "net" "github.com/apparentlymart/go-cidr/cidr" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) diff --git a/pkg/iac/scanners/cloudformation/parser/parameter.go b/pkg/iac/scanners/cloudformation/parser/parameter.go index b3683ed02e8c..ecd727270022 100644 --- a/pkg/iac/scanners/cloudformation/parser/parameter.go +++ b/pkg/iac/scanners/cloudformation/parser/parameter.go @@ -7,9 +7,10 @@ import ( "strconv" "strings" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/liamg/jfather" "gopkg.in/yaml.v3" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) type Parameter struct { diff --git a/pkg/iac/scanners/cloudformation/parser/parser.go b/pkg/iac/scanners/cloudformation/parser/parser.go index 58486a9109c5..5ba989220544 100644 --- a/pkg/iac/scanners/cloudformation/parser/parser.go +++ b/pkg/iac/scanners/cloudformation/parser/parser.go @@ -11,12 +11,12 @@ import ( "path/filepath" "strings" - "github.com/aquasecurity/trivy/pkg/iac/detection" "github.com/liamg/jfather" "gopkg.in/yaml.v3" "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/detection" ) var _ options.ConfigurableParser = (*Parser)(nil) diff --git a/pkg/iac/scanners/cloudformation/parser/property.go b/pkg/iac/scanners/cloudformation/parser/property.go index 0711bae8f826..466de3497c85 100644 --- a/pkg/iac/scanners/cloudformation/parser/property.go +++ 
b/pkg/iac/scanners/cloudformation/parser/property.go @@ -6,11 +6,11 @@ import ( "strconv" "strings" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" "github.com/liamg/jfather" "gopkg.in/yaml.v3" defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) type EqualityOptions = int diff --git a/pkg/iac/scanners/cloudformation/parser/util.go b/pkg/iac/scanners/cloudformation/parser/util.go index a5786103d18a..a0792cf32865 100644 --- a/pkg/iac/scanners/cloudformation/parser/util.go +++ b/pkg/iac/scanners/cloudformation/parser/util.go @@ -3,10 +3,11 @@ package parser import ( "strconv" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" - "github.com/aquasecurity/trivy/pkg/iac/scanners/kubernetes/parser" "github.com/liamg/jfather" "gopkg.in/yaml.v3" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + "github.com/aquasecurity/trivy/pkg/iac/scanners/kubernetes/parser" ) func setPropertyValueFromJson(node jfather.Node, propertyData *PropertyInner) error { diff --git a/pkg/iac/scanners/dockerfile/parser/parser.go b/pkg/iac/scanners/dockerfile/parser/parser.go index 6cd8b2e3f3ec..8b13d1e0e61e 100644 --- a/pkg/iac/scanners/dockerfile/parser/parser.go +++ b/pkg/iac/scanners/dockerfile/parser/parser.go @@ -8,13 +8,13 @@ import ( "path/filepath" "strings" - "github.com/aquasecurity/trivy/pkg/iac/detection" "github.com/moby/buildkit/frontend/dockerfile/instructions" "github.com/moby/buildkit/frontend/dockerfile/parser" "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/providers/dockerfile" "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/detection" ) var _ options.ConfigurableParser = (*Parser)(nil) diff --git 
a/pkg/iac/scanners/helm/parser/parser.go b/pkg/iac/scanners/helm/parser/parser.go index 9c94e911283e..b6d8f77ad311 100644 --- a/pkg/iac/scanners/helm/parser/parser.go +++ b/pkg/iac/scanners/helm/parser/parser.go @@ -13,7 +13,6 @@ import ( "sort" "strings" - detection2 "github.com/aquasecurity/trivy/pkg/iac/detection" "github.com/google/uuid" "gopkg.in/yaml.v3" "helm.sh/helm/v3/pkg/action" @@ -24,6 +23,7 @@ import ( "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" + detection2 "github.com/aquasecurity/trivy/pkg/iac/detection" ) var manifestNameRegex = regexp.MustCompile("# Source: [^/]+/(.+)") diff --git a/pkg/iac/scanners/helm/parser/parser_tar.go b/pkg/iac/scanners/helm/parser/parser_tar.go index ad3abdad82ca..5455ab780683 100644 --- a/pkg/iac/scanners/helm/parser/parser_tar.go +++ b/pkg/iac/scanners/helm/parser/parser_tar.go @@ -11,8 +11,9 @@ import ( "os" "path/filepath" - "github.com/aquasecurity/trivy/pkg/iac/detection" "github.com/liamg/memoryfs" + + "github.com/aquasecurity/trivy/pkg/iac/detection" ) var errSkipFS = errors.New("skip parse FS") diff --git a/pkg/iac/scanners/helm/scanner.go b/pkg/iac/scanners/helm/scanner.go index 829c69b97fd1..4a4fe4f4b0b0 100644 --- a/pkg/iac/scanners/helm/scanner.go +++ b/pkg/iac/scanners/helm/scanner.go @@ -8,10 +8,6 @@ import ( "path/filepath" "strings" - "github.com/aquasecurity/trivy/pkg/iac/detection" - "github.com/aquasecurity/trivy/pkg/iac/scanners" - "github.com/aquasecurity/trivy/pkg/iac/scanners/helm/parser" - kparser "github.com/aquasecurity/trivy/pkg/iac/scanners/kubernetes/parser" "github.com/liamg/memoryfs" "github.com/aquasecurity/defsec/pkg/debug" @@ -20,6 +16,10 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/types" + 
"github.com/aquasecurity/trivy/pkg/iac/detection" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + "github.com/aquasecurity/trivy/pkg/iac/scanners/helm/parser" + kparser "github.com/aquasecurity/trivy/pkg/iac/scanners/kubernetes/parser" ) var _ scanners.FSScanner = (*Scanner)(nil) diff --git a/pkg/iac/scanners/kubernetes/parser/manifest_node.go b/pkg/iac/scanners/kubernetes/parser/manifest_node.go index f83c352f01ca..1f82ca1e3680 100644 --- a/pkg/iac/scanners/kubernetes/parser/manifest_node.go +++ b/pkg/iac/scanners/kubernetes/parser/manifest_node.go @@ -23,9 +23,9 @@ type ManifestNode struct { StartLine int EndLine int Offset int - Value interface{} - Type TagType - Path string + Value interface{} + Type TagType + Path string } func (r *ManifestNode) ToRego() interface{} { diff --git a/pkg/iac/scanners/kubernetes/parser/parser.go b/pkg/iac/scanners/kubernetes/parser/parser.go index 618990e5bfee..297f196ac9f0 100644 --- a/pkg/iac/scanners/kubernetes/parser/parser.go +++ b/pkg/iac/scanners/kubernetes/parser/parser.go @@ -11,11 +11,11 @@ import ( "regexp" "strings" - "github.com/aquasecurity/trivy/pkg/iac/detection" "gopkg.in/yaml.v3" "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/detection" ) var _ options.ConfigurableParser = (*Parser)(nil) diff --git a/pkg/iac/scanners/kubernetes/scanner.go b/pkg/iac/scanners/kubernetes/scanner.go index 2f34e8fef9f5..546c07f9bf0a 100644 --- a/pkg/iac/scanners/kubernetes/scanner.go +++ b/pkg/iac/scanners/kubernetes/scanner.go @@ -8,8 +8,6 @@ import ( "sort" "sync" - "github.com/aquasecurity/trivy/pkg/iac/scanners" - "github.com/aquasecurity/trivy/pkg/iac/scanners/kubernetes/parser" "github.com/liamg/memoryfs" "github.com/aquasecurity/defsec/pkg/debug" @@ -18,6 +16,8 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" 
"github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + "github.com/aquasecurity/trivy/pkg/iac/scanners/kubernetes/parser" ) var _ scanners.FSScanner = (*Scanner)(nil) diff --git a/pkg/iac/scanners/terraform/parser/functions.go b/pkg/iac/scanners/terraform/parser/functions.go index 0c517b9e7dca..39b6c268b345 100644 --- a/pkg/iac/scanners/terraform/parser/functions.go +++ b/pkg/iac/scanners/terraform/parser/functions.go @@ -3,12 +3,13 @@ package parser import ( "io/fs" - funcs2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/funcs" "github.com/hashicorp/hcl/v2/ext/tryfunc" ctyyaml "github.com/zclconf/go-cty-yaml" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/function" "github.com/zclconf/go-cty/cty/function/stdlib" + + funcs2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/funcs" ) // Functions returns the set of functions that should be used to when evaluating diff --git a/pkg/iac/scanners/terraform/parser/load_module.go b/pkg/iac/scanners/terraform/parser/load_module.go index df8cf606688f..7f1ef3060995 100644 --- a/pkg/iac/scanners/terraform/parser/load_module.go +++ b/pkg/iac/scanners/terraform/parser/load_module.go @@ -8,10 +8,10 @@ import ( "path/filepath" "strings" - "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/resolvers" "github.com/zclconf/go-cty/cty" "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/resolvers" ) type moduleLoadError struct { diff --git a/pkg/iac/scanners/terraform/parser/parser.go b/pkg/iac/scanners/terraform/parser/parser.go index c670f32e3d9a..7f3fa52b2307 100644 --- a/pkg/iac/scanners/terraform/parser/parser.go +++ b/pkg/iac/scanners/terraform/parser/parser.go @@ -48,15 +48,15 @@ type Parser 
struct { modulePath string moduleSource string moduleFS fs.FS - moduleBlock *terraform.Block - files []sourceFile - tfvarsPaths []string + moduleBlock *terraform.Block + files []sourceFile + tfvarsPaths []string stopOnHCLError bool workspaceName string - underlying *hclparse.Parser - children []*Parser - metrics Metrics - options []options.ParserOption + underlying *hclparse.Parser + children []*Parser + metrics Metrics + options []options.ParserOption debug debug.Logger allowDownloads bool skipCachedModules bool diff --git a/pkg/iac/scanners/terraform/scanner.go b/pkg/iac/scanners/terraform/scanner.go index f3e84262f705..d3b5add1c088 100644 --- a/pkg/iac/scanners/terraform/scanner.go +++ b/pkg/iac/scanners/terraform/scanner.go @@ -10,10 +10,6 @@ import ( "sync" "time" - "github.com/aquasecurity/trivy/pkg/iac/scanners" - executor2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" - "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" - "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/resolvers" "golang.org/x/exp/slices" "github.com/aquasecurity/defsec/pkg/debug" @@ -24,6 +20,10 @@ import ( "github.com/aquasecurity/defsec/pkg/terraform" "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/extrafs" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + executor2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/resolvers" ) var _ scanners.FSScanner = (*Scanner)(nil) diff --git a/pkg/iac/scanners/terraformplan/scanner.go b/pkg/iac/scanners/terraformplan/scanner.go index c678674f2b72..f15b060ae047 100644 --- a/pkg/iac/scanners/terraformplan/scanner.go +++ b/pkg/iac/scanners/terraformplan/scanner.go @@ -6,15 +6,15 @@ import ( "io" "io/fs" - terraformScanner 
"github.com/aquasecurity/trivy/pkg/iac/scanners/terraform" - "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" - "github.com/aquasecurity/trivy/pkg/iac/scanners/terraformplan/parser" "github.com/bmatcuk/doublestar/v4" "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" + terraformScanner "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraformplan/parser" ) var tfPlanExts = []string{ diff --git a/pkg/iac/scanners/toml/parser/parser.go b/pkg/iac/scanners/toml/parser/parser.go index 620da384060d..ac909ba1563b 100644 --- a/pkg/iac/scanners/toml/parser/parser.go +++ b/pkg/iac/scanners/toml/parser/parser.go @@ -7,10 +7,10 @@ import ( "path/filepath" "github.com/BurntSushi/toml" - "github.com/aquasecurity/trivy/pkg/iac/detection" "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/detection" ) var _ options.ConfigurableParser = (*Parser)(nil) diff --git a/pkg/iac/scanners/yaml/parser/parser.go b/pkg/iac/scanners/yaml/parser/parser.go index 5b6220e021f5..177d2289dd30 100644 --- a/pkg/iac/scanners/yaml/parser/parser.go +++ b/pkg/iac/scanners/yaml/parser/parser.go @@ -8,11 +8,11 @@ import ( "path/filepath" "strings" - "github.com/aquasecurity/trivy/pkg/iac/detection" "gopkg.in/yaml.v3" "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/detection" ) var _ options.ConfigurableParser = (*Parser)(nil) diff --git a/pkg/misconf/scanner.go b/pkg/misconf/scanner.go index 
6120df0b66e4..bcab56dafadd 100644 --- a/pkg/misconf/scanner.go +++ b/pkg/misconf/scanner.go @@ -11,6 +11,12 @@ import ( "sort" "strings" + "github.com/samber/lo" + "golang.org/x/xerrors" + + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/iac/detection" "github.com/aquasecurity/trivy/pkg/iac/scanners" "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm" @@ -21,16 +27,10 @@ import ( k8sscanner "github.com/aquasecurity/trivy/pkg/iac/scanners/kubernetes" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform" tfpscanner "github.com/aquasecurity/trivy/pkg/iac/scanners/terraformplan" - "github.com/samber/lo" - "golang.org/x/xerrors" - - _ "embed" - - "github.com/aquasecurity/defsec/pkg/scan" - "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/mapfs" + + _ "embed" ) var enabledDefsecTypes = map[detection.FileType]types.ConfigType{ From 0d7c0fa1fc953e175b228c6c83e178a2a3845c4b Mon Sep 17 00:00:00 2001 From: Simar Date: Tue, 30 Jan 2024 15:37:18 -0700 Subject: [PATCH 11/13] update codeowners.md --- .github/CODEOWNERS | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 08805c2a4318..5a6e9f523df8 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -6,6 +6,7 @@ docs/docs/scanner/misconfiguration @knqyf263 @simar7 docs/docs/target/aws.md @knqyf263 @simar7 pkg/fanal/analyzer/config @knqyf263 @simar7 pkg/cloud @knqyf263 @simar7 +pkg/iac @knqyf263 @simar7 # Helm chart helm/trivy/ @chen-keinan From 3e5b28fa5e55ad40dece9ca85830c449d23c749e Mon Sep 17 00:00:00 2001 From: Simar Date: Tue, 30 Jan 2024 21:21:43 -0700 Subject: [PATCH 12/13] 
refactor(deps): Move pkg/spec from trivy-policies to trivy --- go.mod | 6 +- go.sum | 10 +- pkg/compliance/spec/compliance.go | 2 +- pkg/iac/spec/spec.go | 23 + pkg/iac/specs/compliance/aws-cis-1.2.yaml | 210 +++++ pkg/iac/specs/compliance/aws-cis-1.4.yaml | 239 +++++ pkg/iac/specs/compliance/docker-cis.yaml | 70 ++ pkg/iac/specs/compliance/k8s-cis-1.23.yaml | 818 ++++++++++++++++++ pkg/iac/specs/compliance/k8s-nsa-1.0.yaml | 175 ++++ .../specs/compliance/k8s-pss-baseline.yaml | 75 ++ .../specs/compliance/k8s-pss-restricted.yaml | 111 +++ pkg/iac/specs/loader.go | 61 ++ pkg/iac/specs/loader_test.go | 39 + test/module_test.go | 3 +- 14 files changed, 1835 insertions(+), 7 deletions(-) create mode 100644 pkg/iac/spec/spec.go create mode 100644 pkg/iac/specs/compliance/aws-cis-1.2.yaml create mode 100644 pkg/iac/specs/compliance/aws-cis-1.4.yaml create mode 100644 pkg/iac/specs/compliance/docker-cis.yaml create mode 100644 pkg/iac/specs/compliance/k8s-cis-1.23.yaml create mode 100644 pkg/iac/specs/compliance/k8s-nsa-1.0.yaml create mode 100644 pkg/iac/specs/compliance/k8s-pss-baseline.yaml create mode 100644 pkg/iac/specs/compliance/k8s-pss-restricted.yaml create mode 100644 pkg/iac/specs/loader.go create mode 100644 pkg/iac/specs/loader_test.go diff --git a/go.mod b/go.mod index 62f0c54fe9b4..c912f7b0b27b 100644 --- a/go.mod +++ b/go.mod @@ -27,7 +27,7 @@ require ( github.com/aquasecurity/trivy-db v0.0.0-20231005141211-4fc651f7ac8d github.com/aquasecurity/trivy-java-db v0.0.0-20240109071736-184bd7481d48 github.com/aquasecurity/trivy-kubernetes v0.6.3-0.20240118072219-c433b06f98e1 - github.com/aquasecurity/trivy-policies v0.8.0 + github.com/aquasecurity/trivy-policies v0.8.0 // indirect github.com/aws/aws-sdk-go-v2 v1.24.1 github.com/aws/aws-sdk-go-v2/config v1.26.3 github.com/aws/aws-sdk-go-v2/credentials v1.16.14 @@ -41,7 +41,7 @@ require ( github.com/cheggaaa/pb/v3 v3.1.4 
github.com/containerd/containerd v1.7.11 github.com/csaf-poc/csaf_distribution/v3 v3.0.0 - github.com/docker/docker v24.0.7+incompatible + github.com/docker/docker v25.0.0+incompatible github.com/docker/go-connections v0.4.0 github.com/fatih/color v1.15.0 github.com/go-git/go-git/v5 v5.11.0 @@ -236,6 +236,7 @@ require ( github.com/cyphar/filepath-securejoin v0.2.4 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/distribution/reference v0.5.0 // indirect github.com/dlclark/regexp2 v1.4.0 // indirect github.com/docker/cli v24.0.6+incompatible // indirect github.com/docker/distribution v2.8.2+incompatible // indirect @@ -328,6 +329,7 @@ require ( github.com/moby/sys/mountinfo v0.6.2 // indirect github.com/moby/sys/sequential v0.5.0 // indirect github.com/moby/sys/signal v0.7.0 // indirect + github.com/moby/sys/user v0.1.0 // indirect github.com/moby/term v0.5.0 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect diff --git a/go.sum b/go.sum index 570e1d31a49d..de336a67e987 100644 --- a/go.sum +++ b/go.sum @@ -709,6 +709,8 @@ github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48 h1:fRzb/w+pyskVMQ+ github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= github.com/distribution/distribution/v3 v3.0.0-20221208165359-362910506bc2 h1:aBfCb7iqHmDEIp6fBvC/hQUddQfg+3qdYjwzaiP9Hnc= github.com/distribution/distribution/v3 v3.0.0-20221208165359-362910506bc2/go.mod h1:WHNsWjnIn2V1LYOrME7e8KxSeKunYHsxEm4am0BUtcI= +github.com/distribution/reference v0.5.0 h1:/FUIFXtfc/x2gpa5/VGfiGLuOIdYa1t65IKK2OFGvA0= 
+github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E= github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= @@ -724,8 +726,8 @@ github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m3 github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/docker v20.10.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker v23.0.0-rc.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker v24.0.7+incompatible h1:Wo6l37AuwP3JaMnZa226lzVXGA3F9Ig1seQen0cKYlM= -github.com/docker/docker v24.0.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v25.0.0+incompatible h1:g9b6wZTblhMgzOT2tspESstfw6ySZ9kdm94BLDKaZac= +github.com/docker/docker v25.0.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker-credential-helpers v0.6.3/go.mod h1:WRaJzqw3CTB9bk10avuGsjVBZsD05qeibJ1/TYlvc0Y= github.com/docker/docker-credential-helpers v0.7.0 h1:xtCHsjxogADNZcdv1pKUHXryefjlVRqWqIhk/uXJp0A= github.com/docker/docker-credential-helpers v0.7.0/go.mod h1:rETQfLdHNT3foU5kuNkFR1R1V12OJRRO5lzt2D1b5X0= @@ -1386,6 +1388,8 @@ github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWK github.com/moby/sys/signal v0.7.0 h1:25RW3d5TnQEoKvRbEKUGay6DCQ46IxAVTT9CUMgmsSI= github.com/moby/sys/signal v0.7.0/go.mod h1:GQ6ObYZfqacOwTtlXvcmh9A26dVRul/hbOZn88Kg8Tg= github.com/moby/sys/symlink v0.1.0/go.mod h1:GGDODQmbFOjFsXvfLVn3+ZRxkch54RkSiGqsZeMYowQ= 
+github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg= +github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU= github.com/moby/term v0.0.0-20200312100748-672ec06f55cd/go.mod h1:DdlQx2hp0Ss5/fLikoLlEeIYiATotOjgB//nb973jeo= github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= @@ -1783,6 +1787,8 @@ go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.21.0 h1:cl5P5/GIfFh4t6xyruO go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.21.0/go.mod h1:zgBdWWAu7oEEMC06MMKc5NLbA/1YDXV1sMpSqEeLQLg= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.21.0 h1:tIqheXEFWAZ7O8A7m+J0aPTmpJN3YQ7qetUAdkkkKpk= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.21.0/go.mod h1:nUeKExfxAQVbiVFn32YXpXZZHZ61Cc3s3Rn1pDBGAb0= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 h1:IeMeyr1aBvBiPVYihXIaeIZba6b8E1bYp7lbdxK8CQg= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0/go.mod h1:oVdCUtjq9MK9BlS7TtucsQwUcXcymNiEDjgDD2jMtZU= go.opentelemetry.io/otel/metric v1.21.0 h1:tlYWfeo+Bocx5kLEloTjbcDwBuELRrIFxwdQ36PlJu4= go.opentelemetry.io/otel/metric v1.21.0/go.mod h1:o1p3CA8nNHW8j5yuQLdc1eeqEaPfzug24uvsyIEJRWM= go.opentelemetry.io/otel/sdk v1.21.0 h1:FTt8qirL1EysG6sTQRZ5TokkU8d0ugCj8htOgThZXQ8= diff --git a/pkg/compliance/spec/compliance.go b/pkg/compliance/spec/compliance.go index 73b7dfe635c1..a88a9762f261 100644 --- a/pkg/compliance/spec/compliance.go +++ b/pkg/compliance/spec/compliance.go @@ -10,7 +10,7 @@ import ( "gopkg.in/yaml.v3" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - sp "github.com/aquasecurity/trivy-policies/pkg/spec" + sp "github.com/aquasecurity/trivy/pkg/iac/spec" "github.com/aquasecurity/trivy/pkg/types" ) diff --git a/pkg/iac/spec/spec.go 
b/pkg/iac/spec/spec.go new file mode 100644 index 000000000000..8e8ddea9b723 --- /dev/null +++ b/pkg/iac/spec/spec.go @@ -0,0 +1,23 @@ +package spec + +import ( + "github.com/aquasecurity/trivy/pkg/iac/specs" +) + +// Loader access compliance specs +type Loader interface { + GetSpecByName(name string) string +} + +type specLoader struct { +} + +// NewSpecLoader instansiate spec loader +func NewSpecLoader() Loader { + return &specLoader{} +} + +// GetSpecByName get spec name and return spec data +func (sl specLoader) GetSpecByName(name string) string { + return specs.GetSpec(name) +} diff --git a/pkg/iac/specs/compliance/aws-cis-1.2.yaml b/pkg/iac/specs/compliance/aws-cis-1.2.yaml new file mode 100644 index 000000000000..d0912333b81f --- /dev/null +++ b/pkg/iac/specs/compliance/aws-cis-1.2.yaml @@ -0,0 +1,210 @@ +spec: + id: aws-cis-1.2 + title: AWS CIS Foundations v1.2 + description: AWS CIS Foundations + version: "1.2" + relatedResources: + - https://www.cisecurity.org/benchmark/amazon_web_services + controls: + - id: "1.1" + name: limit-root-account-usage + description: |- + The "root" account has unrestricted access to all resources in the AWS account. It is highly + recommended that the use of this account be avoided. + checks: + - id: AVD-AWS-0140 + severity: LOW + - id: "1.10" + name: no-password-reuse + description: IAM Password policy should prevent password reuse. + checks: + - id: AVD-AWS-0056 + severity: MEDIUM + - id: "1.11" + name: set-max-password-age + description: IAM Password policy should have expiry less than or equal to 90 days. + checks: + - id: AVD-AWS-0062 + severity: MEDIUM + - id: "1.12" + name: no-root-access-keys + description: The root user has complete access to all services and resources in an AWS account. AWS Access Keys provide programmatic access to a given account. 
+ checks: + - id: AVD-AWS-0141 + severity: CRITICAL + - id: "1.13" + name: enforce-root-mfa + description: |- + The "root" account has unrestricted access to all resources in the AWS account. It is highly + recommended that this account have MFA enabled. + checks: + - id: AVD-AWS-0142 + severity: CRITICAL + - id: "1.16" + name: no-user-attached-policies + description: IAM policies should not be granted directly to users. + checks: + - id: AVD-AWS-0143 + severity: LOW + - id: "1.2" + name: enforce-user-mfa + description: IAM Users should have MFA enforcement activated. + checks: + - id: AVD-AWS-0145 + severity: MEDIUM + - id: "1.3" + name: disable-unused-credentials + description: Credentials which are no longer used should be disabled. + checks: + - id: AVD-AWS-0144 + severity: MEDIUM + - id: "1.4" + name: rotate-access-keys + description: Access keys should be rotated at least every 90 days + checks: + - id: AVD-AWS-0146 + severity: LOW + - id: "1.5" + name: require-uppercase-in-passwords + description: IAM Password policy should have requirement for at least one uppercase character. + checks: + - id: AVD-AWS-0061 + severity: MEDIUM + - id: "1.6" + name: require-lowercase-in-passwords + description: IAM Password policy should have requirement for at least one lowercase character. + checks: + - id: AVD-AWS-0058 + severity: MEDIUM + - id: "1.7" + name: require-symbols-in-passwords + description: IAM Password policy should have requirement for at least one symbol in the password. + checks: + - id: AVD-AWS-0060 + severity: MEDIUM + - id: "1.8" + name: require-numbers-in-passwords + description: IAM Password policy should have requirement for at least one number in the password. + checks: + - id: AVD-AWS-0059 + severity: MEDIUM + - id: "1.9" + name: set-minimum-password-length + description: IAM Password policy should have minimum password length of 14 or more characters. 
+ checks: + - id: AVD-AWS-0063 + severity: MEDIUM + - id: "2.3" + name: no-public-log-access + description: The S3 Bucket backing Cloudtrail should be private + checks: + - id: AVD-AWS-0161 + severity: CRITICAL + - id: "2.4" + name: ensure-cloudwatch-integration + description: CloudTrail logs should be stored in S3 and also sent to CloudWatch Logs + checks: + - id: AVD-AWS-0162 + severity: LOW + - id: "2.5" + name: enable-all-regions + description: Cloudtrail should be enabled in all regions regardless of where your AWS resources are generally homed + checks: + - id: AVD-AWS-0014 + severity: MEDIUM + - id: "2.6" + name: require-bucket-access-logging + description: You should enable bucket access logging on the CloudTrail S3 bucket. + checks: + - id: AVD-AWS-0163 + severity: LOW + - id: "3.1" + name: require-unauthorised-api-call-alarm + description: Ensure a log metric filter and alarm exist for unauthorized API calls + checks: + - id: AVD-AWS-0147 + severity: LOW + - id: "3.10" + name: require-sg-change-alarms + description: Ensure a log metric filter and alarm exist for security group changes + checks: + - id: AVD-AWS-0156 + severity: LOW + - id: "3.11" + name: require-nacl-changes-alarm + description: Ensure a log metric filter and alarm exist for changes to Network Access Control Lists (NACL) + checks: + - id: AVD-AWS-0157 + severity: LOW + - id: "3.12" + name: require-network-gateway-changes-alarm + description: Ensure a log metric filter and alarm exist for changes to network gateways + checks: + - id: AVD-AWS-0158 + severity: LOW + - id: "3.13" + name: require-network-gateway-changes-alarm + description: Ensure a log metric filter and alarm exist for route table changes + checks: + - id: AVD-AWS-0159 + severity: LOW + - id: "3.14" + name: require-vpc-changes-alarm + description: Ensure a log metric filter and alarm exist for VPC changes + checks: + - id: AVD-AWS-0160 + severity: LOW + - id: "3.2" + name: require-non-mfa-login-alarm + description: Ensure a 
log metric filter and alarm exist for AWS Management Console sign-in without MFA + checks: + - id: AVD-AWS-0148 + severity: LOW + - id: "3.3" + name: require-root-user-usage-alarm + description: Ensure a log metric filter and alarm exist for usage of root user + checks: + - id: AVD-AWS-0149 + severity: LOW + - id: "3.4" + name: require-iam-policy-change-alarm + description: Ensure a log metric filter and alarm exist for IAM policy changes + checks: + - id: AVD-AWS-0150 + severity: LOW + - id: "3.5" + name: require-cloud-trail-change-alarm + description: Ensure a log metric filter and alarm exist for CloudTrail configuration changes + checks: + - id: AVD-AWS-0151 + severity: LOW + - id: "3.6" + name: require-console-login-failures-alarm + description: Ensure a log metric filter and alarm exist for AWS Management Console authentication failures + checks: + - id: AVD-AWS-0152 + severity: LOW + - id: "3.7" + name: require-cmk-disabled-alarm + description: Ensure a log metric filter and alarm exist for disabling or scheduled deletion of customer managed keys + checks: + - id: AVD-AWS-0153 + severity: LOW + - id: "3.8" + name: require-s3-bucket-policy-change-alarm + description: Ensure a log metric filter and alarm exist for S3 bucket policy changes + checks: + - id: AVD-AWS-0154 + severity: LOW + - id: "3.9" + name: require-config-configuration-changes-alarm + description: Ensure a log metric filter and alarm exist for AWS Config configuration changes + checks: + - id: AVD-AWS-0155 + severity: LOW + - id: "4.1" + name: no-public-ingress-sgr + description: An ingress security group rule allows traffic from /0. 
+ checks: + - id: AVD-AWS-0107 + severity: CRITICAL \ No newline at end of file diff --git a/pkg/iac/specs/compliance/aws-cis-1.4.yaml b/pkg/iac/specs/compliance/aws-cis-1.4.yaml new file mode 100644 index 000000000000..883e406594f0 --- /dev/null +++ b/pkg/iac/specs/compliance/aws-cis-1.4.yaml @@ -0,0 +1,239 @@ +spec: + id: aws-cis-1.4 + title: AWS CIS Foundations v1.4 + description: AWS CIS Foundations + version: "1.4" + relatedResources: + - https://www.cisecurity.org/benchmark/amazon_web_services + controls: + - id: 2.1.3 + name: require-mfa-delete + description: Buckets should have MFA deletion protection enabled. + checks: + - id: AVD-AWS-0170 + severity: LOW + - id: "1.12" + name: disable-unused-credentials-45-days + description: |- + AWS IAM users can access AWS resources using different types of credentials, such as + passwords or access keys. It is recommended that all credentials that have been unused in + 45 or greater days be deactivated or removed. + checks: + - id: AVD-AWS-0166 + severity: LOW + - id: "1.13" + name: limit-user-access-keys + description: No user should have more than one active access key. + checks: + - id: AVD-AWS-0167 + severity: LOW + - id: "1.14" + name: rotate-access-keys + description: Access keys should be rotated at least every 90 days + checks: + - id: AVD-AWS-0146 + severity: LOW + - id: "1.15" + name: no-user-attached-policies + description: IAM policies should not be granted directly to users. + checks: + - id: AVD-AWS-0143 + severity: LOW + - id: "1.16" + name: no-policy-wildcards + description: IAM policy should avoid use of wildcards and instead apply the principle of least privilege + checks: + - id: AVD-AWS-0057 + severity: HIGH + - id: "1.17" + name: require-support-role + description: Missing IAM Role to allow authorized users to manage incidents with AWS Support. 
+ checks: + - id: AVD-AWS-0169 + severity: LOW + - id: "1.19" + name: remove-expired-certificates + description: Delete expired TLS certificates + checks: + - id: AVD-AWS-0168 + severity: LOW + - id: "1.20" + name: enable-access-analyzer + description: Enable IAM Access analyzer for IAM policies about all resources in each region. + checks: + - id: AVD-AWS-0175 + severity: LOW + - id: "1.4" + name: enforce-user-mfa + description: IAM Users should have MFA enforcement activated. + checks: + - id: AVD-AWS-0145 + severity: MEDIUM + - id: "1.4" + name: no-root-access-keys + description: The root user has complete access to all services and resources in an AWS account. AWS Access Keys provide programmatic access to a given account. + checks: + - id: AVD-AWS-0141 + severity: CRITICAL + - id: "1.5" + name: enforce-root-mfa + description: |- + The "root" account has unrestricted access to all resources in the AWS account. It is highly + recommended that this account have MFA enabled. + checks: + - id: AVD-AWS-0142 + severity: CRITICAL + - id: "1.6" + name: enforce-root-hardware-mfa + description: |- + The "root" account has unrestricted access to all resources in the AWS account. It is highly + recommended that this account have hardware MFA enabled. + checks: + - id: AVD-AWS-0165 + severity: MEDIUM + - id: "1.7" + name: limit-root-account-usage + description: |- + The "root" account has unrestricted access to all resources in the AWS account. It is highly + recommended that the use of this account be avoided. + checks: + - id: AVD-AWS-0140 + severity: LOW + - id: "1.8" + name: set-minimum-password-length + description: IAM Password policy should have minimum password length of 14 or more characters. + checks: + - id: AVD-AWS-0063 + severity: MEDIUM + - id: "1.9" + name: no-password-reuse + description: IAM Password policy should prevent password reuse. 
+ checks: + - id: AVD-AWS-0056 + severity: MEDIUM + - id: "3.10" + name: enable-object-write-logging + description: S3 object-level API operations such as GetObject, DeleteObject, and PutObject are called data events. By default, CloudTrail trails don't log data events and so it is recommended to enable Object-level logging for S3 buckets. + checks: + - id: AVD-AWS-0171 + severity: LOW + - id: "3.11" + name: enable-object-read-logging + description: S3 object-level API operations such as GetObject, DeleteObject, and PutObject are called data events. By default, CloudTrail trails don't log data events and so it is recommended to enable Object-level logging for S3 buckets. + checks: + - id: AVD-AWS-0172 + severity: LOW + - id: "3.3" + name: no-public-log-access + description: The S3 Bucket backing Cloudtrail should be private + checks: + - id: AVD-AWS-0161 + severity: CRITICAL + - id: "3.4" + name: ensure-cloudwatch-integration + description: CloudTrail logs should be stored in S3 and also sent to CloudWatch Logs + checks: + - id: AVD-AWS-0162 + severity: LOW + - id: "3.6" + name: require-bucket-access-logging + description: You should enable bucket access logging on the CloudTrail S3 bucket. 
+ checks: + - id: AVD-AWS-0163 + severity: LOW + - id: "4.10" + name: require-sg-change-alarms + description: Ensure a log metric filter and alarm exist for security group changes + checks: + - id: AVD-AWS-0156 + severity: LOW + - id: "4.1" + name: require-unauthorised-api-call-alarm + description: Ensure a log metric filter and alarm exist for unauthorized API calls + checks: + - id: AVD-AWS-0147 + severity: LOW + - id: "4.11" + name: require-nacl-changes-alarm + description: Ensure a log metric filter and alarm exist for changes to Network Access Control Lists (NACL) + checks: + - id: AVD-AWS-0157 + severity: LOW + - id: "4.12" + name: require-network-gateway-changes-alarm + description: Ensure a log metric filter and alarm exist for changes to network gateways + checks: + - id: AVD-AWS-0158 + severity: LOW + - id: "4.13" + name: require-route-table-changes-alarm + description: Ensure a log metric filter and alarm exist for route table changes + checks: + - id: AVD-AWS-0159 + severity: LOW + - id: "4.14" + name: require-vpc-changes-alarm + description: Ensure a log metric filter and alarm exist for VPC changes + checks: + - id: AVD-AWS-0160 + severity: LOW + - id: "4.15" + name: require-org-changes-alarm + description: Ensure a log metric filter and alarm exist for organisation changes + checks: + - id: AVD-AWS-0174 + severity: LOW + - id: "4.2" + name: require-non-mfa-login-alarm + description: Ensure a log metric filter and alarm exist for AWS Management Console sign-in without MFA + checks: + - id: AVD-AWS-0148 + severity: LOW + - id: "4.3" + name: require-root-user-usage-alarm + description: Ensure a log metric filter and alarm exist for usage of root user + checks: + - id: AVD-AWS-0149 + severity: LOW + - id: "4.4" + name: require-iam-policy-change-alarm + description: Ensure a log metric filter and alarm exist for IAM policy changes + checks: + - id: AVD-AWS-0150 + severity: LOW + - id: "4.5" + name: require-cloud-trail-change-alarm + description: 
Ensure a log metric filter and alarm exist for CloudTrail configuration changes + checks: + - id: AVD-AWS-0151 + severity: LOW + - id: "4.6" + name: require-console-login-failures-alarm + description: Ensure a log metric filter and alarm exist for AWS Management Console authentication failures + checks: + - id: AVD-AWS-0152 + severity: LOW + - id: "4.7" + name: require-cmk-disabled-alarm + description: Ensure a log metric filter and alarm exist for disabling or scheduled deletion of customer managed keys + checks: + - id: AVD-AWS-0153 + severity: LOW + - id: "4.8" + name: require-s3-bucket-policy-change-alarm + description: Ensure a log metric filter and alarm exist for S3 bucket policy changes + checks: + - id: AVD-AWS-0154 + severity: LOW + - id: "4.9" + name: require-config-configuration-changes-alarm + description: Ensure a log metric filter and alarm exist for AWS Config configuration changes + checks: + - id: AVD-AWS-0155 + severity: LOW + - id: "5.3" + name: restrict-all-in-default-sg + description: Default security group should restrict all traffic + checks: + - id: AVD-AWS-0173 + severity: LOW \ No newline at end of file diff --git a/pkg/iac/specs/compliance/docker-cis.yaml b/pkg/iac/specs/compliance/docker-cis.yaml new file mode 100644 index 000000000000..2027168f15f1 --- /dev/null +++ b/pkg/iac/specs/compliance/docker-cis.yaml @@ -0,0 +1,70 @@ +--- +spec: + id: docker-cis + title: CIS Docker Community Edition Benchmark v1.1.0 + description: CIS Docker Community Edition Benchmark + relatedResources : + - https://www.cisecurity.org/benchmark/docker + version: "1.1.0" + controls: + - id: '4.1' + name: Ensure a user for the container has been created + description: 'Create a non-root user for the container in the Dockerfile for the container image.' 
+ checks: + - id: AVD-DS-0002 + severity: 'HIGH' + - id: '4.2' + name: Ensure that containers use trusted base images (Manual) + description: 'Ensure that the container image is written either from scratch or is based on another established and trusted base image downloaded over a secure channel.' + checks: + severity: 'HIGH' + - id: '4.3' + name: Ensure unnecessary packages are not installed in the container (Manual) + description: 'Containers tend to be minimal and slim down versions of the Operating System. Do not install anything that does not justify the purpose of container.' + checks: + severity: 'HIGH' + - id: '4.4' + name: Ensure images are scanned and rebuilt to include security patches + description: 'Images should be scanned "frequently" for any vulnerabilities. Rebuild the images to include patches and then instantiate new containers from it.' + checks: + - id: VULN-CRITICAL # special ID for filtering vulnerabilities + severity: 'CRITICAL' + - id: '4.5' + name: Ensure Content trust for Docker is Enabled (Manual) + description: 'Content trust is disabled by default. You should enable it.' + checks: + severity: 'LOW' + - id: '4.6' + name: Ensure HEALTHCHECK instructions have been added to the container image + description: 'Add HEALTHCHECK instruction in your docker container images to perform the health check on running containers.' + checks: + - id: AVD-DS-0026 + severity: 'LOW' + - id: '4.7' + name: Ensure update instructions are not used alone in the Dockerfile + description: 'Do not use update instructions such as apt-get update alone or in a single line in the Dockerfile.' + checks: + - id: AVD-DS-0017 + severity: 'HIGH' + - id: '4.8' + name: Ensure setuid and setgid permissions are removed in the images (Manual) + description: 'Removing setuid and setgid permissions in the images would prevent privilege escalation attacks in the containers.' 
+ checks: + severity: 'HIGH' + - id: '4.9' + name: Ensure COPY is used instead of ADD in Dockerfile + description: 'Use COPY instruction instead of ADD instruction in the Dockerfile.' + checks: + - id: AVD-DS-0005 + severity: 'LOW' + - id: '4.10' + name: Ensure secrets are not stored in Dockerfiles + description: 'Do not store any secrets in Dockerfiles.' + checks: + - id: SECRET-CRITICAL # special ID for filtering secrets + severity: 'CRITICAL' + - id: '4.11' + name: Ensure verified packages are only Installed (Manual) + description: 'Verify authenticity of the packages before installing them in the image.' + checks: # TODO + severity: 'MEDIUM' diff --git a/pkg/iac/specs/compliance/k8s-cis-1.23.yaml b/pkg/iac/specs/compliance/k8s-cis-1.23.yaml new file mode 100644 index 000000000000..6d5527f05f19 --- /dev/null +++ b/pkg/iac/specs/compliance/k8s-cis-1.23.yaml @@ -0,0 +1,818 @@ +--- +spec: + id: k8s-cis + title: CIS Kubernetes Benchmarks v1.23 + description: CIS Kubernetes Benchmarks + version: "1.23" + relatedResources: + - https://www.cisecurity.org/benchmark/kubernetes + controls: + - id: 1.1.1 + name: Ensure that the API server pod specification file permissions are set to + 600 or more restrictive + description: Ensure that the API server pod specification file has permissions + of 600 or more restrictive + checks: + - id: AVD-KCV-0048 + severity: HIGH + - id: 1.1.2 + name: Ensure that the API server pod specification file ownership is set to + root:root + description: Ensure that the API server pod specification file ownership is set + to root:root + checks: + - id: AVD-KCV-0049 + severity: HIGH + - id: 1.1.3 + name: Ensure that the controller manager pod specification file permissions are + set to 600 or more restrictive + description: Ensure that the controller manager pod specification file has + permissions of 600 or more restrictive + checks: + - id: AVD-KCV-0050 + severity: HIGH + - id: 1.1.4 + name: Ensure that the controller manager pod specification 
file ownership is set + to root:root + description: Ensure that the controller manager pod specification file ownership + is set to root:root + checks: + - id: AVD-KCV-0051 + severity: HIGH + - id: 1.1.5 + name: Ensure that the scheduler pod specification file permissions are set to + 600 or more restrictive + description: Ensure that the scheduler pod specification file has permissions of + 600 or more restrictive + checks: + - id: AVD-KCV-0052 + severity: HIGH + - id: 1.1.6 + name: Ensure that the scheduler pod specification file ownership is set to + root:root + description: Ensure that the scheduler pod specification file ownership is set + to root:root + checks: + - id: AVD-KCV-0053 + severity: HIGH + - id: 1.1.7 + name: Ensure that the etcd pod specification file permissions are set to 600 or + more restrictive + description: Ensure that the etcd pod specification file has permissions of 600 + or more restrictive + checks: + - id: AVD-KCV-0054 + severity: HIGH + - id: 1.1.8 + name: Ensure that the etcd pod specification file ownership is set to root:root + description: Ensure that the etcd pod specification file ownership is set to + root:root. 
+ checks: + - id: AVD-KCV-0055 + severity: HIGH + - id: 1.1.9 + name: Ensure that the Container Network Interface file permissions are set to + 600 or more restrictive + description: Ensure that the Container Network Interface files have permissions + of 600 or more restrictive + checks: + - id: AVD-KCV-0056 + severity: HIGH + - id: 1.1.10 + name: Ensure that the Container Network Interface file ownership is set to + root:root + description: Ensure that the Container Network Interface files have ownership + set to root:root + checks: + - id: AVD-KCV-0057 + severity: HIGH + - id: 1.1.11 + name: Ensure that the etcd data directory permissions are set to 700 or more + restrictive + description: Ensure that the etcd data directory has permissions of 700 or more + restrictive + checks: + - id: AVD-KCV-0058 + severity: HIGH + - id: 1.1.12 + name: Ensure that the etcd data directory ownership is set to etcd:etcd + description: Ensure that the etcd data directory ownership is set to etcd:etcd + checks: + - id: AVD-KCV-0059 + severity: LOW + - id: 1.1.13 + name: Ensure that the admin.conf file permissions are set to 600 + description: Ensure that the admin.conf file has permissions of 600 + checks: + - id: AVD-KCV-0060 + severity: CRITICAL + - id: 1.1.14 + name: Ensure that the admin.conf file ownership is set to root:root + description: Ensure that the admin.conf file ownership is set to root:root + checks: + - id: AVD-KCV-0061 + severity: CRITICAL + - id: 1.1.15 + name: Ensure that the scheduler.conf file permissions are set to 600 or more + restrictive + description: Ensure that the scheduler.conf file has permissions of 600 or more + restrictive + checks: + - id: AVD-KCV-0062 + severity: HIGH + - id: 1.1.16 + name: Ensure that the scheduler.conf file ownership is set to root:root + description: Ensure that the scheduler.conf file ownership is set to root:root + checks: + - id: AVD-KCV-0063 + severity: HIGH + - id: 1.1.17 + name: Ensure that the controller-manager.conf 
file permissions are set to 600 or + more restrictive + description: Ensure that the controller-manager.conf file has permissions of 600 + or more restrictive + checks: + - id: AVD-KCV-0064 + severity: HIGH + - id: 1.1.18 + name: Ensure that the controller-manager.conf file ownership is set to root:root + description: Ensure that the controller-manager.conf file ownership is set to + root:root. + checks: + - id: AVD-KCV-0065 + severity: HIGH + - id: 1.1.19 + name: Ensure that the Kubernetes PKI directory and file ownership is set to + root:root + description: Ensure that the Kubernetes PKI directory and file ownership is set + to root:root + checks: + - id: AVD-KCV-0066 + severity: CRITICAL + - id: 1.1.20 + name: Ensure that the Kubernetes PKI certificate file permissions are set to 600 + or more restrictive + description: Ensure that Kubernetes PKI certificate files have permissions of + 600 or more restrictive + checks: + - id: AVD-KCV-0068 + severity: CRITICAL + - id: 1.1.21 + name: Ensure that the Kubernetes PKI key file permissions are set to 600 + description: Ensure that Kubernetes PKI key files have permissions of 600 + checks: + - id: AVD-KCV-0067 + severity: CRITICAL + - id: 1.2.1 + name: Ensure that the --anonymous-auth argument is set to false + description: Disable anonymous requests to the API server + checks: + - id: AVD-KCV-0001 + severity: MEDIUM + - id: 1.2.2 + name: Ensure that the --token-auth-file parameter is not set + description: Do not use token based authentication + checks: + - id: AVD-KCV-0002 + severity: LOW + - id: 1.2.3 + name: Ensure that the --DenyServiceExternalIPs is not set + description: This admission controller rejects all net-new usage of the Service + field externalIPs + checks: + - id: AVD-KCV-0003 + severity: LOW + - id: 1.2.4 + name: Ensure that the --kubelet-https argument is set to true + description: Use https for kubelet connections + checks: + - id: AVD-KCV-0004 + severity: LOW + - id: 1.2.5 + name: Ensure that the 
--kubelet-client-certificate and --kubelet-client-key + arguments are set as appropriate + description: Enable certificate based kubelet authentication + checks: + - id: AVD-KCV-0005 + severity: HIGH + - id: 1.2.6 + name: Ensure that the --kubelet-certificate-authority argument is set as + appropriate + description: Verify kubelets certificate before establishing connection + checks: + - id: AVD-KCV-0006 + severity: HIGH + - id: 1.2.7 + name: Ensure that the --authorization-mode argument is not set to AlwaysAllow + description: Do not always authorize all requests + checks: + - id: AVD-KCV-0007 + severity: LOW + - id: 1.2.8 + name: Ensure that the --authorization-mode argument includes Node + description: Restrict kubelet nodes to reading only objects associated with them + checks: + - id: AVD-KCV-0008 + severity: HIGH + - id: 1.2.9 + name: Ensure that the --authorization-mode argument includes RBAC + description: Turn on Role Based Access Control + checks: + - id: AVD-KCV-0009 + severity: HIGH + - id: 1.2.10 + name: Ensure that the admission control plugin EventRateLimit is set + description: Limit the rate at which the API server accepts requests + checks: + - id: AVD-KCV-0010 + severity: HIGH + - id: 1.2.11 + name: Ensure that the admission control plugin AlwaysAdmit is not set + description: Do not allow all requests + checks: + - id: AVD-KCV-0011 + severity: LOW + - id: 1.2.12 + name: Ensure that the admission control plugin AlwaysPullImages is set + description: Always pull images + checks: + - id: AVD-KCV-0012 + severity: MEDIUM + - id: 1.2.13 + name: Ensure that the admission control plugin SecurityContextDeny is set if + PodSecurityPolicy is not used + description: The SecurityContextDeny admission controller can be used to deny + pods which make use of some SecurityContext fields which could allow for + privilege escalation in the cluster. 
This should be used where + PodSecurityPolicy is not in place within the cluster + checks: + - id: AVD-KCV-0013 + severity: MEDIUM + - id: 1.2.14 + name: Ensure that the admission control plugin ServiceAccount is set + description: Automate service accounts management + checks: + - id: AVD-KCV-0014 + severity: LOW + - id: 1.2.15 + name: Ensure that the admission control plugin NamespaceLifecycle is set + description: Reject creating objects in a namespace that is undergoing termination + checks: + - id: AVD-KCV-0015 + severity: LOW + - id: 1.2.16 + name: Ensure that the admission control plugin NodeRestriction is set + description: Limit the Node and Pod objects that a kubelet could modify + checks: + - id: AVD-KCV-0016 + severity: LOW + - id: 1.2.17 + name: Ensure that the --secure-port argument is not set to 0 + description: Do not disable the secure port + checks: + - id: AVD-KCV-0017 + severity: HIGH + - id: 1.2.18 + name: Ensure that the --profiling argument is set to false + description: Disable profiling, if not needed + checks: + - id: AVD-KCV-0018 + severity: LOW + - id: 1.2.19 + name: Ensure that the --audit-log-path argument is set + description: Enable auditing on the Kubernetes API Server and set the desired + audit log path. 
+ checks: + - id: AVD-KCV-0019 + severity: LOW + - id: 1.2.20 + name: Ensure that the --audit-log-maxage argument is set to 30 or as appropriate + description: Retain the logs for at least 30 days or as appropriate + checks: + - id: AVD-KCV-0020 + severity: LOW + - id: 1.2.21 + name: Ensure that the --audit-log-maxbackup argument is set to 10 or as + appropriate + description: Retain 10 or an appropriate number of old log file + checks: + - id: AVD-KCV-0021 + severity: LOW + - id: 1.2.22 + name: Ensure that the --audit-log-maxsize argument is set to 100 or as + appropriate + description: Rotate log files on reaching 100 MB or as appropriate + checks: + - id: AVD-KCV-0022 + severity: LOW + - id: 1.2.24 + name: Ensure that the --service-account-lookup argument is set to true + description: Validate service account before validating token + checks: + - id: AVD-KCV-0024 + severity: LOW + - id: 1.2.25 + name: Ensure that the --service-account-key-file argument is set as appropriate + description: Explicitly set a service account public key file for service + accounts on the apiserver + checks: + - id: AVD-KCV-0025 + severity: LOW + - id: 1.2.26 + name: Ensure that the --etcd-certfile and --etcd-keyfile arguments are set as + appropriate + description: etcd should be configured to make use of TLS encryption for client + connections + checks: + - id: AVD-KCV-0026 + severity: LOW + - id: 1.2.27 + name: Ensure that the --tls-cert-file and --tls-private-key-file arguments are + set as appropriate + description: Setup TLS connection on the API server + checks: + - id: AVD-KCV-0027 + severity: MEDIUM + - id: 1.2.28 + name: Ensure that the --client-ca-file argument is set appropriate + description: Setup TLS connection on the API server + checks: + - id: AVD-KCV-0028 + severity: LOW + - id: 1.2.29 + name: Ensure that the --etcd-cafile argument is set as appropriate + description: etcd should be configured to make use of TLS encryption for client + connections. 
+ checks: + - id: AVD-KCV-0029 + severity: LOW + - id: 1.2.30 + name: Ensure that the --encryption-provider-config argument is set as + appropriate + description: Encrypt etcd key-value store + checks: + - id: AVD-KCV-0030 + severity: LOW + - id: 1.3.1 + name: Ensure that the --terminated-pod-gc-threshold argument is set as + appropriate + description: Activate garbage collector on pod termination, as appropriate + checks: + - id: AVD-KCV-0033 + severity: MEDIUM + - id: 1.3.3 + name: Ensure that the --use-service-account-credentials argument is set to true + description: Use individual service account credentials for each controller + checks: + - id: AVD-KCV-0035 + severity: MEDIUM + - id: 1.3.4 + name: Ensure that the --service-account-private-key-file argument is set as + appropriate + description: Explicitly set a service account private key file for service + accounts on the controller manager + checks: + - id: AVD-KCV-0036 + severity: MEDIUM + - id: 1.3.5 + name: Ensure that the --root-ca-file argument is set as appropriate + description: Allow pods to verify the API servers serving certificate before + establishing connections + checks: + - id: AVD-KCV-0037 + severity: MEDIUM + - id: 1.3.6 + name: Ensure that the RotateKubeletServerCertificate argument is set to true + description: Enable kubelet server certificate rotation on controller-manager + checks: + - id: AVD-KCV-0038 + severity: MEDIUM + - id: 1.3.7 + name: Ensure that the --bind-address argument is set to 127.0.0.1 + description: Do not bind the scheduler service to non-loopback insecure addresses + checks: + - id: AVD-KCV-0039 + severity: LOW + - id: 1.4.1 + name: Ensure that the --profiling argument is set to false + description: Disable profiling, if not needed + checks: + - id: AVD-KCV-0034 + severity: MEDIUM + - id: 1.4.2 + name: Ensure that the --bind-address argument is set to 127.0.0.1 + description: Do not bind the scheduler service to non-loopback insecure addresses + checks: + - id: 
AVD-KCV-0041 + severity: CRITICAL + - id: 2.1 + name: Ensure that the --cert-file and --key-file arguments are set as + appropriate + description: Configure TLS encryption for the etcd service + checks: + - id: AVD-KCV-0042 + severity: MEDIUM + - id: 2.2 + name: Ensure that the --client-cert-auth argument is set to true + description: Enable client authentication on etcd service + checks: + - id: AVD-KCV-0043 + severity: CRITICAL + - id: 2.3 + name: Ensure that the --auto-tls argument is not set to true + description: Do not use self-signed certificates for TLS + checks: + - id: AVD-KCV-0044 + severity: CRITICAL + - id: 2.4 + name: Ensure that the --peer-cert-file and --peer-key-file arguments are set as + appropriate + description: etcd should be configured to make use of TLS encryption for peer + connections. + checks: + - id: AVD-KCV-0045 + severity: CRITICAL + - id: 2.5 + name: Ensure that the --peer-client-cert-auth argument is set to true + description: etcd should be configured for peer authentication + checks: + - id: AVD-KCV-0046 + severity: CRITICAL + - id: 2.6 + name: Ensure that the --peer-auto-tls argument is not set to true + description: Do not use self-signed certificates for TLS + checks: + - id: AVD-KCV-0047 + severity: HIGH + - id: 3.1.1 + name: Client certificate authentication should not be used for users (Manual) + description: Kubernetes provides the option to use client certificates for user + authentication. However as there is no way to revoke these certificates + when a user leaves an organization or loses their credential, they are + not suitable for this purpose + checks: null + severity: HIGH + - id: 3.2.1 + name: Ensure that a minimal audit policy is created (Manual) + description: Kubernetes can audit the details of requests made to the API + server. The --audit- policy-file flag must be set for this logging to be + enabled. 
+ checks: null + severity: HIGH + - id: 3.2.2 + name: Ensure that the audit policy covers key security concerns (Manual) + description: Ensure that the audit policy created for the cluster covers key + security concerns + checks: null + severity: HIGH + - id: 4.1.1 + name: Ensure that the kubelet service file permissions are set to 600 or more + restrictive + description: Ensure that the kubelet service file has permissions of 600 or more + restrictive. + checks: + - id: AVD-KCV-0069 + severity: HIGH + - id: 4.1.2 + name: Ensure that the kubelet service file ownership is set to root:root + description: Ensure that the kubelet service file ownership is set to root:root + checks: + - id: AVD-KCV-0070 + severity: HIGH + - id: 4.1.3 + name: If proxy kubeconfig file exists ensure permissions are set to 600 or more + restrictive + description: If kube-proxy is running, and if it is using a file-based + kubeconfig file, ensure that the proxy kubeconfig file has permissions + of 600 or more restrictive + checks: + - id: AVD-KCV-0071 + severity: HIGH + - id: 4.1.4 + name: If proxy kubeconfig file exists ensure ownership is set to root:root + description: If kube-proxy is running, ensure that the file ownership of its + kubeconfig file is set to root:root + checks: + - id: AVD-KCV-0072 + severity: HIGH + - id: 4.1.5 + name: Ensure that the --kubeconfig kubelet.conf file permissions are set to 600 + or more restrictive + description: Ensure that the kubelet.conf file has permissions of 600 or more + restrictive + checks: + - id: AVD-KCV-0073 + severity: HIGH + - id: 4.1.6 + name: Ensure that the --kubeconfig kubelet.conf file ownership is set to + root:root + description: Ensure that the kubelet.conf file ownership is set to root:root + checks: + - id: AVD-KCV-0074 + severity: HIGH + - id: 4.1.7 + name: Ensure that the certificate authorities file permissions are set to 600 or + more restrictive + description: Ensure that the certificate authorities file has permissions of 
600 + or more restrictive + checks: + - id: AVD-KCV-0075 + severity: CRITICAL + - id: 4.1.8 + name: Ensure that the client certificate authorities file ownership is set to + root:root + description: Ensure that the certificate authorities file ownership is set to + root:root + checks: + - id: AVD-KCV-0076 + severity: CRITICAL + - id: 4.1.9 + name: If the kubelet config.yaml configuration file is being used validate + permissions set to 600 or more restrictive + description: Ensure that if the kubelet refers to a configuration file with the + --config argument, that file has permissions of 600 or more restrictive + checks: + - id: AVD-KCV-0077 + severity: HIGH + - id: 4.1.10 + name: If the kubelet config.yaml configuration file is being used validate file + ownership is set to root:root + description: Ensure that if the kubelet refers to a configuration file with the + --config argument, that file is owned by root:root + checks: + - id: AVD-KCV-0078 + severity: HIGH + - id: 4.2.1 + name: Ensure that the --anonymous-auth argument is set to false + description: Disable anonymous requests to the Kubelet server + checks: + - id: AVD-KCV-0079 + severity: CRITICAL + - id: 4.2.2 + name: Ensure that the --authorization-mode argument is not set to AlwaysAllow + description: Do not allow all requests. 
Enable explicit authorization + checks: + - id: AVD-KCV-0080 + severity: CRITICAL + - id: 4.2.3 + name: Ensure that the --client-ca-file argument is set as appropriate + description: Enable Kubelet authentication using certificates + checks: + - id: AVD-KCV-0081 + severity: CRITICAL + - id: 4.2.4 + name: Verify that the --read-only-port argument is set to 0 + description: Disable the read-only port + checks: + - id: AVD-KCV-0082 + severity: HIGH + - id: 4.2.5 + name: Ensure that the --streaming-connection-idle-timeout argument is not set to + 0 + description: Do not disable timeouts on streaming connections + checks: + - id: AVD-KCV-0085 + severity: HIGH + - id: 4.2.6 + name: Ensure that the --protect-kernel-defaults argument is set to true + description: Protect tuned kernel parameters from overriding kubelet default + kernel parameter values + checks: + - id: AVD-KCV-0083 + severity: HIGH + - id: 4.2.7 + name: Ensure that the --make-iptables-util-chains argument is set to true + description: Allow Kubelet to manage iptables + checks: + - id: AVD-KCV-0084 + severity: HIGH + - id: 4.2.8 + name: Ensure that the --hostname-override argument is not set + description: Do not override node hostnames + checks: + - id: AVD-KCV-0086 + severity: HIGH + - id: 4.2.9 + name: Ensure that the --event-qps argument is set to 0 or a level which ensures + appropriate event capture + description: Security relevant information should be captured. 
The --event-qps + flag on the Kubelet can be used to limit the rate at which events are + gathered + checks: + - id: AVD-KCV-0087 + severity: HIGH + - id: 4.2.10 + name: Ensure that the --tls-cert-file and --tls-private-key-file arguments are + set as appropriate + description: Setup TLS connection on the Kubelets + checks: + - id: AVD-KCV-0088 + - id: AVD-KCV-0089 + severity: CRITICAL + - id: 4.2.11 + name: Ensure that the --rotate-certificates argument is not set to false + description: Enable kubelet client certificate rotation + checks: + - id: AVD-KCV-0090 + severity: CRITICAL + - id: 4.2.12 + name: Verify that the RotateKubeletServerCertificate argument is set to true + description: Enable kubelet server certificate rotation + checks: + - id: AVD-KCV-0091 + severity: CRITICAL + - id: 4.2.13 + name: Ensure that the Kubelet only makes use of Strong Cryptographic Ciphers + description: Ensure that the Kubelet is configured to only use strong + cryptographic ciphers + checks: + - id: AVD-KCV-0092 + severity: CRITICAL + - id: 5.1.1 + name: Ensure that the cluster-admin role is only used where required + description: The RBAC role cluster-admin provides wide-ranging powers over the + environment and should be used only where and when needed + checks: + - id: AVD-KSV-0111 + severity: HIGH + - id: 5.1.2 + name: Minimize access to secrets + description: The Kubernetes API stores secrets, which may be service account + tokens for the Kubernetes API or credentials used by workloads in the + cluster + checks: + - id: AVD-KSV-0041 + severity: HIGH + - id: 5.1.3 + name: Minimize wildcard use in Roles and ClusterRoles + description: Kubernetes Roles and ClusterRoles provide access to resources based + on sets of objects and actions that can be taken on those objects. 
It is + possible to set either of these to be the wildcard "*" which matches all + items + checks: + - id: AVD-KSV-0044 + - id: AVD-KSV-0045 + - id: AVD-KSV-0046 + severity: HIGH + - id: 5.1.6 + name: Ensure that Service Account Tokens are only mounted where necessary + description: Service accounts tokens should not be mounted in pods except where + the workload running in the pod explicitly needs to communicate with the + API server + checks: + - id: AVD-KSV-0036 + severity: HIGH + - id: 5.1.8 + name: Limit use of the Bind, Impersonate and Escalate permissions in the + Kubernetes cluster + description: Cluster roles and roles with the impersonate, bind or escalate + permissions should not be granted unless strictly required + checks: + - id: AVD-KSV-0043 + severity: HIGH + - id: 5.2.2 + name: Minimize the admission of privileged containers + description: Do not generally permit containers to be run with the + securityContext.privileged flag set to true + checks: + - id: AVD-KSV-0017 + severity: HIGH + - id: 5.2.3 + name: Minimize the admission of containers wishing to share the host process ID + namespace + description: Do not generally permit containers to be run with the hostPID flag + set to true. 
+ checks: + - id: AVD-KSV-0010 + severity: HIGH + - id: 5.2.4 + name: Minimize the admission of containers wishing to share the host IPC + namespace + description: Do not generally permit containers to be run with the hostIPC flag + set to true + checks: + - id: AVD-KSV-0008 + severity: HIGH + - id: 5.2.5 + name: Minimize the admission of containers wishing to share the host network + namespace + description: Do not generally permit containers to be run with the hostNetwork + flag set to true + checks: + - id: AVD-KSV-0009 + severity: HIGH + - id: 5.2.6 + name: Minimize the admission of containers with allowPrivilegeEscalation + description: Do not generally permit containers to be run with the + allowPrivilegeEscalation flag set to true + checks: + - id: AVD-KSV-0001 + severity: HIGH + - id: 5.2.7 + name: Minimize the admission of root containers + description: Do not generally permit containers to be run as the root user + checks: + - id: AVD-KSV-0012 + severity: MEDIUM + - id: 5.2.8 + name: Minimize the admission of containers with the NET_RAW capability + description: Do not generally permit containers with the potentially dangerous + NET_RAW capability + checks: + - id: AVD-KSV-0022 + severity: MEDIUM + - id: 5.2.9 + name: Minimize the admission of containers with added capabilities + description: Do not generally permit containers with capabilities assigned + beyond the default set + checks: + - id: AVD-KSV-0004 + severity: LOW + - id: 5.2.10 + name: Minimize the admission of containers with capabilities assigned + description: Do not generally permit containers with capabilities + checks: + - id: AVD-KSV-0003 + severity: LOW + - id: 5.2.11 + name: Minimize the admission of containers with capabilities assigned + description: Do not generally permit containers with capabilities + checks: + - id: AVD-KSV-0103 + severity: MEDIUM + - id: 5.2.12 + name: Minimize the admission of HostPath volumes + description: Do not generally admit containers which make use of 
hostPath volumes + checks: + - id: AVD-KSV-0023 + severity: MEDIUM + - id: 5.2.13 + name: Minimize the admission of containers which use HostPorts + description: Do not generally permit containers which require the use of HostPorts + checks: + - id: AVD-KSV-0024 + severity: MEDIUM + - id: 5.3.1 + name: Ensure that the CNI in use supports Network Policies (Manual) + description: There are a variety of CNI plugins available for Kubernetes. If the + CNI in use does not support Network Policies it may not be possible to + effectively restrict traffic in the cluster + checks: null + severity: MEDIUM + - id: 5.3.2 + name: Ensure that all Namespaces have Network Policies defined + description: Use network policies to isolate traffic in your cluster network + checks: + - id: AVD-KSV-0038 + severity: MEDIUM + - id: 5.4.1 + name: Prefer using secrets as files over secrets as environment variables + (Manual) + description: Kubernetes supports mounting secrets as data volumes or as + environment variables. 
Minimize the use of environment variable secrets + checks: null + severity: MEDIUM + - id: 5.4.2 + name: Consider external secret storage (Manual) + description: Consider the use of an external secrets storage and management + system, instead of using Kubernetes Secrets directly, if you have more + complex secret management needs + checks: null + severity: MEDIUM + - id: 5.5.1 + name: Configure Image Provenance using ImagePolicyWebhook admission controller + (Manual) + description: Configure Image Provenance for your deployment + checks: null + severity: MEDIUM + - id: 5.7.1 + name: Create administrative boundaries between resources using namespaces + (Manual) + description: Use namespaces to isolate your Kubernetes objects + checks: null + severity: MEDIUM + - id: 5.7.2 + name: Ensure that the seccomp profile is set to docker/default in your pod + definitions + description: Enable docker/default seccomp profile in your pod definitions + checks: + - id: AVD-KSV-0104 + severity: MEDIUM + - id: 5.7.3 + name: Apply Security Context to Your Pods and Containers + description: Apply Security Context to Your Pods and Containers + checks: + - id: AVD-KSV-0021 + - id: AVD-KSV-0020 + - id: AVD-KSV-0005 + - id: AVD-KSV-0025 + - id: AVD-KSV-0104 + - id: AVD-KSV-0030 + severity: HIGH + - id: 5.7.4 + name: The default namespace should not be used + description: Kubernetes provides a default namespace, where objects are placed + if no namespace is specified for them + checks: + - id: AVD-KSV-0110 + severity: MEDIUM diff --git a/pkg/iac/specs/compliance/k8s-nsa-1.0.yaml b/pkg/iac/specs/compliance/k8s-nsa-1.0.yaml new file mode 100644 index 000000000000..3f3efd84c24d --- /dev/null +++ b/pkg/iac/specs/compliance/k8s-nsa-1.0.yaml @@ -0,0 +1,175 @@ +--- +spec: + id: k8s-nsa + title: National Security Agency - Kubernetes Hardening Guidance v1.0 + description: National Security Agency - Kubernetes Hardening Guidance + relatedResources : + - 
https://www.nsa.gov/Press-Room/News-Highlights/Article/Article/2716980/nsa-cisa-release-kubernetes-hardening-guidance/ + version: "1.0" + controls: + - name: Non-root containers + description: 'Check that container is not running as root' + id: '1.0' + checks: + - id: AVD-KSV-0012 + severity: 'MEDIUM' + - name: Immutable container file systems + description: 'Check that container root file system is immutable' + id: '1.1' + checks: + - id: AVD-KSV-0014 + severity: 'LOW' + - name: Preventing privileged containers + description: 'Controls whether Pods can run privileged containers' + id: '1.2' + checks: + - id: AVD-KSV-0017 + severity: 'HIGH' + - name: Share containers process namespaces + description: 'Controls whether containers can share process namespaces' + id: '1.3' + checks: + - id: AVD-KSV-0008 + severity: 'HIGH' + - name: Share host process namespaces + description: 'Controls whether share host process namespaces' + id: '1.4' + checks: + - id: AVD-KSV-0009 + severity: 'HIGH' + - name: Use the host network + description: 'Controls whether containers can use the host network' + id: '1.5' + checks: + - id: AVD-KSV-0010 + severity: 'HIGH' + - name: Run with root privileges or with root group membership + description: 'Controls whether container applications can run with root privileges or with root group membership' + id: '1.6' + checks: + - id: AVD-KSV-0029 + severity: 'LOW' + - name: Restricts escalation to root privileges + description: 'Control check restrictions escalation to root privileges' + id: '1.7' + checks: + - id: AVD-KSV-0001 + severity: 'MEDIUM' + - name: Sets the SELinux context of the container + description: 'Control checks if pod sets the SELinux context of the container' + id: '1.8' + checks: + - id: AVD-KSV-0002 + severity: 'MEDIUM' + - name: Restrict a container's access to resources with AppArmor + description: 'Control checks the restriction of containers access to resources with AppArmor' + id: '1.9' + checks: + - id: AVD-KSV-0030 + 
severity: 'MEDIUM' + - name: Sets the seccomp profile used to sandbox containers + description: 'Control checks the seccomp profile used to sandbox containers' + id: '1.10' + checks: + - id: AVD-KSV-0030 + severity: 'LOW' + - name: Protecting Pod service account tokens + description: 'Control checks whether the service account token mount is disabled (automountServiceAccountToken: false)' + id: '1.11' + checks: + - id: AVD-KSV-0036 + severity: 'MEDIUM' + - name: Namespace kube-system should not be used by users + description: 'Control check whether Namespace kube-system is not used by users' + id: '1.12' + defaultStatus: 'FAIL' + checks: + - id: AVD-KSV-0037 + severity: 'MEDIUM' + - name: Pod and/or namespace Selectors usage + description: 'Control check validates the pod and/or namespace Selectors usage' + id: '2.0' + defaultStatus: 'FAIL' + checks: + - id: AVD-KSV-0038 + severity: 'MEDIUM' + - name: Use CNI plugin that supports NetworkPolicy API (Manual) + description: 'Control check whether a CNI plugin that supports the NetworkPolicy API is installed' + id: '3.0' + defaultStatus: 'FAIL' + checks: + severity: 'CRITICAL' + - name: Use ResourceQuota policies to limit resources + description: 'Control check the use of ResourceQuota policy to limit aggregate resource usage within namespace' + id: '4.0' + defaultStatus: 'FAIL' + checks: + - id: AVD-KSV-0040 + severity: 'MEDIUM' + - name: Use LimitRange policies to limit resources + description: 'Control check the use of LimitRange policy to limit resource usage for namespaces or nodes' + id: '4.1' + defaultStatus: 'FAIL' + checks: + - id: AVD-KSV-0039 + severity: 'MEDIUM' + - name: Control plane disables insecure port (Manual) + description: 'Control check whether the control plane disables the insecure port' + id: '5.0' + defaultStatus: 'FAIL' + checks: + severity: 'CRITICAL' + - name: Encrypt etcd communication + description: 'Control check whether etcd communication is encrypted' + id: '5.1' + checks: + - id: AVD-KCV-0030 + severity: 'CRITICAL' + - name: Ensure 
kube config file permissions (Manual) + description: 'Control check whether kube config file permissions are set' + id: '6.0' + defaultStatus: 'FAIL' + checks: + severity: 'CRITICAL' + - name: Check that encryption resource has been set + description: 'Control checks whether encryption resource has been set' + id: '6.1' + checks: + - id: AVD-KCV-0029 + severity: 'CRITICAL' + - name: Check encryption provider + description: 'Control checks whether encryption provider has been set' + id: '6.2' + checks: + - id: AVD-KCV-0004 + severity: 'CRITICAL' + - name: Make sure anonymous-auth is unset + description: 'Control checks whether anonymous-auth is unset' + id: '7.0' + checks: + - id: AVD-KCV-0001 + severity: 'CRITICAL' + - name: Make sure --authorization-mode=RBAC + description: 'Control check whether RBAC permission is in use' + id: '7.1' + checks: + - id: AVD-KCV-0008 + severity: 'CRITICAL' + - name: Audit policy is configured (Manual) + description: 'Control check whether audit policy is configured' + id: '8.0' + defaultStatus: 'FAIL' + checks: + severity: 'HIGH' + - name: Audit log path is configured + description: 'Control check whether audit log path is configured' + id: '8.1' + checks: + - id: AVD-KCV-0019 + severity: 'MEDIUM' + - name: Audit log aging + description: 'Control check whether audit log aging is configured' + id: '8.2' + checks: + - id: AVD-KCV-0020 + severity: 'MEDIUM' \ No newline at end of file diff --git a/pkg/iac/specs/compliance/k8s-pss-baseline.yaml b/pkg/iac/specs/compliance/k8s-pss-baseline.yaml new file mode 100644 index 000000000000..3d6524d9fbca --- /dev/null +++ b/pkg/iac/specs/compliance/k8s-pss-baseline.yaml @@ -0,0 +1,75 @@ +spec: + id: "k8s-pss-baseline" + title: "Kubernetes Pod Security Standards - Baseline" + description: Kubernetes Pod Security Standards - Baseline + relatedResources : + - https://kubernetes.io/docs/concepts/security/pod-security-standards/#baseline + version: "0.1" + controls: + - name: HostProcess + description: "Windows pods 
offer the ability to run HostProcess containers which enables privileged access to the Windows node. Privileged access to the host is disallowed in the baseline policy" + id: "1" + checks: + - id: AVD-KSV-0103 + severity: 'HIGH' + - name: Host Namespaces + description: "Sharing the host namespaces must be disallowed." + id: "2" + checks: + - id: AVD-KSV-0008 + severity: 'HIGH' + - name: Privileged Containers + description: "Privileged Pods disable most security mechanisms and must be disallowed." + id: "3" + checks: + - id: AVD-KSV-0017 + severity: 'HIGH' + - name: Capabilities + description: "Adding additional capabilities beyond those listed below must be disallowed." + id: "4" + checks: + - id: AVD-KSV-0022 + severity: 'MEDIUM' + - name: HostPath Volumes + description: "HostPath volumes must be forbidden." + id: "5" + checks: + - id: AVD-KSV-0023 + severity: 'MEDIUM' + - name: host ports + description: "hostports should be disallowed, or at minimum restricted to a known list." + id: "6" + checks: + - id: avd-ksv-0024 + severity: 'HIGH' + - name: AppArmor + description: "On supported hosts, the runtime/default AppArmor profile is applied by default. The baseline policy should prevent overriding or disabling the default AppArmor profile, or restrict overrides to an allowed set of profiles." + id: "7" + checks: + - id: avd-ksv-0002 + severity: 'HIGH' + - name: SELinux + description: "Setting the SELinux type is restricted, and setting a custom SELinux user or role option is forbidden." + id: "8" + checks: + - id: avd-ksv-0025 + severity: 'MEDIUM' + - name: "/proc Mount Type" + description: "The default /proc masks are set up to reduce attack surface, and should be required." + id: "9" + checks: + - id: avd-ksv-0027 + severity: 'MEDIUM' + - name: "Seccomp" + description: "Seccomp profile must not be explicitly set to Unconfined." 
+ id: "10" + checks: + - id: avd-ksv-0104 + severity: 'MEDIUM' + - name: "Sysctls" + description: 'Sysctls can disable security mechanisms or affect all containers on a host, and should be disallowed except for an allowed "safe" subset. A sysctl is considered safe if it is namespaced in the container or the Pod, and it is isolated from other Pods or processes on the same Node.' + id: "11" + checks: + - id: avd-ksv-0026 + severity: 'MEDIUM' + diff --git a/pkg/iac/specs/compliance/k8s-pss-restricted.yaml b/pkg/iac/specs/compliance/k8s-pss-restricted.yaml new file mode 100644 index 000000000000..6b0eb5dfb19b --- /dev/null +++ b/pkg/iac/specs/compliance/k8s-pss-restricted.yaml @@ -0,0 +1,111 @@ +spec: + id: "k8s-pss-restricted" + title: "Kubernetes Pod Security Standards - Restricted" + description: Kubernetes Pod Security Standards - Restricted + relatedResources : + - https://kubernetes.io/docs/concepts/security/pod-security-standards/#restricted + version: "0.1" + controls: + - name: HostProcess + description: "Windows pods offer the ability to run HostProcess containers which enables privileged access to the Windows node. Privileged access to the host is disallowed in the baseline policy" + id: "1" + checks: + - id: AVD-KSV-0103 + severity: 'HIGH' + - name: Host Namespaces + description: "Sharing the host namespaces must be disallowed." + id: "2" + checks: + - id: AVD-KSV-0008 + severity: 'HIGH' + - name: Privileged Containers + description: "Privileged Pods disable most security mechanisms and must be disallowed." + id: "3" + checks: + - id: AVD-KSV-0017 + severity: 'HIGH' + - name: Capabilities + description: "Adding additional capabilities beyond those listed below must be disallowed." + id: "4" + checks: + - id: AVD-KSV-0022 + severity: 'MEDIUM' + - name: HostPath Volumes + description: "HostPath volumes must be forbidden." 
+ id: "5" + checks: + - id: AVD-KSV-0023 + severity: 'MEDIUM' + - name: host ports + description: "hostports should be disallowed, or at minimum restricted to a known list." + id: "6" + checks: + - id: avd-ksv-0024 + severity: 'HIGH' + - name: AppArmor + description: "On supported hosts, the runtime/default AppArmor profile is applied by default. The baseline policy should prevent overriding or disabling the default AppArmor profile, or restrict overrides to an allowed set of profiles." + id: "7" + checks: + - id: avd-ksv-0002 + severity: 'HIGH' + - name: SELinux + description: "Setting the SELinux type is restricted, and setting a custom SELinux user or role option is forbidden." + id: "8" + checks: + - id: avd-ksv-0025 + severity: 'MEDIUM' + - name: "/proc Mount Type" + description: "The default /proc masks are set up to reduce attack surface, and should be required." + id: "9" + checks: + - id: avd-ksv-0027 + severity: 'MEDIUM' + - name: "Seccomp" + description: "Seccomp profile must not be explicitly set to Unconfined." + id: "10" + checks: + - id: avd-ksv-0104 + severity: 'MEDIUM' + - name: "Sysctls" + description: 'Sysctls can disable security mechanisms or affect all containers on a host, and should be disallowed except for an allowed "safe" subset. A sysctl is considered safe if it is namespaced in the container or the Pod, and it is isolated from other Pods or processes on the same Node.' + id: "11" + checks: + - id: avd-ksv-0026 + severity: 'MEDIUM' + - name: "Volume Types" + description: 'The restricted policy only permits specific volume types.' + id: "12" + checks: + - id: avd-ksv-0028 + severity: 'LOW' + - name: "Privilege Escalation" + description: 'Privilege escalation (such as via set-user-ID or set-group-ID file mode) should not be allowed.' + id: "13" + checks: + - id: avd-ksv-0001 + severity: 'MEDIUM' + - name: "Running as Non-root" + description: 'Containers must be required to run as non-root users.' 
+ id: "14" + checks: + - id: avd-ksv-0012 + severity: 'MEDIUM' + - name: "Running as Non-root user" + description: 'Containers must not set runAsUser to 0' + id: "15" + checks: + - id: avd-ksv-0105 + severity: 'LOW' + - name: "Seccomp" + description: 'Seccomp profile must be explicitly set to one of the allowed values. Both the Unconfined profile and the absence of a profile are prohibited' + id: "16" + checks: + - id: avd-ksv-0030 + severity: 'LOW' + - name: "Capabilities" + description: 'Containers must drop ALL capabilities, and are only permitted to add back the NET_BIND_SERVICE capability.' + id: "17" + checks: + - id: avd-ksv-0106 + severity: 'LOW' + diff --git a/pkg/iac/specs/loader.go b/pkg/iac/specs/loader.go new file mode 100644 index 000000000000..1cbefbb07d19 --- /dev/null +++ b/pkg/iac/specs/loader.go @@ -0,0 +1,61 @@ +package specs + +import ( + "embed" + "fmt" + "io" + "os" + "strings" + + "gopkg.in/yaml.v3" +) + +const ComplianceFolder = "compliance" + +var ( + //go:embed compliance + complainceFS embed.FS +) + +var complianceSpecMap map[string]string + +// Load compliance specs +func init() { + dir, _ := complainceFS.ReadDir(ComplianceFolder) + complianceSpecMap = make(map[string]string, 0) + for _, r := range dir { + if !strings.Contains(r.Name(), ".yaml") { + continue + } + file, err := complainceFS.Open(fmt.Sprintf("%s/%s", ComplianceFolder, r.Name())) + if err != nil { + panic(err) + } + specContent, err := io.ReadAll(file) + if err != nil { + panic(err) + } + var fileSpec map[string]interface{} + err = yaml.Unmarshal(specContent, &fileSpec) + if err != nil { + panic(err) + } + if specVal, ok := fileSpec["spec"].(map[string]interface{}); ok { + if idVal, ok := specVal["id"].(string); ok { + complianceSpecMap[idVal] = string(specContent) + } + } + } +} + +// GetSpec returns the spec content +func GetSpec(name string) string { + if spec, ok := complianceSpecMap[name]; ok { // use embedded spec + return spec + } + spec, err := 
os.ReadFile(strings.TrimPrefix(name, "@")) // use custom spec by filepath + if err != nil { + return "" + } + return string(spec) +} diff --git a/pkg/iac/specs/loader_test.go b/pkg/iac/specs/loader_test.go new file mode 100644 index 000000000000..4618fe7baae5 --- /dev/null +++ b/pkg/iac/specs/loader_test.go @@ -0,0 +1,39 @@ +package specs + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestLoadSpecs(t *testing.T) { + tests := []struct { + name string + specName string + wantSpecPath string + }{ + {name: "nsa spec", specName: "k8s-nsa", wantSpecPath: "./compliance/k8s-nsa-1.0.yaml"}, + {name: "k8s cis bench", specName: "k8s-cis", wantSpecPath: "./compliance/k8s-cis-1.23.yaml"}, + {name: "k8s pss baseline", specName: "k8s-pss-baseline", wantSpecPath: "./compliance/k8s-pss-baseline.yaml"}, + {name: "k8s pss restricted", specName: "k8s-pss-restricted", wantSpecPath: "./compliance/k8s-pss-restricted.yaml"}, + {name: "awscis1.2", specName: "aws-cis-1.2", wantSpecPath: "./compliance/aws-cis-1.2.yaml"}, + {name: "awscis1.4", specName: "aws-cis-1.4", wantSpecPath: "./compliance/aws-cis-1.4.yaml"}, + {name: "docker cis bench", specName: "docker-cis", wantSpecPath: "./compliance/docker-cis.yaml"}, + {name: "awscis1.2 by filepath", specName: "@./compliance/aws-cis-1.2.yaml", wantSpecPath: "./compliance/aws-cis-1.2.yaml"}, + {name: "bogus spec", specName: "foobarbaz"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if tt.wantSpecPath != "" { + wantSpecData, err := os.ReadFile(tt.wantSpecPath) + assert.NoError(t, err) + gotSpecData := GetSpec(tt.specName) + assert.Equal(t, gotSpecData, string(wantSpecData)) + } else { + assert.Empty(t, GetSpec(tt.specName), tt.name) + } + }) + } +} diff --git a/test/module_test.go b/test/module_test.go index 658dee77313d..5631643681d8 100644 --- a/test/module_test.go +++ b/test/module_test.go @@ -17,7 +17,6 @@ import ( parser2 
"github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy-policies/checks/cloud/aws/iam" "github.com/aquasecurity/trivy/test/testutil" ) @@ -627,6 +626,6 @@ data "aws_iam_policy_document" "policy" { modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) results, _, _ := executor.New().Execute(modules) - testutil.AssertRuleNotFound(t, iam.CheckEnforceGroupMFA.LongID(), results, "") + testutil.AssertRuleNotFound(t, "aws-iam-enforce-group-mfa", results, "") } From a334eff4a3601347e3a1eab7d30e29a8925afc44 Mon Sep 17 00:00:00 2001 From: Simar Date: Thu, 1 Feb 2024 21:53:40 -0700 Subject: [PATCH 13/13] revert docker upgrade --- go.mod | 4 +--- go.sum | 10 ++-------- 2 files changed, 3 insertions(+), 11 deletions(-) diff --git a/go.mod b/go.mod index c912f7b0b27b..931ece3947ec 100644 --- a/go.mod +++ b/go.mod @@ -41,7 +41,7 @@ require ( github.com/cheggaaa/pb/v3 v3.1.4 github.com/containerd/containerd v1.7.11 github.com/csaf-poc/csaf_distribution/v3 v3.0.0 - github.com/docker/docker v25.0.0+incompatible + github.com/docker/docker v24.0.7+incompatible github.com/docker/go-connections v0.4.0 github.com/fatih/color v1.15.0 github.com/go-git/go-git/v5 v5.11.0 @@ -236,7 +236,6 @@ require ( github.com/cyphar/filepath-securejoin v0.2.4 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect - github.com/distribution/reference v0.5.0 // indirect github.com/dlclark/regexp2 v1.4.0 // indirect github.com/docker/cli v24.0.6+incompatible // indirect github.com/docker/distribution v2.8.2+incompatible // indirect @@ -329,7 +328,6 @@ require ( github.com/moby/sys/mountinfo v0.6.2 // indirect github.com/moby/sys/sequential v0.5.0 // indirect 
github.com/moby/sys/signal v0.7.0 // indirect - github.com/moby/sys/user v0.1.0 // indirect github.com/moby/term v0.5.0 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect diff --git a/go.sum b/go.sum index de336a67e987..570e1d31a49d 100644 --- a/go.sum +++ b/go.sum @@ -709,8 +709,6 @@ github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48 h1:fRzb/w+pyskVMQ+ github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= github.com/distribution/distribution/v3 v3.0.0-20221208165359-362910506bc2 h1:aBfCb7iqHmDEIp6fBvC/hQUddQfg+3qdYjwzaiP9Hnc= github.com/distribution/distribution/v3 v3.0.0-20221208165359-362910506bc2/go.mod h1:WHNsWjnIn2V1LYOrME7e8KxSeKunYHsxEm4am0BUtcI= -github.com/distribution/reference v0.5.0 h1:/FUIFXtfc/x2gpa5/VGfiGLuOIdYa1t65IKK2OFGvA0= -github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E= github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= @@ -726,8 +724,8 @@ github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m3 github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/docker v20.10.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker v23.0.0-rc.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker v25.0.0+incompatible h1:g9b6wZTblhMgzOT2tspESstfw6ySZ9kdm94BLDKaZac= -github.com/docker/docker v25.0.0+incompatible/go.mod 
h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v24.0.7+incompatible h1:Wo6l37AuwP3JaMnZa226lzVXGA3F9Ig1seQen0cKYlM= +github.com/docker/docker v24.0.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker-credential-helpers v0.6.3/go.mod h1:WRaJzqw3CTB9bk10avuGsjVBZsD05qeibJ1/TYlvc0Y= github.com/docker/docker-credential-helpers v0.7.0 h1:xtCHsjxogADNZcdv1pKUHXryefjlVRqWqIhk/uXJp0A= github.com/docker/docker-credential-helpers v0.7.0/go.mod h1:rETQfLdHNT3foU5kuNkFR1R1V12OJRRO5lzt2D1b5X0= @@ -1388,8 +1386,6 @@ github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWK github.com/moby/sys/signal v0.7.0 h1:25RW3d5TnQEoKvRbEKUGay6DCQ46IxAVTT9CUMgmsSI= github.com/moby/sys/signal v0.7.0/go.mod h1:GQ6ObYZfqacOwTtlXvcmh9A26dVRul/hbOZn88Kg8Tg= github.com/moby/sys/symlink v0.1.0/go.mod h1:GGDODQmbFOjFsXvfLVn3+ZRxkch54RkSiGqsZeMYowQ= -github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg= -github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU= github.com/moby/term v0.0.0-20200312100748-672ec06f55cd/go.mod h1:DdlQx2hp0Ss5/fLikoLlEeIYiATotOjgB//nb973jeo= github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= @@ -1787,8 +1783,6 @@ go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.21.0 h1:cl5P5/GIfFh4t6xyruO go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.21.0/go.mod h1:zgBdWWAu7oEEMC06MMKc5NLbA/1YDXV1sMpSqEeLQLg= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.21.0 h1:tIqheXEFWAZ7O8A7m+J0aPTmpJN3YQ7qetUAdkkkKpk= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.21.0/go.mod h1:nUeKExfxAQVbiVFn32YXpXZZHZ61Cc3s3Rn1pDBGAb0= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp 
v1.19.0 h1:IeMeyr1aBvBiPVYihXIaeIZba6b8E1bYp7lbdxK8CQg= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0/go.mod h1:oVdCUtjq9MK9BlS7TtucsQwUcXcymNiEDjgDD2jMtZU= go.opentelemetry.io/otel/metric v1.21.0 h1:tlYWfeo+Bocx5kLEloTjbcDwBuELRrIFxwdQ36PlJu4= go.opentelemetry.io/otel/metric v1.21.0/go.mod h1:o1p3CA8nNHW8j5yuQLdc1eeqEaPfzug24uvsyIEJRWM= go.opentelemetry.io/otel/sdk v1.21.0 h1:FTt8qirL1EysG6sTQRZ5TokkU8d0ugCj8htOgThZXQ8=